diff --git a/One-2-3-45-master 2/.DS_Store b/One-2-3-45-master 2/.DS_Store
deleted file mode 100644
index 1019af909bdbff683b29ebae009377520534c494..0000000000000000000000000000000000000000
Binary files a/One-2-3-45-master 2/.DS_Store and /dev/null differ
diff --git a/One-2-3-45-master 2/.gitattributes b/One-2-3-45-master 2/.gitattributes
deleted file mode 100644
index a6344aac8c09253b3b630fb776ae94478aa0275b..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/.gitattributes
+++ /dev/null
@@ -1,35 +0,0 @@
-*.7z filter=lfs diff=lfs merge=lfs -text
-*.arrow filter=lfs diff=lfs merge=lfs -text
-*.bin filter=lfs diff=lfs merge=lfs -text
-*.bz2 filter=lfs diff=lfs merge=lfs -text
-*.ckpt filter=lfs diff=lfs merge=lfs -text
-*.ftz filter=lfs diff=lfs merge=lfs -text
-*.gz filter=lfs diff=lfs merge=lfs -text
-*.h5 filter=lfs diff=lfs merge=lfs -text
-*.joblib filter=lfs diff=lfs merge=lfs -text
-*.lfs.* filter=lfs diff=lfs merge=lfs -text
-*.mlmodel filter=lfs diff=lfs merge=lfs -text
-*.model filter=lfs diff=lfs merge=lfs -text
-*.msgpack filter=lfs diff=lfs merge=lfs -text
-*.npy filter=lfs diff=lfs merge=lfs -text
-*.npz filter=lfs diff=lfs merge=lfs -text
-*.onnx filter=lfs diff=lfs merge=lfs -text
-*.ot filter=lfs diff=lfs merge=lfs -text
-*.parquet filter=lfs diff=lfs merge=lfs -text
-*.pb filter=lfs diff=lfs merge=lfs -text
-*.pickle filter=lfs diff=lfs merge=lfs -text
-*.pkl filter=lfs diff=lfs merge=lfs -text
-*.pt filter=lfs diff=lfs merge=lfs -text
-*.pth filter=lfs diff=lfs merge=lfs -text
-*.rar filter=lfs diff=lfs merge=lfs -text
-*.safetensors filter=lfs diff=lfs merge=lfs -text
-saved_model/**/* filter=lfs diff=lfs merge=lfs -text
-*.tar.* filter=lfs diff=lfs merge=lfs -text
-*.tar filter=lfs diff=lfs merge=lfs -text
-*.tflite filter=lfs diff=lfs merge=lfs -text
-*.tgz filter=lfs diff=lfs merge=lfs -text
-*.wasm filter=lfs diff=lfs merge=lfs -text
-*.xz filter=lfs diff=lfs merge=lfs -text
-*.zip filter=lfs diff=lfs merge=lfs -text
-*.zst filter=lfs diff=lfs merge=lfs -text
-*tfevents* filter=lfs diff=lfs merge=lfs -text
diff --git a/One-2-3-45-master 2/.gitignore b/One-2-3-45-master 2/.gitignore
deleted file mode 100644
index 9e1006878b0d1f287bbda4a9cf4b352b2e41f1ab..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/.gitignore
+++ /dev/null
@@ -1,11 +0,0 @@
-__pycache__/
-exp/
-src/
-*.DS_Store
-*.ipynb
-*.egg-info/
-*.ckpt
-*.pth
-
-!example.ipynb
-!reconstruction/exp
\ No newline at end of file
diff --git a/One-2-3-45-master 2/LICENSE b/One-2-3-45-master 2/LICENSE
deleted file mode 100644
index 261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/LICENSE
+++ /dev/null
@@ -1,201 +0,0 @@
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/One-2-3-45-master 2/README.md b/One-2-3-45-master 2/README.md
deleted file mode 100644
index 4974e7ecc6bc6858a81bb3b7dfc950d88b770e41..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/README.md
+++ /dev/null
@@ -1,221 +0,0 @@
-[Paper] [Project] [Demo] [BibTeX]
-
-One-2-3-45 rethinks how to leverage 2D diffusion models for 3D AIGC and introduces a novel forward-only paradigm that avoids time-consuming per-shape optimization.
-
-https://github.com/One-2-3-45/One-2-3-45/assets/16759292/a81d6e32-8d29-43a5-b044-b5112b9f9664
-
-
-
-https://github.com/One-2-3-45/One-2-3-45/assets/16759292/5ecd45ef-8fd3-4643-af4c-fac3050a0428
-
-
-## News
-**[09/21/2023]**
-One-2-3-45 has been accepted by NeurIPS 2023. See you in New Orleans!
-
-**[09/11/2023]**
-Training code released.
-
-**[08/18/2023]**
-Inference code released.
-
-**[07/24/2023]**
-Our demo reached the HuggingFace top 4 trending and was featured in 🤗 Spaces of the Week 🔥! Special thanks to HuggingFace 🤗 for sponsoring this demo!!
-
-**[07/11/2023]**
-[Online interactive demo](https://huggingface.co/spaces/One-2-3-45/One-2-3-45) released! Explore it and create your own 3D models in just 45 seconds!
-
-**[06/29/2023]**
-Check out our [paper](https://arxiv.org/pdf/2306.16928.pdf). [[X](https://twitter.com/_akhaliq/status/1674617785119305728)]
-
-## Installation
-Hardware requirement: an NVIDIA GPU with at least 18 GB of memory (_e.g._, RTX 3090 or A10). Tested on Ubuntu.
-
-We offer two ways to set up the environment:
-
-### Traditional Installation
-
-Step 1: Install Debian packages.
-
-```bash
-sudo apt update && sudo apt install git-lfs libsparsehash-dev build-essential
-```
-
-
-
-Step 2: Create and activate a conda environment.
-
-```bash
-conda create -n One2345 python=3.10
-conda activate One2345
-```
-
-
-
-Step 3: Clone the repository to the local machine.
-
-```bash
-# Make sure you have git-lfs installed.
-git lfs install
-git clone https://github.com/One-2-3-45/One-2-3-45
-cd One-2-3-45
-```
-
-
-
-Step 4: Install project dependencies using pip.
-
-```bash
-# Ensure that the installed CUDA version matches PyTorch's CUDA version.
-# Example: CUDA 11.8 installation
-wget https://developer.download.nvidia.com/compute/cuda/11.8.0/local_installers/cuda_11.8.0_520.61.05_linux.run
-sudo sh cuda_11.8.0_520.61.05_linux.run
-export PATH="/usr/local/cuda-11.8/bin:$PATH"
-export LD_LIBRARY_PATH="/usr/local/cuda-11.8/lib64:$LD_LIBRARY_PATH"
-# Install PyTorch 2.0
-pip install --no-cache-dir torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cu118
-# Install dependencies
-pip install -r requirements.txt
-# Install inplace_abn and torchsparse
-export TORCH_CUDA_ARCH_LIST="7.0;7.2;8.0;8.6+PTX" # CUDA architectures. Modify according to your hardware.
-export IABN_FORCE_CUDA=1
-pip install inplace_abn
-FORCE_CUDA=1 pip install --no-cache-dir git+https://github.com/mit-han-lab/torchsparse.git@v1.4.0
-```
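-
-As a quick optional sanity check (not part of the original steps), you can confirm that the installed PyTorch build sees the GPU and was built against the expected CUDA version:
-
-```bash
-# Prints the PyTorch version, the CUDA version it was built with, and GPU availability.
-python -c "import torch; print(torch.__version__, torch.version.cuda, torch.cuda.is_available())"
-```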
-
-
-
-Step 5: Download model checkpoints.
-
-```bash
-python download_ckpt.py
-```
-
-
-
-### Installation by Docker Images
-
-Option 1: Pull and Play (environment and checkpoints). (~22.3G)
-
-```bash
-# Pull the Docker image that contains the full repository.
-docker pull chaoxu98/one2345:demo_1.0
-# An interactive demo will be launched automatically upon running the container.
-# This will provide a public URL like XXXXXXX.gradio.live
-docker run --name One-2-3-45_demo --gpus all -it chaoxu98/one2345:demo_1.0
-```
-
-
-
-Option 2: Environment Only. (~7.3G)
-
-```bash
-# Pull the Docker image with all project dependencies installed.
-docker pull chaoxu98/one2345:1.0
-# Start a Docker container named One2345.
-docker run --name One-2-3-45 --gpus all -it chaoxu98/one2345:1.0
-# Get a bash shell in the container.
-docker exec -it One-2-3-45 /bin/bash
-# Clone the repository to the local machine.
-git clone https://github.com/One-2-3-45/One-2-3-45
-cd One-2-3-45
-# Download model checkpoints.
-python download_ckpt.py
-# Refer to the Getting Started (Inference) section for running inference.
-```
-
-
-## Getting Started (Inference)
-
-The first run takes longer because the models need to be compiled.
-
-Expected time per image: about 40 seconds on an NVIDIA A6000.
-```bash
-# 1. Script
-python run.py --img_path PATH_TO_INPUT_IMG --half_precision
-
-# 2. Interactive demo (Gradio) with a friendly web interface
-# A URL will be provided in the output
-# (Local: 127.0.0.1:7860; Public: XXXXXXX.gradio.live)
-cd demo/
-python app.py
-
-# 3. Jupyter Notebook
-example.ipynb
-```
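-
-To process a whole folder of images, a plain shell loop over `run.py` is enough. The snippet below is only a sketch: the `./images` input folder is an assumption, and it simply reuses the `--img_path` and `--half_precision` flags shown above.
-
-```bash
-# Hypothetical batch inference over every PNG in ./images.
-for img in ./images/*.png; do
-    python run.py --img_path "$img" --half_precision
-done
-```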
-
-## Training Your Own Model
-
-### Data Preparation
-We use the Objaverse-LVIS dataset for training and render the selected shapes (those with a CC-BY license) into 2D images with Blender.
-#### Download the training images.
-Download all One2345.zip.part-* files (5 files in total) from here, then concatenate them into a single .zip file using the following command:
-```bash
-cat One2345.zip.part-* > One2345.zip
-```
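-
-Optionally (this is not part of the original instructions), you can verify the reassembled archive before extracting it:
-
-```bash
-# Test the integrity of the combined zip without extracting it.
-unzip -t One2345.zip
-```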
-
-#### Unzip the training images zip file.
-Unzip the archive into a folder of your choice (`YOUR_BASE_FOLDER`) with the following command:
-
-```bash
-unzip One2345.zip -d YOUR_BASE_FOLDER
-```
-
-#### Download meta files.
-
-Download `One2345_training_pose.json` and `lvis_split_cc_by.json` from here and put them into the same folder as the training images (`YOUR_BASE_FOLDER`).
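-
-For example, assuming the two files were downloaded to the current directory (the paths here are placeholders, adjust as needed):
-
-```bash
-# Move the meta files next to the extracted training images.
-mv One2345_training_pose.json lvis_split_cc_by.json YOUR_BASE_FOLDER/
-```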
-
-Your file structure should look like this:
-```
-# One2345 is your base folder used in the previous steps
-
-One2345
-├── One2345_training_pose.json
-├── lvis_split_cc_by.json
-└── zero12345_narrow
- ├── 000-000
- ├── 000-001
- ├── 000-002
- ...
- └── 000-159
-
-```
-
-### Training
-Set `trainpath`, `valpath`, and `testpath` in the config file `./reconstruction/confs/one2345_lod_train.conf` to the `YOUR_BASE_FOLDER` used in the data preparation steps, then run the following command:
-```bash
-cd reconstruction
-python exp_runner_generic_blender_train.py --mode train --conf confs/one2345_lod_train.conf
-```
-Experiment logs and checkpoints will be saved in `./reconstruction/exp/`.
-
-## Citation
-
-If you find our code helpful, please cite our paper:
-
-```
-@misc{liu2023one2345,
- title={One-2-3-45: Any Single Image to 3D Mesh in 45 Seconds without Per-Shape Optimization},
- author={Minghua Liu and Chao Xu and Haian Jin and Linghao Chen and Mukund Varma T and Zexiang Xu and Hao Su},
- year={2023},
- eprint={2306.16928},
- archivePrefix={arXiv},
- primaryClass={cs.CV}
-}
-```
diff --git a/One-2-3-45-master 2/configs/sd-objaverse-finetune-c_concat-256.yaml b/One-2-3-45-master 2/configs/sd-objaverse-finetune-c_concat-256.yaml
deleted file mode 100644
index 488dafa27fcd632215ab869f9ab15c8ed452b66a..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/configs/sd-objaverse-finetune-c_concat-256.yaml
+++ /dev/null
@@ -1,117 +0,0 @@
-model:
- base_learning_rate: 1.0e-04
- target: ldm.models.diffusion.ddpm.LatentDiffusion
- params:
- linear_start: 0.00085
- linear_end: 0.0120
- num_timesteps_cond: 1
- log_every_t: 200
- timesteps: 1000
- first_stage_key: "image_target"
- cond_stage_key: "image_cond"
- image_size: 32
- channels: 4
- cond_stage_trainable: false # Note: different from the one we trained before
- conditioning_key: hybrid
- monitor: val/loss_simple_ema
- scale_factor: 0.18215
-
- scheduler_config: # 10000 warmup steps
- target: ldm.lr_scheduler.LambdaLinearScheduler
- params:
- warm_up_steps: [ 100 ]
- cycle_lengths: [ 10000000000000 ] # incredibly large number to prevent corner cases
- f_start: [ 1.e-6 ]
- f_max: [ 1. ]
- f_min: [ 1. ]
-
- unet_config:
- target: ldm.modules.diffusionmodules.openaimodel.UNetModel
- params:
- image_size: 32 # unused
- in_channels: 8
- out_channels: 4
- model_channels: 320
- attention_resolutions: [ 4, 2, 1 ]
- num_res_blocks: 2
- channel_mult: [ 1, 2, 4, 4 ]
- num_heads: 8
- use_spatial_transformer: True
- transformer_depth: 1
- context_dim: 768
- use_checkpoint: True
- legacy: False
-
- first_stage_config:
- target: ldm.models.autoencoder.AutoencoderKL
- params:
- embed_dim: 4
- monitor: val/rec_loss
- ddconfig:
- double_z: true
- z_channels: 4
- resolution: 256
- in_channels: 3
- out_ch: 3
- ch: 128
- ch_mult:
- - 1
- - 2
- - 4
- - 4
- num_res_blocks: 2
- attn_resolutions: []
- dropout: 0.0
- lossconfig:
- target: torch.nn.Identity
-
- cond_stage_config:
- target: ldm.modules.encoders.modules.FrozenCLIPImageEmbedder
-
-
-data:
- target: ldm.data.simple.ObjaverseDataModuleFromConfig
- params:
- root_dir: 'views_whole_sphere'
- batch_size: 192
- num_workers: 16
- total_view: 4
- train:
- validation: False
- image_transforms:
- size: 256
-
- validation:
- validation: True
- image_transforms:
- size: 256
-
-
-lightning:
- find_unused_parameters: false
- metrics_over_trainsteps_checkpoint: True
- modelcheckpoint:
- params:
- every_n_train_steps: 5000
- callbacks:
- image_logger:
- target: main.ImageLogger
- params:
- batch_frequency: 500
- max_images: 32
- increase_log_steps: False
- log_first_step: True
- log_images_kwargs:
- use_ema_scope: False
- inpaint: False
- plot_progressive_rows: False
- plot_diffusion_rows: False
- N: 32
- unconditional_guidance_scale: 3.0
- unconditional_guidance_label: [""]
-
- trainer:
- benchmark: True
-  val_check_interval: 5000000 # set very high to effectively disable validation during training
- num_sanity_val_steps: 0
- accumulate_grad_batches: 1
diff --git a/One-2-3-45-master 2/download_ckpt.py b/One-2-3-45-master 2/download_ckpt.py
deleted file mode 100644
index e11ddb2484ef1b96a7f5566b5ee757dfe8865012..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/download_ckpt.py
+++ /dev/null
@@ -1,30 +0,0 @@
-import urllib.request
-from tqdm import tqdm
-
-def download_checkpoint(url, save_path):
- try:
- with urllib.request.urlopen(url) as response, open(save_path, 'wb') as file:
- file_size = int(response.info().get('Content-Length', -1))
- chunk_size = 8192
-
- with tqdm(total=file_size, unit='B', unit_scale=True, desc='Downloading', ncols=100) as pbar:
- for chunk in iter(lambda: response.read(chunk_size), b''):
- file.write(chunk)
- pbar.update(len(chunk))
-
- print(f"Checkpoint downloaded and saved to: {save_path}")
- except Exception as e:
- print(f"Error downloading checkpoint: {e}")
-
-if __name__ == "__main__":
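-    # Checkpoints fetched below: SAM (segmentation), Zero123-XL (view-conditioned
-    # diffusion), LoFTR weights used for elevation estimation, and the SparseNeuS-based
-    # reconstruction checkpoint. Keys are the relative paths the files are saved to.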
- ckpts = {
- "sam_vit_h_4b8939.pth": "https://huggingface.co/One-2-3-45/code/resolve/main/sam_vit_h_4b8939.pth",
- "zero123-xl.ckpt": "https://huggingface.co/One-2-3-45/code/resolve/main/zero123-xl.ckpt",
- "elevation_estimate/utils/weights/indoor_ds_new.ckpt" : "https://huggingface.co/One-2-3-45/code/resolve/main/one2345_elev_est/tools/weights/indoor_ds_new.ckpt",
- "reconstruction/exp/lod0/checkpoints/ckpt_215000.pth": "https://huggingface.co/One-2-3-45/code/resolve/main/SparseNeuS_demo_v1/exp/lod0/checkpoints/ckpt_215000.pth"
- }
- for ckpt_name, ckpt_url in ckpts.items():
- print(f"Downloading checkpoint: {ckpt_name}")
- download_checkpoint(ckpt_url, ckpt_name)
-
diff --git a/One-2-3-45-master 2/elevation_estimate/.gitignore b/One-2-3-45-master 2/elevation_estimate/.gitignore
deleted file mode 100644
index 0fe207cdc4cb61b3622443c8f5c739097174306c..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/elevation_estimate/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-build/
-.idea/
-*.egg-info/
diff --git a/One-2-3-45-master 2/elevation_estimate/__init__.py b/One-2-3-45-master 2/elevation_estimate/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/One-2-3-45-master 2/elevation_estimate/estimate_wild_imgs.py b/One-2-3-45-master 2/elevation_estimate/estimate_wild_imgs.py
deleted file mode 100644
index 6e894bfeb936d4595ca5dd967ea3316376cce042..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/elevation_estimate/estimate_wild_imgs.py
+++ /dev/null
@@ -1,10 +0,0 @@
-import os.path as osp
-from .utils.elev_est_api import elev_est_api
-
-def estimate_elev(root_dir):
- img_dir = osp.join(root_dir, "stage2_8")
- img_paths = []
- for i in range(4):
- img_paths.append(f"{img_dir}/0_{i}.png")
- elev = elev_est_api(img_paths)
- return elev
diff --git a/One-2-3-45-master 2/elevation_estimate/loftr/__init__.py b/One-2-3-45-master 2/elevation_estimate/loftr/__init__.py
deleted file mode 100644
index 0d69b9c131cf41e95c5c6ee7d389b375267b22fa..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/elevation_estimate/loftr/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-from .loftr import LoFTR
-from .utils.cvpr_ds_config import default_cfg
diff --git a/One-2-3-45-master 2/elevation_estimate/loftr/backbone/__init__.py b/One-2-3-45-master 2/elevation_estimate/loftr/backbone/__init__.py
deleted file mode 100644
index b6e731b3f53ab367c89ef0ea8e1cbffb0d990775..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/elevation_estimate/loftr/backbone/__init__.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from .resnet_fpn import ResNetFPN_8_2, ResNetFPN_16_4
-
-
-def build_backbone(config):
- if config['backbone_type'] == 'ResNetFPN':
- if config['resolution'] == (8, 2):
- return ResNetFPN_8_2(config['resnetfpn'])
- elif config['resolution'] == (16, 4):
- return ResNetFPN_16_4(config['resnetfpn'])
- else:
- raise ValueError(f"LOFTR.BACKBONE_TYPE {config['backbone_type']} not supported.")
diff --git a/One-2-3-45-master 2/elevation_estimate/loftr/backbone/resnet_fpn.py b/One-2-3-45-master 2/elevation_estimate/loftr/backbone/resnet_fpn.py
deleted file mode 100644
index 985e5b3f273a51e51447a8025ca3aadbe46752eb..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/elevation_estimate/loftr/backbone/resnet_fpn.py
+++ /dev/null
@@ -1,199 +0,0 @@
-import torch.nn as nn
-import torch.nn.functional as F
-
-
-def conv1x1(in_planes, out_planes, stride=1):
- """1x1 convolution without padding"""
- return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, padding=0, bias=False)
-
-
-def conv3x3(in_planes, out_planes, stride=1):
- """3x3 convolution with padding"""
- return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False)
-
-
-class BasicBlock(nn.Module):
- def __init__(self, in_planes, planes, stride=1):
- super().__init__()
- self.conv1 = conv3x3(in_planes, planes, stride)
- self.conv2 = conv3x3(planes, planes)
- self.bn1 = nn.BatchNorm2d(planes)
- self.bn2 = nn.BatchNorm2d(planes)
- self.relu = nn.ReLU(inplace=True)
-
- if stride == 1:
- self.downsample = None
- else:
- self.downsample = nn.Sequential(
- conv1x1(in_planes, planes, stride=stride),
- nn.BatchNorm2d(planes)
- )
-
- def forward(self, x):
- y = x
- y = self.relu(self.bn1(self.conv1(y)))
- y = self.bn2(self.conv2(y))
-
- if self.downsample is not None:
- x = self.downsample(x)
-
- return self.relu(x+y)
-
-
-class ResNetFPN_8_2(nn.Module):
- """
-    ResNet+FPN, output resolutions are 1/8 and 1/2.
- Each block has 2 layers.
- """
-
- def __init__(self, config):
- super().__init__()
- # Config
- block = BasicBlock
- initial_dim = config['initial_dim']
- block_dims = config['block_dims']
-
- # Class Variable
- self.in_planes = initial_dim
-
- # Networks
- self.conv1 = nn.Conv2d(1, initial_dim, kernel_size=7, stride=2, padding=3, bias=False)
- self.bn1 = nn.BatchNorm2d(initial_dim)
- self.relu = nn.ReLU(inplace=True)
-
- self.layer1 = self._make_layer(block, block_dims[0], stride=1) # 1/2
- self.layer2 = self._make_layer(block, block_dims[1], stride=2) # 1/4
- self.layer3 = self._make_layer(block, block_dims[2], stride=2) # 1/8
-
- # 3. FPN upsample
- self.layer3_outconv = conv1x1(block_dims[2], block_dims[2])
- self.layer2_outconv = conv1x1(block_dims[1], block_dims[2])
- self.layer2_outconv2 = nn.Sequential(
- conv3x3(block_dims[2], block_dims[2]),
- nn.BatchNorm2d(block_dims[2]),
- nn.LeakyReLU(),
- conv3x3(block_dims[2], block_dims[1]),
- )
- self.layer1_outconv = conv1x1(block_dims[0], block_dims[1])
- self.layer1_outconv2 = nn.Sequential(
- conv3x3(block_dims[1], block_dims[1]),
- nn.BatchNorm2d(block_dims[1]),
- nn.LeakyReLU(),
- conv3x3(block_dims[1], block_dims[0]),
- )
-
- for m in self.modules():
- if isinstance(m, nn.Conv2d):
- nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
- elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
- nn.init.constant_(m.weight, 1)
- nn.init.constant_(m.bias, 0)
-
- def _make_layer(self, block, dim, stride=1):
- layer1 = block(self.in_planes, dim, stride=stride)
- layer2 = block(dim, dim, stride=1)
- layers = (layer1, layer2)
-
- self.in_planes = dim
- return nn.Sequential(*layers)
-
- def forward(self, x):
- # ResNet Backbone
- x0 = self.relu(self.bn1(self.conv1(x)))
- x1 = self.layer1(x0) # 1/2
- x2 = self.layer2(x1) # 1/4
- x3 = self.layer3(x2) # 1/8
-
- # FPN
- x3_out = self.layer3_outconv(x3)
-
- x3_out_2x = F.interpolate(x3_out, scale_factor=2., mode='bilinear', align_corners=True)
- x2_out = self.layer2_outconv(x2)
- x2_out = self.layer2_outconv2(x2_out+x3_out_2x)
-
- x2_out_2x = F.interpolate(x2_out, scale_factor=2., mode='bilinear', align_corners=True)
- x1_out = self.layer1_outconv(x1)
- x1_out = self.layer1_outconv2(x1_out+x2_out_2x)
-
- return [x3_out, x1_out]
-
-
-class ResNetFPN_16_4(nn.Module):
- """
-    ResNet+FPN, output resolutions are 1/16 and 1/4.
- Each block has 2 layers.
- """
-
- def __init__(self, config):
- super().__init__()
- # Config
- block = BasicBlock
- initial_dim = config['initial_dim']
- block_dims = config['block_dims']
-
- # Class Variable
- self.in_planes = initial_dim
-
- # Networks
- self.conv1 = nn.Conv2d(1, initial_dim, kernel_size=7, stride=2, padding=3, bias=False)
- self.bn1 = nn.BatchNorm2d(initial_dim)
- self.relu = nn.ReLU(inplace=True)
-
- self.layer1 = self._make_layer(block, block_dims[0], stride=1) # 1/2
- self.layer2 = self._make_layer(block, block_dims[1], stride=2) # 1/4
- self.layer3 = self._make_layer(block, block_dims[2], stride=2) # 1/8
- self.layer4 = self._make_layer(block, block_dims[3], stride=2) # 1/16
-
- # 3. FPN upsample
- self.layer4_outconv = conv1x1(block_dims[3], block_dims[3])
- self.layer3_outconv = conv1x1(block_dims[2], block_dims[3])
- self.layer3_outconv2 = nn.Sequential(
- conv3x3(block_dims[3], block_dims[3]),
- nn.BatchNorm2d(block_dims[3]),
- nn.LeakyReLU(),
- conv3x3(block_dims[3], block_dims[2]),
- )
-
- self.layer2_outconv = conv1x1(block_dims[1], block_dims[2])
- self.layer2_outconv2 = nn.Sequential(
- conv3x3(block_dims[2], block_dims[2]),
- nn.BatchNorm2d(block_dims[2]),
- nn.LeakyReLU(),
- conv3x3(block_dims[2], block_dims[1]),
- )
-
- for m in self.modules():
- if isinstance(m, nn.Conv2d):
- nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
- elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
- nn.init.constant_(m.weight, 1)
- nn.init.constant_(m.bias, 0)
-
- def _make_layer(self, block, dim, stride=1):
- layer1 = block(self.in_planes, dim, stride=stride)
- layer2 = block(dim, dim, stride=1)
- layers = (layer1, layer2)
-
- self.in_planes = dim
- return nn.Sequential(*layers)
-
- def forward(self, x):
- # ResNet Backbone
- x0 = self.relu(self.bn1(self.conv1(x)))
- x1 = self.layer1(x0) # 1/2
- x2 = self.layer2(x1) # 1/4
- x3 = self.layer3(x2) # 1/8
- x4 = self.layer4(x3) # 1/16
-
- # FPN
- x4_out = self.layer4_outconv(x4)
-
- x4_out_2x = F.interpolate(x4_out, scale_factor=2., mode='bilinear', align_corners=True)
- x3_out = self.layer3_outconv(x3)
- x3_out = self.layer3_outconv2(x3_out+x4_out_2x)
-
- x3_out_2x = F.interpolate(x3_out, scale_factor=2., mode='bilinear', align_corners=True)
- x2_out = self.layer2_outconv(x2)
- x2_out = self.layer2_outconv2(x2_out+x3_out_2x)
-
- return [x4_out, x2_out]
diff --git a/One-2-3-45-master 2/elevation_estimate/loftr/loftr.py b/One-2-3-45-master 2/elevation_estimate/loftr/loftr.py
deleted file mode 100644
index 79c491ee47a4d67cb8b3fe493397349e0867accd..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/elevation_estimate/loftr/loftr.py
+++ /dev/null
@@ -1,81 +0,0 @@
-import torch
-import torch.nn as nn
-from einops.einops import rearrange
-
-from .backbone import build_backbone
-from .utils.position_encoding import PositionEncodingSine
-from .loftr_module import LocalFeatureTransformer, FinePreprocess
-from .utils.coarse_matching import CoarseMatching
-from .utils.fine_matching import FineMatching
-
-
-class LoFTR(nn.Module):
- def __init__(self, config):
- super().__init__()
- # Misc
- self.config = config
-
- # Modules
- self.backbone = build_backbone(config)
- self.pos_encoding = PositionEncodingSine(
- config['coarse']['d_model'],
- temp_bug_fix=config['coarse']['temp_bug_fix'])
- self.loftr_coarse = LocalFeatureTransformer(config['coarse'])
- self.coarse_matching = CoarseMatching(config['match_coarse'])
- self.fine_preprocess = FinePreprocess(config)
- self.loftr_fine = LocalFeatureTransformer(config["fine"])
- self.fine_matching = FineMatching()
-
- def forward(self, data):
- """
- Update:
- data (dict): {
- 'image0': (torch.Tensor): (N, 1, H, W)
- 'image1': (torch.Tensor): (N, 1, H, W)
- 'mask0'(optional) : (torch.Tensor): (N, H, W) '0' indicates a padded position
- 'mask1'(optional) : (torch.Tensor): (N, H, W)
- }
- """
- # 1. Local Feature CNN
- data.update({
- 'bs': data['image0'].size(0),
- 'hw0_i': data['image0'].shape[2:], 'hw1_i': data['image1'].shape[2:]
- })
-
- if data['hw0_i'] == data['hw1_i']: # faster & better BN convergence
- feats_c, feats_f = self.backbone(torch.cat([data['image0'], data['image1']], dim=0))
- (feat_c0, feat_c1), (feat_f0, feat_f1) = feats_c.split(data['bs']), feats_f.split(data['bs'])
- else: # handle different input shapes
- (feat_c0, feat_f0), (feat_c1, feat_f1) = self.backbone(data['image0']), self.backbone(data['image1'])
-
- data.update({
- 'hw0_c': feat_c0.shape[2:], 'hw1_c': feat_c1.shape[2:],
- 'hw0_f': feat_f0.shape[2:], 'hw1_f': feat_f1.shape[2:]
- })
-
- # 2. coarse-level loftr module
- # add featmap with positional encoding, then flatten it to sequence [N, HW, C]
- feat_c0 = rearrange(self.pos_encoding(feat_c0), 'n c h w -> n (h w) c')
- feat_c1 = rearrange(self.pos_encoding(feat_c1), 'n c h w -> n (h w) c')
-
- mask_c0 = mask_c1 = None # mask is useful in training
- if 'mask0' in data:
- mask_c0, mask_c1 = data['mask0'].flatten(-2), data['mask1'].flatten(-2)
- feat_c0, feat_c1 = self.loftr_coarse(feat_c0, feat_c1, mask_c0, mask_c1)
-
- # 3. match coarse-level
- self.coarse_matching(feat_c0, feat_c1, data, mask_c0=mask_c0, mask_c1=mask_c1)
-
- # 4. fine-level refinement
- feat_f0_unfold, feat_f1_unfold = self.fine_preprocess(feat_f0, feat_f1, feat_c0, feat_c1, data)
- if feat_f0_unfold.size(0) != 0: # at least one coarse level predicted
- feat_f0_unfold, feat_f1_unfold = self.loftr_fine(feat_f0_unfold, feat_f1_unfold)
-
- # 5. match fine-level
- self.fine_matching(feat_f0_unfold, feat_f1_unfold, data)
-
- def load_state_dict(self, state_dict, *args, **kwargs):
- for k in list(state_dict.keys()):
- if k.startswith('matcher.'):
- state_dict[k.replace('matcher.', '', 1)] = state_dict.pop(k)
- return super().load_state_dict(state_dict, *args, **kwargs)
diff --git a/One-2-3-45-master 2/elevation_estimate/loftr/loftr_module/__init__.py b/One-2-3-45-master 2/elevation_estimate/loftr/loftr_module/__init__.py
deleted file mode 100644
index ca51db4f50a0c4f3dcd795e74b83e633ab2e990a..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/elevation_estimate/loftr/loftr_module/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-from .transformer import LocalFeatureTransformer
-from .fine_preprocess import FinePreprocess
diff --git a/One-2-3-45-master 2/elevation_estimate/loftr/loftr_module/fine_preprocess.py b/One-2-3-45-master 2/elevation_estimate/loftr/loftr_module/fine_preprocess.py
deleted file mode 100644
index 5bb8eefd362240a9901a335f0e6e07770ff04567..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/elevation_estimate/loftr/loftr_module/fine_preprocess.py
+++ /dev/null
@@ -1,59 +0,0 @@
-import torch
-import torch.nn as nn
-import torch.nn.functional as F
-from einops.einops import rearrange, repeat
-
-
-class FinePreprocess(nn.Module):
- def __init__(self, config):
- super().__init__()
-
- self.config = config
- self.cat_c_feat = config['fine_concat_coarse_feat']
- self.W = self.config['fine_window_size']
-
- d_model_c = self.config['coarse']['d_model']
- d_model_f = self.config['fine']['d_model']
- self.d_model_f = d_model_f
- if self.cat_c_feat:
- self.down_proj = nn.Linear(d_model_c, d_model_f, bias=True)
- self.merge_feat = nn.Linear(2*d_model_f, d_model_f, bias=True)
-
- self._reset_parameters()
-
- def _reset_parameters(self):
- for p in self.parameters():
- if p.dim() > 1:
- nn.init.kaiming_normal_(p, mode="fan_out", nonlinearity="relu")
-
- def forward(self, feat_f0, feat_f1, feat_c0, feat_c1, data):
- W = self.W
- stride = data['hw0_f'][0] // data['hw0_c'][0]
-
- data.update({'W': W})
- if data['b_ids'].shape[0] == 0:
- feat0 = torch.empty(0, self.W**2, self.d_model_f, device=feat_f0.device)
- feat1 = torch.empty(0, self.W**2, self.d_model_f, device=feat_f0.device)
- return feat0, feat1
-
- # 1. unfold(crop) all local windows
- feat_f0_unfold = F.unfold(feat_f0, kernel_size=(W, W), stride=stride, padding=W//2)
- feat_f0_unfold = rearrange(feat_f0_unfold, 'n (c ww) l -> n l ww c', ww=W**2)
- feat_f1_unfold = F.unfold(feat_f1, kernel_size=(W, W), stride=stride, padding=W//2)
- feat_f1_unfold = rearrange(feat_f1_unfold, 'n (c ww) l -> n l ww c', ww=W**2)
-
- # 2. select only the predicted matches
- feat_f0_unfold = feat_f0_unfold[data['b_ids'], data['i_ids']] # [n, ww, cf]
- feat_f1_unfold = feat_f1_unfold[data['b_ids'], data['j_ids']]
-
- # option: use coarse-level loftr feature as context: concat and linear
- if self.cat_c_feat:
- feat_c_win = self.down_proj(torch.cat([feat_c0[data['b_ids'], data['i_ids']],
- feat_c1[data['b_ids'], data['j_ids']]], 0)) # [2n, c]
- feat_cf_win = self.merge_feat(torch.cat([
- torch.cat([feat_f0_unfold, feat_f1_unfold], 0), # [2n, ww, cf]
- repeat(feat_c_win, 'n c -> n ww c', ww=W**2), # [2n, ww, cf]
- ], -1))
- feat_f0_unfold, feat_f1_unfold = torch.chunk(feat_cf_win, 2, dim=0)
-
- return feat_f0_unfold, feat_f1_unfold
diff --git a/One-2-3-45-master 2/elevation_estimate/loftr/loftr_module/linear_attention.py b/One-2-3-45-master 2/elevation_estimate/loftr/loftr_module/linear_attention.py
deleted file mode 100644
index b73c5a6a6a722a44c0b68f70cb77c0988b8a5fb3..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/elevation_estimate/loftr/loftr_module/linear_attention.py
+++ /dev/null
@@ -1,81 +0,0 @@
-"""
-Linear Transformer proposed in "Transformers are RNNs: Fast Autoregressive Transformers with Linear Attention"
-Modified from: https://github.com/idiap/fast-transformers/blob/master/fast_transformers/attention/linear_attention.py
-"""
-
-import torch
-from torch.nn import Module, Dropout
-
-
-def elu_feature_map(x):
- return torch.nn.functional.elu(x) + 1
-
-
-class LinearAttention(Module):
- def __init__(self, eps=1e-6):
- super().__init__()
- self.feature_map = elu_feature_map
- self.eps = eps
-
- def forward(self, queries, keys, values, q_mask=None, kv_mask=None):
- """ Multi-Head linear attention proposed in "Transformers are RNNs"
- Args:
- queries: [N, L, H, D]
- keys: [N, S, H, D]
- values: [N, S, H, D]
- q_mask: [N, L]
- kv_mask: [N, S]
- Returns:
- queried_values: (N, L, H, D)
- """
- Q = self.feature_map(queries)
- K = self.feature_map(keys)
-
- # set padded position to zero
- if q_mask is not None:
- Q = Q * q_mask[:, :, None, None]
- if kv_mask is not None:
- K = K * kv_mask[:, :, None, None]
- values = values * kv_mask[:, :, None, None]
-
- v_length = values.size(1)
- values = values / v_length # prevent fp16 overflow
- KV = torch.einsum("nshd,nshv->nhdv", K, values) # (S,D)' @ S,V
- Z = 1 / (torch.einsum("nlhd,nhd->nlh", Q, K.sum(dim=1)) + self.eps)
- queried_values = torch.einsum("nlhd,nhdv,nlh->nlhv", Q, KV, Z) * v_length
-
- return queried_values.contiguous()
-
-
-class FullAttention(Module):
- def __init__(self, use_dropout=False, attention_dropout=0.1):
- super().__init__()
- self.use_dropout = use_dropout
- self.dropout = Dropout(attention_dropout)
-
- def forward(self, queries, keys, values, q_mask=None, kv_mask=None):
- """ Multi-head scaled dot-product attention, a.k.a full attention.
- Args:
- queries: [N, L, H, D]
- keys: [N, S, H, D]
- values: [N, S, H, D]
- q_mask: [N, L]
- kv_mask: [N, S]
- Returns:
- queried_values: (N, L, H, D)
- """
-
- # Compute the unnormalized attention and apply the masks
- QK = torch.einsum("nlhd,nshd->nlsh", queries, keys)
- if kv_mask is not None:
- QK.masked_fill_(~(q_mask[:, :, None, None] * kv_mask[:, None, :, None]), float('-inf'))
-
- # Compute the attention and the weighted average
-        softmax_temp = 1. / queries.size(3)**.5  # 1 / sqrt(D)
- A = torch.softmax(softmax_temp * QK, dim=2)
- if self.use_dropout:
- A = self.dropout(A)
-
- queried_values = torch.einsum("nlsh,nshd->nlhd", A, values)
-
- return queried_values.contiguous()
diff --git a/One-2-3-45-master 2/elevation_estimate/loftr/loftr_module/transformer.py b/One-2-3-45-master 2/elevation_estimate/loftr/loftr_module/transformer.py
deleted file mode 100644
index d79390ca08953bbef44e98149e662a681a16e42e..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/elevation_estimate/loftr/loftr_module/transformer.py
+++ /dev/null
@@ -1,101 +0,0 @@
-import copy
-import torch
-import torch.nn as nn
-from .linear_attention import LinearAttention, FullAttention
-
-
-class LoFTREncoderLayer(nn.Module):
- def __init__(self,
- d_model,
- nhead,
- attention='linear'):
- super(LoFTREncoderLayer, self).__init__()
-
- self.dim = d_model // nhead
- self.nhead = nhead
-
- # multi-head attention
- self.q_proj = nn.Linear(d_model, d_model, bias=False)
- self.k_proj = nn.Linear(d_model, d_model, bias=False)
- self.v_proj = nn.Linear(d_model, d_model, bias=False)
- self.attention = LinearAttention() if attention == 'linear' else FullAttention()
- self.merge = nn.Linear(d_model, d_model, bias=False)
-
- # feed-forward network
- self.mlp = nn.Sequential(
- nn.Linear(d_model*2, d_model*2, bias=False),
- nn.ReLU(True),
- nn.Linear(d_model*2, d_model, bias=False),
- )
-
- # norm and dropout
- self.norm1 = nn.LayerNorm(d_model)
- self.norm2 = nn.LayerNorm(d_model)
-
- def forward(self, x, source, x_mask=None, source_mask=None):
- """
- Args:
- x (torch.Tensor): [N, L, C]
- source (torch.Tensor): [N, S, C]
- x_mask (torch.Tensor): [N, L] (optional)
- source_mask (torch.Tensor): [N, S] (optional)
- """
- bs = x.size(0)
- query, key, value = x, source, source
-
- # multi-head attention
- query = self.q_proj(query).view(bs, -1, self.nhead, self.dim) # [N, L, (H, D)]
- key = self.k_proj(key).view(bs, -1, self.nhead, self.dim) # [N, S, (H, D)]
- value = self.v_proj(value).view(bs, -1, self.nhead, self.dim)
- message = self.attention(query, key, value, q_mask=x_mask, kv_mask=source_mask) # [N, L, (H, D)]
- message = self.merge(message.view(bs, -1, self.nhead*self.dim)) # [N, L, C]
- message = self.norm1(message)
-
- # feed-forward network
- message = self.mlp(torch.cat([x, message], dim=2))
- message = self.norm2(message)
-
- return x + message
-
-
-class LocalFeatureTransformer(nn.Module):
- """A Local Feature Transformer (LoFTR) module."""
-
- def __init__(self, config):
- super(LocalFeatureTransformer, self).__init__()
-
- self.config = config
- self.d_model = config['d_model']
- self.nhead = config['nhead']
- self.layer_names = config['layer_names']
- encoder_layer = LoFTREncoderLayer(config['d_model'], config['nhead'], config['attention'])
- self.layers = nn.ModuleList([copy.deepcopy(encoder_layer) for _ in range(len(self.layer_names))])
- self._reset_parameters()
-
- def _reset_parameters(self):
- for p in self.parameters():
- if p.dim() > 1:
- nn.init.xavier_uniform_(p)
-
- def forward(self, feat0, feat1, mask0=None, mask1=None):
- """
- Args:
- feat0 (torch.Tensor): [N, L, C]
- feat1 (torch.Tensor): [N, S, C]
- mask0 (torch.Tensor): [N, L] (optional)
- mask1 (torch.Tensor): [N, S] (optional)
- """
-
-        assert self.d_model == feat0.size(2), "the feature dimension of the input and the transformer must be equal"
-
- for layer, name in zip(self.layers, self.layer_names):
- if name == 'self':
- feat0 = layer(feat0, feat0, mask0, mask0)
- feat1 = layer(feat1, feat1, mask1, mask1)
- elif name == 'cross':
- feat0 = layer(feat0, feat1, mask0, mask1)
- feat1 = layer(feat1, feat0, mask1, mask0)
- else:
- raise KeyError
-
- return feat0, feat1
diff --git a/One-2-3-45-master 2/elevation_estimate/loftr/utils/coarse_matching.py b/One-2-3-45-master 2/elevation_estimate/loftr/utils/coarse_matching.py
deleted file mode 100644
index a97263339462dec3af9705d33d6ee634e2f46914..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/elevation_estimate/loftr/utils/coarse_matching.py
+++ /dev/null
@@ -1,261 +0,0 @@
-import torch
-import torch.nn as nn
-import torch.nn.functional as F
-from einops.einops import rearrange
-
-INF = 1e9
-
-def mask_border(m, b: int, v):
- """ Mask borders with value
- Args:
- m (torch.Tensor): [N, H0, W0, H1, W1]
- b (int)
- v (m.dtype)
- """
- if b <= 0:
- return
-
- m[:, :b] = v
- m[:, :, :b] = v
- m[:, :, :, :b] = v
- m[:, :, :, :, :b] = v
- m[:, -b:] = v
- m[:, :, -b:] = v
- m[:, :, :, -b:] = v
- m[:, :, :, :, -b:] = v
-
-
-def mask_border_with_padding(m, bd, v, p_m0, p_m1):
- if bd <= 0:
- return
-
- m[:, :bd] = v
- m[:, :, :bd] = v
- m[:, :, :, :bd] = v
- m[:, :, :, :, :bd] = v
-
- h0s, w0s = p_m0.sum(1).max(-1)[0].int(), p_m0.sum(-1).max(-1)[0].int()
- h1s, w1s = p_m1.sum(1).max(-1)[0].int(), p_m1.sum(-1).max(-1)[0].int()
- for b_idx, (h0, w0, h1, w1) in enumerate(zip(h0s, w0s, h1s, w1s)):
- m[b_idx, h0 - bd:] = v
- m[b_idx, :, w0 - bd:] = v
- m[b_idx, :, :, h1 - bd:] = v
- m[b_idx, :, :, :, w1 - bd:] = v
-
-
-def compute_max_candidates(p_m0, p_m1):
- """Compute the max candidates of all pairs within a batch
-
- Args:
- p_m0, p_m1 (torch.Tensor): padded masks
- """
- h0s, w0s = p_m0.sum(1).max(-1)[0], p_m0.sum(-1).max(-1)[0]
- h1s, w1s = p_m1.sum(1).max(-1)[0], p_m1.sum(-1).max(-1)[0]
- max_cand = torch.sum(
- torch.min(torch.stack([h0s * w0s, h1s * w1s], -1), -1)[0])
- return max_cand
-
-
-class CoarseMatching(nn.Module):
- def __init__(self, config):
- super().__init__()
- self.config = config
- # general config
- self.thr = config['thr']
- self.border_rm = config['border_rm']
-        # -- # for training fine-level LoFTR
- self.train_coarse_percent = config['train_coarse_percent']
- self.train_pad_num_gt_min = config['train_pad_num_gt_min']
-
- # we provide 2 options for differentiable matching
- self.match_type = config['match_type']
- if self.match_type == 'dual_softmax':
- self.temperature = config['dsmax_temperature']
- elif self.match_type == 'sinkhorn':
- try:
- from .superglue import log_optimal_transport
- except ImportError:
- raise ImportError("download superglue.py first!")
- self.log_optimal_transport = log_optimal_transport
- self.bin_score = nn.Parameter(
- torch.tensor(config['skh_init_bin_score'], requires_grad=True))
- self.skh_iters = config['skh_iters']
- self.skh_prefilter = config['skh_prefilter']
- else:
- raise NotImplementedError()
-
- def forward(self, feat_c0, feat_c1, data, mask_c0=None, mask_c1=None):
- """
- Args:
- feat0 (torch.Tensor): [N, L, C]
- feat1 (torch.Tensor): [N, S, C]
- data (dict)
- mask_c0 (torch.Tensor): [N, L] (optional)
- mask_c1 (torch.Tensor): [N, S] (optional)
- Update:
- data (dict): {
- 'b_ids' (torch.Tensor): [M'],
- 'i_ids' (torch.Tensor): [M'],
- 'j_ids' (torch.Tensor): [M'],
- 'gt_mask' (torch.Tensor): [M'],
- 'mkpts0_c' (torch.Tensor): [M, 2],
- 'mkpts1_c' (torch.Tensor): [M, 2],
- 'mconf' (torch.Tensor): [M]}
- NOTE: M' != M during training.
- """
- N, L, S, C = feat_c0.size(0), feat_c0.size(1), feat_c1.size(1), feat_c0.size(2)
-
- # normalize
- feat_c0, feat_c1 = map(lambda feat: feat / feat.shape[-1]**.5,
- [feat_c0, feat_c1])
-
- if self.match_type == 'dual_softmax':
- sim_matrix = torch.einsum("nlc,nsc->nls", feat_c0,
- feat_c1) / self.temperature
- if mask_c0 is not None:
- sim_matrix.masked_fill_(
- ~(mask_c0[..., None] * mask_c1[:, None]).bool(),
- -INF)
- conf_matrix = F.softmax(sim_matrix, 1) * F.softmax(sim_matrix, 2)
-
- elif self.match_type == 'sinkhorn':
- # sinkhorn, dustbin included
- sim_matrix = torch.einsum("nlc,nsc->nls", feat_c0, feat_c1)
- if mask_c0 is not None:
- sim_matrix[:, :L, :S].masked_fill_(
- ~(mask_c0[..., None] * mask_c1[:, None]).bool(),
- -INF)
-
- # build uniform prior & use sinkhorn
- log_assign_matrix = self.log_optimal_transport(
- sim_matrix, self.bin_score, self.skh_iters)
- assign_matrix = log_assign_matrix.exp()
- conf_matrix = assign_matrix[:, :-1, :-1]
-
- # filter prediction with dustbin score (only in evaluation mode)
- if not self.training and self.skh_prefilter:
- filter0 = (assign_matrix.max(dim=2)[1] == S)[:, :-1] # [N, L]
- filter1 = (assign_matrix.max(dim=1)[1] == L)[:, :-1] # [N, S]
- conf_matrix[filter0[..., None].repeat(1, 1, S)] = 0
- conf_matrix[filter1[:, None].repeat(1, L, 1)] = 0
-
- if self.config['sparse_spvs']:
- data.update({'conf_matrix_with_bin': assign_matrix.clone()})
-
- data.update({'conf_matrix': conf_matrix})
-
- # predict coarse matches from conf_matrix
- data.update(**self.get_coarse_match(conf_matrix, data))
-
- @torch.no_grad()
- def get_coarse_match(self, conf_matrix, data):
- """
- Args:
- conf_matrix (torch.Tensor): [N, L, S]
- data (dict): with keys ['hw0_i', 'hw1_i', 'hw0_c', 'hw1_c']
- Returns:
- coarse_matches (dict): {
- 'b_ids' (torch.Tensor): [M'],
- 'i_ids' (torch.Tensor): [M'],
- 'j_ids' (torch.Tensor): [M'],
- 'gt_mask' (torch.Tensor): [M'],
- 'm_bids' (torch.Tensor): [M],
- 'mkpts0_c' (torch.Tensor): [M, 2],
- 'mkpts1_c' (torch.Tensor): [M, 2],
- 'mconf' (torch.Tensor): [M]}
- """
- axes_lengths = {
- 'h0c': data['hw0_c'][0],
- 'w0c': data['hw0_c'][1],
- 'h1c': data['hw1_c'][0],
- 'w1c': data['hw1_c'][1]
- }
- _device = conf_matrix.device
- # 1. confidence thresholding
- mask = conf_matrix > self.thr
- mask = rearrange(mask, 'b (h0c w0c) (h1c w1c) -> b h0c w0c h1c w1c',
- **axes_lengths)
- if 'mask0' not in data:
- mask_border(mask, self.border_rm, False)
- else:
- mask_border_with_padding(mask, self.border_rm, False,
- data['mask0'], data['mask1'])
- mask = rearrange(mask, 'b h0c w0c h1c w1c -> b (h0c w0c) (h1c w1c)',
- **axes_lengths)
-
- # 2. mutual nearest
- mask = mask \
- * (conf_matrix == conf_matrix.max(dim=2, keepdim=True)[0]) \
- * (conf_matrix == conf_matrix.max(dim=1, keepdim=True)[0])
-
- # 3. find all valid coarse matches
-        # this only works when there is at most one `True` in each row
- mask_v, all_j_ids = mask.max(dim=2)
- b_ids, i_ids = torch.where(mask_v)
- j_ids = all_j_ids[b_ids, i_ids]
- mconf = conf_matrix[b_ids, i_ids, j_ids]
-
- # 4. Random sampling of training samples for fine-level LoFTR
- # (optional) pad samples with gt coarse-level matches
- if self.training:
- # NOTE:
- # The sampling is performed across all pairs in a batch without manually balancing
- # #samples for fine-level increases w.r.t. batch_size
- if 'mask0' not in data:
- num_candidates_max = mask.size(0) * max(
- mask.size(1), mask.size(2))
- else:
- num_candidates_max = compute_max_candidates(
- data['mask0'], data['mask1'])
- num_matches_train = int(num_candidates_max *
- self.train_coarse_percent)
- num_matches_pred = len(b_ids)
- assert self.train_pad_num_gt_min < num_matches_train, "min-num-gt-pad should be less than num-train-matches"
-
- # pred_indices is to select from prediction
- if num_matches_pred <= num_matches_train - self.train_pad_num_gt_min:
- pred_indices = torch.arange(num_matches_pred, device=_device)
- else:
- pred_indices = torch.randint(
- num_matches_pred,
- (num_matches_train - self.train_pad_num_gt_min, ),
- device=_device)
-
- # gt_pad_indices is to select from gt padding. e.g. max(3787-4800, 200)
- gt_pad_indices = torch.randint(
- len(data['spv_b_ids']),
- (max(num_matches_train - num_matches_pred,
- self.train_pad_num_gt_min), ),
- device=_device)
- mconf_gt = torch.zeros(len(data['spv_b_ids']), device=_device) # set conf of gt paddings to all zero
-
- b_ids, i_ids, j_ids, mconf = map(
- lambda x, y: torch.cat([x[pred_indices], y[gt_pad_indices]],
- dim=0),
- *zip([b_ids, data['spv_b_ids']], [i_ids, data['spv_i_ids']],
- [j_ids, data['spv_j_ids']], [mconf, mconf_gt]))
-
-        # These matches select patches that feed into the fine-level network
- coarse_matches = {'b_ids': b_ids, 'i_ids': i_ids, 'j_ids': j_ids}
-
-        # 5. Update with matches in original image resolution
- scale = data['hw0_i'][0] / data['hw0_c'][0]
- scale0 = scale * data['scale0'][b_ids] if 'scale0' in data else scale
- scale1 = scale * data['scale1'][b_ids] if 'scale1' in data else scale
- mkpts0_c = torch.stack(
- [i_ids % data['hw0_c'][1], i_ids // data['hw0_c'][1]],
- dim=1) * scale0
- mkpts1_c = torch.stack(
- [j_ids % data['hw1_c'][1], j_ids // data['hw1_c'][1]],
- dim=1) * scale1
-
-        # These matches are the current prediction (for visualization)
- coarse_matches.update({
- 'gt_mask': mconf == 0,
- 'm_bids': b_ids[mconf != 0], # mconf == 0 => gt matches
- 'mkpts0_c': mkpts0_c[mconf != 0],
- 'mkpts1_c': mkpts1_c[mconf != 0],
- 'mconf': mconf[mconf != 0]
- })
-
- return coarse_matches
diff --git a/One-2-3-45-master 2/elevation_estimate/loftr/utils/cvpr_ds_config.py b/One-2-3-45-master 2/elevation_estimate/loftr/utils/cvpr_ds_config.py
deleted file mode 100644
index 1c9ce70154d3a1b961d3b4f08897415720f451f8..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/elevation_estimate/loftr/utils/cvpr_ds_config.py
+++ /dev/null
@@ -1,50 +0,0 @@
-from yacs.config import CfgNode as CN
-
-
-def lower_config(yacs_cfg):
- if not isinstance(yacs_cfg, CN):
- return yacs_cfg
- return {k.lower(): lower_config(v) for k, v in yacs_cfg.items()}
-
-
-_CN = CN()
-_CN.BACKBONE_TYPE = 'ResNetFPN'
-_CN.RESOLUTION = (8, 2) # options: [(8, 2), (16, 4)]
-_CN.FINE_WINDOW_SIZE = 5 # window_size in fine_level, must be odd
-_CN.FINE_CONCAT_COARSE_FEAT = True
-
-# 1. LoFTR-backbone (local feature CNN) config
-_CN.RESNETFPN = CN()
-_CN.RESNETFPN.INITIAL_DIM = 128
-_CN.RESNETFPN.BLOCK_DIMS = [128, 196, 256] # s1, s2, s3
-
-# 2. LoFTR-coarse module config
-_CN.COARSE = CN()
-_CN.COARSE.D_MODEL = 256
-_CN.COARSE.D_FFN = 256
-_CN.COARSE.NHEAD = 8
-_CN.COARSE.LAYER_NAMES = ['self', 'cross'] * 4
-_CN.COARSE.ATTENTION = 'linear' # options: ['linear', 'full']
-_CN.COARSE.TEMP_BUG_FIX = False
-
-# 3. Coarse-Matching config
-_CN.MATCH_COARSE = CN()
-_CN.MATCH_COARSE.THR = 0.2
-_CN.MATCH_COARSE.BORDER_RM = 2
-_CN.MATCH_COARSE.MATCH_TYPE = 'dual_softmax' # options: ['dual_softmax', 'sinkhorn']
-_CN.MATCH_COARSE.DSMAX_TEMPERATURE = 0.1
-_CN.MATCH_COARSE.SKH_ITERS = 3
-_CN.MATCH_COARSE.SKH_INIT_BIN_SCORE = 1.0
-_CN.MATCH_COARSE.SKH_PREFILTER = True
-_CN.MATCH_COARSE.TRAIN_COARSE_PERCENT = 0.4 # training tricks: save GPU memory
-_CN.MATCH_COARSE.TRAIN_PAD_NUM_GT_MIN = 200 # training tricks: avoid DDP deadlock
-
-# 4. LoFTR-fine module config
-_CN.FINE = CN()
-_CN.FINE.D_MODEL = 128
-_CN.FINE.D_FFN = 128
-_CN.FINE.NHEAD = 8
-_CN.FINE.LAYER_NAMES = ['self', 'cross'] * 1
-_CN.FINE.ATTENTION = 'linear'
-
-default_cfg = lower_config(_CN)
diff --git a/One-2-3-45-master 2/elevation_estimate/loftr/utils/fine_matching.py b/One-2-3-45-master 2/elevation_estimate/loftr/utils/fine_matching.py
deleted file mode 100644
index 6e77aded52e1eb5c01e22c2738104f3b09d6922a..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/elevation_estimate/loftr/utils/fine_matching.py
+++ /dev/null
@@ -1,74 +0,0 @@
-import math
-import torch
-import torch.nn as nn
-
-from kornia.geometry.subpix import dsnt
-from kornia.utils.grid import create_meshgrid
-
-
-class FineMatching(nn.Module):
- """FineMatching with s2d paradigm"""
-
- def __init__(self):
- super().__init__()
-
- def forward(self, feat_f0, feat_f1, data):
- """
- Args:
-            feat_f0 (torch.Tensor): [M, WW, C]
-            feat_f1 (torch.Tensor): [M, WW, C]
- data (dict)
- Update:
- data (dict):{
- 'expec_f' (torch.Tensor): [M, 3],
- 'mkpts0_f' (torch.Tensor): [M, 2],
- 'mkpts1_f' (torch.Tensor): [M, 2]}
- """
- M, WW, C = feat_f0.shape
- W = int(math.sqrt(WW))
- scale = data['hw0_i'][0] / data['hw0_f'][0]
- self.M, self.W, self.WW, self.C, self.scale = M, W, WW, C, scale
-
- # corner case: if no coarse matches found
- if M == 0:
-            assert not self.training, "M is always > 0 when training; see coarse_matching.py"
- # logger.warning('No matches found in coarse-level.')
- data.update({
- 'expec_f': torch.empty(0, 3, device=feat_f0.device),
- 'mkpts0_f': data['mkpts0_c'],
- 'mkpts1_f': data['mkpts1_c'],
- })
- return
-
-        feat_f0_picked = feat_f0[:, WW//2, :]
- sim_matrix = torch.einsum('mc,mrc->mr', feat_f0_picked, feat_f1)
- softmax_temp = 1. / C**.5
- heatmap = torch.softmax(softmax_temp * sim_matrix, dim=1).view(-1, W, W)
-
- # compute coordinates from heatmap
- coords_normalized = dsnt.spatial_expectation2d(heatmap[None], True)[0] # [M, 2]
- grid_normalized = create_meshgrid(W, W, True, heatmap.device).reshape(1, -1, 2) # [1, WW, 2]
-
-        # compute std over <x, y>
- var = torch.sum(grid_normalized**2 * heatmap.view(-1, WW, 1), dim=1) - coords_normalized**2 # [M, 2]
- std = torch.sum(torch.sqrt(torch.clamp(var, min=1e-10)), -1) # [M] clamp needed for numerical stability
-
- # for fine-level supervision
- data.update({'expec_f': torch.cat([coords_normalized, std.unsqueeze(1)], -1)})
-
- # compute absolute kpt coords
- self.get_fine_match(coords_normalized, data)
-
- @torch.no_grad()
- def get_fine_match(self, coords_normed, data):
- W, WW, C, scale = self.W, self.WW, self.C, self.scale
-
- # mkpts0_f and mkpts1_f
- mkpts0_f = data['mkpts0_c']
- scale1 = scale * data['scale1'][data['b_ids']] if 'scale0' in data else scale
- mkpts1_f = data['mkpts1_c'] + (coords_normed * (W // 2) * scale1)[:len(data['mconf'])]
-
- data.update({
- "mkpts0_f": mkpts0_f,
- "mkpts1_f": mkpts1_f
- })
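The refinement above is a soft-argmax: the expected (x, y) over a softmax heatmap gives a sub-pixel offset in normalized [-1, 1] window coordinates. A plain-torch sketch of that expectation (hypothetical sizes, without kornia):

import torch

W = 5                                          # FINE_WINDOW_SIZE
sim = torch.randn(3, W * W)                    # one similarity row per coarse match
heatmap = torch.softmax(sim, dim=1).view(-1, W, W)

ys, xs = torch.meshgrid(torch.linspace(-1, 1, W),
                        torch.linspace(-1, 1, W), indexing='ij')
grid = torch.stack([xs, ys], dim=-1).view(1, W * W, 2)    # normalized (x, y) grid
coords = (grid * heatmap.view(-1, W * W, 1)).sum(dim=1)   # [3, 2] expected offsets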
diff --git a/One-2-3-45-master 2/elevation_estimate/loftr/utils/geometry.py b/One-2-3-45-master 2/elevation_estimate/loftr/utils/geometry.py
deleted file mode 100644
index f95cdb65b48324c4f4ceb20231b1bed992b41116..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/elevation_estimate/loftr/utils/geometry.py
+++ /dev/null
@@ -1,54 +0,0 @@
-import torch
-
-
-@torch.no_grad()
-def warp_kpts(kpts0, depth0, depth1, T_0to1, K0, K1):
- """ Warp kpts0 from I0 to I1 with depth, K and Rt
- Also check covisibility and depth consistency.
- Depth is consistent if relative error < 0.2 (hard-coded).
-
- Args:
-        kpts0 (torch.Tensor): [N, L, 2] - <x, y>,
- depth0 (torch.Tensor): [N, H, W],
- depth1 (torch.Tensor): [N, H, W],
- T_0to1 (torch.Tensor): [N, 3, 4],
- K0 (torch.Tensor): [N, 3, 3],
- K1 (torch.Tensor): [N, 3, 3],
- Returns:
- calculable_mask (torch.Tensor): [N, L]
- warped_keypoints0 (torch.Tensor): [N, L, 2]
- """
- kpts0_long = kpts0.round().long()
-
- # Sample depth, get calculable_mask on depth != 0
- kpts0_depth = torch.stack(
- [depth0[i, kpts0_long[i, :, 1], kpts0_long[i, :, 0]] for i in range(kpts0.shape[0])], dim=0
- ) # (N, L)
- nonzero_mask = kpts0_depth != 0
-
- # Unproject
- kpts0_h = torch.cat([kpts0, torch.ones_like(kpts0[:, :, [0]])], dim=-1) * kpts0_depth[..., None] # (N, L, 3)
- kpts0_cam = K0.inverse() @ kpts0_h.transpose(2, 1) # (N, 3, L)
-
- # Rigid Transform
- w_kpts0_cam = T_0to1[:, :3, :3] @ kpts0_cam + T_0to1[:, :3, [3]] # (N, 3, L)
- w_kpts0_depth_computed = w_kpts0_cam[:, 2, :]
-
- # Project
- w_kpts0_h = (K1 @ w_kpts0_cam).transpose(2, 1) # (N, L, 3)
- w_kpts0 = w_kpts0_h[:, :, :2] / (w_kpts0_h[:, :, [2]] + 1e-4) # (N, L, 2), +1e-4 to avoid zero depth
-
- # Covisible Check
- h, w = depth1.shape[1:3]
- covisible_mask = (w_kpts0[:, :, 0] > 0) * (w_kpts0[:, :, 0] < w-1) * \
- (w_kpts0[:, :, 1] > 0) * (w_kpts0[:, :, 1] < h-1)
- w_kpts0_long = w_kpts0.long()
- w_kpts0_long[~covisible_mask, :] = 0
-
- w_kpts0_depth = torch.stack(
- [depth1[i, w_kpts0_long[i, :, 1], w_kpts0_long[i, :, 0]] for i in range(w_kpts0_long.shape[0])], dim=0
- ) # (N, L)
- consistent_mask = ((w_kpts0_depth - w_kpts0_depth_computed) / w_kpts0_depth).abs() < 0.2
- valid_mask = nonzero_mask * covisible_mask * consistent_mask
-
- return valid_mask, w_kpts0
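A tiny numeric check of the unproject -> rigid transform -> project chain in warp_kpts (covisibility and consistency checks omitted), using an identity relative pose so the keypoint should map back onto itself:

import torch

K = torch.tensor([[280., 0., 128.], [0., 280., 128.], [0., 0., 1.]])
kpt = torch.tensor([[100., 60.]])
depth = torch.tensor([[2.0]])

kpt_h = torch.cat([kpt, torch.ones(1, 1)], dim=-1) * depth   # (x*d, y*d, d)
cam = K.inverse() @ kpt_h.T                                  # 3D point in camera 0
T_0to1 = torch.eye(4)[:3]                                    # identity pose, [3, 4]
w_cam = T_0to1[:, :3] @ cam + T_0to1[:, [3]]
w_h = (K @ w_cam).T
print(w_h[:, :2] / w_h[:, [2]])                              # ~tensor([[100., 60.]])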
diff --git a/One-2-3-45-master 2/elevation_estimate/loftr/utils/position_encoding.py b/One-2-3-45-master 2/elevation_estimate/loftr/utils/position_encoding.py
deleted file mode 100644
index 732d28c814ef93bf48d338ba7554f6dcfc3b880e..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/elevation_estimate/loftr/utils/position_encoding.py
+++ /dev/null
@@ -1,42 +0,0 @@
-import math
-import torch
-from torch import nn
-
-
-class PositionEncodingSine(nn.Module):
- """
-    This is a sinusoidal position encoding generalized to 2-dimensional images
- """
-
- def __init__(self, d_model, max_shape=(256, 256), temp_bug_fix=True):
- """
- Args:
- max_shape (tuple): for 1/8 featmap, the max length of 256 corresponds to 2048 pixels
- temp_bug_fix (bool): As noted in this [issue](https://github.com/zju3dv/LoFTR/issues/41),
- the original implementation of LoFTR includes a bug in the pos-enc impl, which has little impact
-            on the final performance. For now, we keep both impls for backward compatibility.
- We will remove the buggy impl after re-training all variants of our released models.
- """
- super().__init__()
-
- pe = torch.zeros((d_model, *max_shape))
- y_position = torch.ones(max_shape).cumsum(0).float().unsqueeze(0)
- x_position = torch.ones(max_shape).cumsum(1).float().unsqueeze(0)
- if temp_bug_fix:
- div_term = torch.exp(torch.arange(0, d_model//2, 2).float() * (-math.log(10000.0) / (d_model//2)))
-        else: # a buggy implementation (for backward compatibility only)
- div_term = torch.exp(torch.arange(0, d_model//2, 2).float() * (-math.log(10000.0) / d_model//2))
- div_term = div_term[:, None, None] # [C//4, 1, 1]
- pe[0::4, :, :] = torch.sin(x_position * div_term)
- pe[1::4, :, :] = torch.cos(x_position * div_term)
- pe[2::4, :, :] = torch.sin(y_position * div_term)
- pe[3::4, :, :] = torch.cos(y_position * div_term)
-
- self.register_buffer('pe', pe.unsqueeze(0), persistent=False) # [1, C, H, W]
-
- def forward(self, x):
- """
- Args:
- x: [N, C, H, W]
- """
- return x + self.pe[:, :, :x.size(2), :x.size(3)]
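Usage sketch (hypothetical shapes): the registered buffer is [1, C, 256, 256] and is simply cropped and added to the coarse feature map, so any H, W up to 256 works.

import torch

pe = PositionEncodingSine(d_model=256)    # the class defined above
feat_c = torch.zeros(2, 256, 60, 80)      # [N, C, H, W] coarse features
print(pe(feat_c).shape)                   # torch.Size([2, 256, 60, 80])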
diff --git a/One-2-3-45-master 2/elevation_estimate/loftr/utils/supervision.py b/One-2-3-45-master 2/elevation_estimate/loftr/utils/supervision.py
deleted file mode 100644
index 8ce6e79ec72b45fcb6b187e33bda93a47b168acd..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/elevation_estimate/loftr/utils/supervision.py
+++ /dev/null
@@ -1,151 +0,0 @@
-from math import log
-from loguru import logger
-
-import torch
-from einops import repeat
-from kornia.utils import create_meshgrid
-
-from .geometry import warp_kpts
-
-############## ↓ Coarse-Level supervision ↓ ##############
-
-
-@torch.no_grad()
-def mask_pts_at_padded_regions(grid_pt, mask):
- """For megadepth dataset, zero-padding exists in images"""
- mask = repeat(mask, 'n h w -> n (h w) c', c=2)
- grid_pt[~mask.bool()] = 0
- return grid_pt
-
-
-@torch.no_grad()
-def spvs_coarse(data, config):
- """
- Update:
- data (dict): {
- "conf_matrix_gt": [N, hw0, hw1],
- 'spv_b_ids': [M]
- 'spv_i_ids': [M]
- 'spv_j_ids': [M]
- 'spv_w_pt0_i': [N, hw0, 2], in original image resolution
- 'spv_pt1_i': [N, hw1, 2], in original image resolution
- }
-
- NOTE:
-        - for scannet dataset, there are 3 kinds of resolution {i, c, f}
-        - for megadepth dataset, there are 4 kinds of resolution {i, i_resize, c, f}
- """
- # 1. misc
- device = data['image0'].device
- N, _, H0, W0 = data['image0'].shape
- _, _, H1, W1 = data['image1'].shape
- scale = config['LOFTR']['RESOLUTION'][0]
- scale0 = scale * data['scale0'][:, None] if 'scale0' in data else scale
- scale1 = scale * data['scale1'][:, None] if 'scale0' in data else scale
- h0, w0, h1, w1 = map(lambda x: x // scale, [H0, W0, H1, W1])
-
- # 2. warp grids
- # create kpts in meshgrid and resize them to image resolution
- grid_pt0_c = create_meshgrid(h0, w0, False, device).reshape(1, h0*w0, 2).repeat(N, 1, 1) # [N, hw, 2]
- grid_pt0_i = scale0 * grid_pt0_c
- grid_pt1_c = create_meshgrid(h1, w1, False, device).reshape(1, h1*w1, 2).repeat(N, 1, 1)
- grid_pt1_i = scale1 * grid_pt1_c
-
- # mask padded region to (0, 0), so no need to manually mask conf_matrix_gt
- if 'mask0' in data:
- grid_pt0_i = mask_pts_at_padded_regions(grid_pt0_i, data['mask0'])
- grid_pt1_i = mask_pts_at_padded_regions(grid_pt1_i, data['mask1'])
-
- # warp kpts bi-directionally and resize them to coarse-level resolution
- # (no depth consistency check, since it leads to worse results experimentally)
- # (unhandled edge case: points with 0-depth will be warped to the left-up corner)
- _, w_pt0_i = warp_kpts(grid_pt0_i, data['depth0'], data['depth1'], data['T_0to1'], data['K0'], data['K1'])
- _, w_pt1_i = warp_kpts(grid_pt1_i, data['depth1'], data['depth0'], data['T_1to0'], data['K1'], data['K0'])
- w_pt0_c = w_pt0_i / scale1
- w_pt1_c = w_pt1_i / scale0
-
- # 3. check if mutual nearest neighbor
- w_pt0_c_round = w_pt0_c[:, :, :].round().long()
- nearest_index1 = w_pt0_c_round[..., 0] + w_pt0_c_round[..., 1] * w1
- w_pt1_c_round = w_pt1_c[:, :, :].round().long()
- nearest_index0 = w_pt1_c_round[..., 0] + w_pt1_c_round[..., 1] * w0
-
- # corner case: out of boundary
- def out_bound_mask(pt, w, h):
- return (pt[..., 0] < 0) + (pt[..., 0] >= w) + (pt[..., 1] < 0) + (pt[..., 1] >= h)
- nearest_index1[out_bound_mask(w_pt0_c_round, w1, h1)] = 0
- nearest_index0[out_bound_mask(w_pt1_c_round, w0, h0)] = 0
-
- loop_back = torch.stack([nearest_index0[_b][_i] for _b, _i in enumerate(nearest_index1)], dim=0)
- correct_0to1 = loop_back == torch.arange(h0*w0, device=device)[None].repeat(N, 1)
- correct_0to1[:, 0] = False # ignore the top-left corner
-
- # 4. construct a gt conf_matrix
- conf_matrix_gt = torch.zeros(N, h0*w0, h1*w1, device=device)
- b_ids, i_ids = torch.where(correct_0to1 != 0)
- j_ids = nearest_index1[b_ids, i_ids]
-
- conf_matrix_gt[b_ids, i_ids, j_ids] = 1
- data.update({'conf_matrix_gt': conf_matrix_gt})
-
- # 5. save coarse matches(gt) for training fine level
- if len(b_ids) == 0:
- logger.warning(f"No groundtruth coarse match found for: {data['pair_names']}")
- # this won't affect fine-level loss calculation
- b_ids = torch.tensor([0], device=device)
- i_ids = torch.tensor([0], device=device)
- j_ids = torch.tensor([0], device=device)
-
- data.update({
- 'spv_b_ids': b_ids,
- 'spv_i_ids': i_ids,
- 'spv_j_ids': j_ids
- })
-
- # 6. save intermediate results (for fast fine-level computation)
- data.update({
- 'spv_w_pt0_i': w_pt0_i,
- 'spv_pt1_i': grid_pt1_i
- })
-
-
-def compute_supervision_coarse(data, config):
-    assert len(set(data['dataset_name'])) == 1, "Training on mixed datasets is not supported!"
- data_source = data['dataset_name'][0]
- if data_source.lower() in ['scannet', 'megadepth']:
- spvs_coarse(data, config)
- else:
- raise ValueError(f'Unknown data source: {data_source}')
-
-
-############## ↓ Fine-Level supervision ↓ ##############
-
-@torch.no_grad()
-def spvs_fine(data, config):
- """
- Update:
- data (dict):{
- "expec_f_gt": [M, 2]}
- """
- # 1. misc
- # w_pt0_i, pt1_i = data.pop('spv_w_pt0_i'), data.pop('spv_pt1_i')
- w_pt0_i, pt1_i = data['spv_w_pt0_i'], data['spv_pt1_i']
- scale = config['LOFTR']['RESOLUTION'][1]
- radius = config['LOFTR']['FINE_WINDOW_SIZE'] // 2
-
- # 2. get coarse prediction
- b_ids, i_ids, j_ids = data['b_ids'], data['i_ids'], data['j_ids']
-
- # 3. compute gt
- scale = scale * data['scale1'][b_ids] if 'scale0' in data else scale
- # `expec_f_gt` might exceed the window, i.e. abs(*) > 1, which would be filtered later
- expec_f_gt = (w_pt0_i[b_ids, i_ids] - pt1_i[b_ids, j_ids]) / scale / radius # [M, 2]
- data.update({"expec_f_gt": expec_f_gt})
-
-
-def compute_supervision_fine(data, config):
- data_source = data['dataset_name'][0]
- if data_source.lower() in ['scannet', 'megadepth']:
- spvs_fine(data, config)
- else:
- raise NotImplementedError
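The mutual-nearest-neighbour test in step 3 of spvs_coarse keeps a 0->1 correspondence only if warping it back to image 0 lands on its own cell. A tiny sketch with made-up indices (batch dimension dropped):

import torch

h0 = w0 = 2                                       # 2x2 coarse grid, 4 cells
nearest_index1 = torch.tensor([2, 3, 0, 1])       # cell in img1 hit by each img0 cell
nearest_index0 = torch.tensor([3, 0, 0, 1])       # cell in img0 hit by each img1 cell

loop_back = nearest_index0[nearest_index1]        # follow 0 -> 1 -> 0
correct_0to1 = loop_back == torch.arange(h0 * w0)
correct_0to1[0] = False                           # the (0, 0) cell collects out-of-bound warps
print(correct_0to1)                               # tensor([False,  True, False, False])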
diff --git a/One-2-3-45-master 2/elevation_estimate/pyproject.toml b/One-2-3-45-master 2/elevation_estimate/pyproject.toml
deleted file mode 100644
index c54f1206ba6bf53530400613847e41b75ec1625e..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/elevation_estimate/pyproject.toml
+++ /dev/null
@@ -1,7 +0,0 @@
-[project]
-name = "elevation_estimate"
-version = "0.1"
-
-[tool.setuptools.packages.find]
-exclude = ["configs", "tests"] # empty by default
-namespaces = false # true by default
\ No newline at end of file
diff --git a/One-2-3-45-master 2/elevation_estimate/utils/__init__.py b/One-2-3-45-master 2/elevation_estimate/utils/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/One-2-3-45-master 2/elevation_estimate/utils/elev_est_api.py b/One-2-3-45-master 2/elevation_estimate/utils/elev_est_api.py
deleted file mode 100644
index e4f788f2cfc43b300d233d9d3519887080bed062..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/elevation_estimate/utils/elev_est_api.py
+++ /dev/null
@@ -1,205 +0,0 @@
-import os
-import cv2
-import numpy as np
-import os.path as osp
-import imageio
-from copy import deepcopy
-
-import loguru
-import torch
-import matplotlib.cm as cm
-import matplotlib.pyplot as plt
-
-from ..loftr import LoFTR, default_cfg
-from . import plt_utils
-from .plotting import make_matching_figure
-from .utils3d import rect_to_img, canonical_to_camera, calc_pose
-
-
-class ElevEstHelper:
- _feature_matcher = None
-
- @classmethod
- def get_feature_matcher(cls):
- if cls._feature_matcher is None:
- loguru.logger.info("Loading feature matcher...")
- _default_cfg = deepcopy(default_cfg)
- _default_cfg['coarse']['temp_bug_fix'] = True # set to False when using the old ckpt
- matcher = LoFTR(config=_default_cfg)
- current_dir = os.path.dirname(os.path.abspath(__file__))
- ckpt_path = os.path.join(current_dir, "weights/indoor_ds_new.ckpt")
- if not osp.exists(ckpt_path):
- loguru.logger.info("Downloading feature matcher...")
- os.makedirs("weights", exist_ok=True)
- import gdown
- gdown.cached_download(url="https://drive.google.com/uc?id=19s3QvcCWQ6g-N1PrYlDCg-2mOJZ3kkgS",
- path=ckpt_path)
- matcher.load_state_dict(torch.load(ckpt_path)['state_dict'])
- matcher = matcher.eval().cuda()
- cls._feature_matcher = matcher
- return cls._feature_matcher
-
-
-def mask_out_bkgd(img_path, dbg=False):
- img = imageio.imread_v2(img_path)
- if img.shape[-1] == 4:
-        fg_mask = img[:, :, 3] > 0  # use the alpha channel as the foreground mask
- else:
- loguru.logger.info("Image has no alpha channel, using thresholding to mask out background")
- fg_mask = ~(img > 245).all(axis=-1)
- if dbg:
- plt.imshow(plt_utils.vis_mask(img, fg_mask.astype(np.uint8), color=[0, 255, 0]))
- plt.show()
- return fg_mask
-
-
-def get_feature_matching(img_paths, dbg=False):
- assert len(img_paths) == 4
- matcher = ElevEstHelper.get_feature_matcher()
- feature_matching = {}
- masks = []
- for i in range(4):
- mask = mask_out_bkgd(img_paths[i], dbg=dbg)
- masks.append(mask)
- for i in range(0, 4):
- for j in range(i + 1, 4):
- img0_pth = img_paths[i]
- img1_pth = img_paths[j]
- mask0 = masks[i]
- mask1 = masks[j]
- img0_raw = cv2.imread(img0_pth, cv2.IMREAD_GRAYSCALE)
- img1_raw = cv2.imread(img1_pth, cv2.IMREAD_GRAYSCALE)
- original_shape = img0_raw.shape
- img0_raw_resized = cv2.resize(img0_raw, (480, 480))
- img1_raw_resized = cv2.resize(img1_raw, (480, 480))
-
- img0 = torch.from_numpy(img0_raw_resized)[None][None].cuda() / 255.
- img1 = torch.from_numpy(img1_raw_resized)[None][None].cuda() / 255.
- batch = {'image0': img0, 'image1': img1}
-
- # Inference with LoFTR and get prediction
- with torch.no_grad():
- matcher(batch)
- mkpts0 = batch['mkpts0_f'].cpu().numpy()
- mkpts1 = batch['mkpts1_f'].cpu().numpy()
- mconf = batch['mconf'].cpu().numpy()
- mkpts0[:, 0] = mkpts0[:, 0] * original_shape[1] / 480
- mkpts0[:, 1] = mkpts0[:, 1] * original_shape[0] / 480
- mkpts1[:, 0] = mkpts1[:, 0] * original_shape[1] / 480
- mkpts1[:, 1] = mkpts1[:, 1] * original_shape[0] / 480
-            keep0 = mask0[mkpts0[:, 1].astype(int), mkpts0[:, 0].astype(int)]
- keep1 = mask1[mkpts1[:, 1].astype(int), mkpts1[:, 0].astype(int)]
- keep = np.logical_and(keep0, keep1)
- mkpts0 = mkpts0[keep]
- mkpts1 = mkpts1[keep]
- mconf = mconf[keep]
- if dbg:
- # Draw visualization
- color = cm.jet(mconf)
- text = [
- 'LoFTR',
- 'Matches: {}'.format(len(mkpts0)),
- ]
- fig = make_matching_figure(img0_raw, img1_raw, mkpts0, mkpts1, color, text=text)
- fig.show()
- feature_matching[f"{i}_{j}"] = np.concatenate([mkpts0, mkpts1, mconf[:, None]], axis=1)
-
- return feature_matching
-
-
-def gen_pose_hypothesis(center_elevation):
- elevations = np.radians(
- [center_elevation, center_elevation - 10, center_elevation + 10, center_elevation, center_elevation]) # 45~120
- azimuths = np.radians([30, 30, 30, 20, 40])
- input_poses = calc_pose(elevations, azimuths, len(azimuths))
- input_poses = input_poses[1:]
- input_poses[..., 1] *= -1
- input_poses[..., 2] *= -1
- return input_poses
-
-
-def ba_error_general(K, matches, poses):
- projmat0 = K @ poses[0].inverse()[:3, :4]
- projmat1 = K @ poses[1].inverse()[:3, :4]
- match_01 = matches[0]
- pts0 = match_01[:, :2]
- pts1 = match_01[:, 2:4]
- Xref = cv2.triangulatePoints(projmat0.cpu().numpy(), projmat1.cpu().numpy(),
- pts0.cpu().numpy().T, pts1.cpu().numpy().T)
- Xref = Xref[:3] / Xref[3:]
- Xref = Xref.T
- Xref = torch.from_numpy(Xref).cuda().float()
- reproj_error = 0
- for match, cp in zip(matches[1:], poses[2:]):
- dist = (torch.norm(match_01[:, :2][:, None, :] - match[:, :2][None, :, :], dim=-1))
- if dist.numel() > 0:
- # print("dist.shape", dist.shape)
- m0to2_index = dist.argmin(1)
- keep = dist[torch.arange(match_01.shape[0]), m0to2_index] < 1
- if keep.sum() > 0:
- xref_in2 = rect_to_img(K, canonical_to_camera(Xref, cp.inverse()))
- reproj_error2 = torch.norm(match[m0to2_index][keep][:, 2:4] - xref_in2[keep], dim=-1)
- conf02 = match[m0to2_index][keep][:, -1]
- reproj_error += (reproj_error2 * conf02).sum() / (conf02.sum())
-
- return reproj_error
-
-
-def find_optim_elev(elevs, nimgs, matches, K, dbg=False):
- errs = []
- for elev in elevs:
- err = 0
- cam_poses = gen_pose_hypothesis(elev)
- for start in range(nimgs - 1):
- batch_matches, batch_poses = [], []
- for i in range(start, nimgs + start):
- ci = i % nimgs
- batch_poses.append(cam_poses[ci])
- for j in range(nimgs - 1):
- key = f"{start}_{(start + j + 1) % nimgs}"
- match = matches[key]
- batch_matches.append(match)
- err += ba_error_general(K, batch_matches, batch_poses)
- errs.append(err)
- errs = torch.tensor(errs)
- if dbg:
- plt.plot(elevs, errs)
- plt.show()
- optim_elev = elevs[torch.argmin(errs)].item()
- return optim_elev
-
-
-def get_elev_est(feature_matching, min_elev=30, max_elev=150, K=None, dbg=False):
- flag = True
- matches = {}
- for i in range(4):
- for j in range(i + 1, 4):
- match_ij = feature_matching[f"{i}_{j}"]
- if len(match_ij) == 0:
- flag = False
- match_ji = np.concatenate([match_ij[:, 2:4], match_ij[:, 0:2], match_ij[:, 4:5]], axis=1)
- matches[f"{i}_{j}"] = torch.from_numpy(match_ij).float().cuda()
- matches[f"{j}_{i}"] = torch.from_numpy(match_ji).float().cuda()
- if not flag:
- loguru.logger.info("0 matches, could not estimate elevation")
- return None
- interval = 10
- elevs = np.arange(min_elev, max_elev, interval)
- optim_elev1 = find_optim_elev(elevs, 4, matches, K)
-
- elevs = np.arange(optim_elev1 - 10, optim_elev1 + 10, 1)
- optim_elev2 = find_optim_elev(elevs, 4, matches, K)
-
- return optim_elev2
-
-
-def elev_est_api(img_paths, min_elev=30, max_elev=150, K=None, dbg=False):
- feature_matching = get_feature_matching(img_paths, dbg=dbg)
- if K is None:
- loguru.logger.warning("K is not provided, using default K")
- K = np.array([[280.0, 0, 128.0],
- [0, 280.0, 128.0],
- [0, 0, 1]])
- K = torch.from_numpy(K).cuda().float()
- elev = get_elev_est(feature_matching, min_elev, max_elev, K, dbg=dbg)
- return elev
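A call sketch (file names are hypothetical; a CUDA device and the LoFTR checkpoint download are required). The four paths should point to nearby views of the same object, as produced by the stage-2 inference of the pipeline; with K=None the default intrinsics above are used.

img_paths = [f"./exp/demo/elev_views/{i}.png" for i in range(4)]   # hypothetical paths
elev = elev_est_api(img_paths, min_elev=30, max_elev=150)
print("estimated polar angle (degrees):", elev)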
diff --git a/One-2-3-45-master 2/elevation_estimate/utils/plotting.py b/One-2-3-45-master 2/elevation_estimate/utils/plotting.py
deleted file mode 100644
index 9e7ac1de4b1fb6d0cbeda2f61eca81c68a9ba423..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/elevation_estimate/utils/plotting.py
+++ /dev/null
@@ -1,154 +0,0 @@
-import bisect
-import numpy as np
-import matplotlib.pyplot as plt
-import matplotlib
-
-
-def _compute_conf_thresh(data):
- dataset_name = data['dataset_name'][0].lower()
- if dataset_name == 'scannet':
- thr = 5e-4
- elif dataset_name == 'megadepth':
- thr = 1e-4
- else:
- raise ValueError(f'Unknown dataset: {dataset_name}')
- return thr
-
-
-# --- VISUALIZATION --- #
-
-def make_matching_figure(
- img0, img1, mkpts0, mkpts1, color,
- kpts0=None, kpts1=None, text=[], dpi=75, path=None):
- # draw image pair
-    assert mkpts0.shape[0] == mkpts1.shape[0], f'mkpts0: {mkpts0.shape[0]} vs. mkpts1: {mkpts1.shape[0]}'
- fig, axes = plt.subplots(1, 2, figsize=(10, 6), dpi=dpi)
- axes[0].imshow(img0, cmap='gray')
- axes[1].imshow(img1, cmap='gray')
- for i in range(2): # clear all frames
- axes[i].get_yaxis().set_ticks([])
- axes[i].get_xaxis().set_ticks([])
- for spine in axes[i].spines.values():
- spine.set_visible(False)
- plt.tight_layout(pad=1)
-
- if kpts0 is not None:
- assert kpts1 is not None
- axes[0].scatter(kpts0[:, 0], kpts0[:, 1], c='w', s=2)
- axes[1].scatter(kpts1[:, 0], kpts1[:, 1], c='w', s=2)
-
- # draw matches
- if mkpts0.shape[0] != 0 and mkpts1.shape[0] != 0:
- fig.canvas.draw()
- transFigure = fig.transFigure.inverted()
- fkpts0 = transFigure.transform(axes[0].transData.transform(mkpts0))
- fkpts1 = transFigure.transform(axes[1].transData.transform(mkpts1))
- fig.lines = [matplotlib.lines.Line2D((fkpts0[i, 0], fkpts1[i, 0]),
- (fkpts0[i, 1], fkpts1[i, 1]),
- transform=fig.transFigure, c=color[i], linewidth=1)
- for i in range(len(mkpts0))]
-
- axes[0].scatter(mkpts0[:, 0], mkpts0[:, 1], c=color, s=4)
- axes[1].scatter(mkpts1[:, 0], mkpts1[:, 1], c=color, s=4)
-
- # put txts
- txt_color = 'k' if img0[:100, :200].mean() > 200 else 'w'
- fig.text(
- 0.01, 0.99, '\n'.join(text), transform=fig.axes[0].transAxes,
- fontsize=15, va='top', ha='left', color=txt_color)
-
- # save or return figure
- if path:
- plt.savefig(str(path), bbox_inches='tight', pad_inches=0)
- plt.close()
- else:
- return fig
-
-
-def _make_evaluation_figure(data, b_id, alpha='dynamic'):
- b_mask = data['m_bids'] == b_id
- conf_thr = _compute_conf_thresh(data)
-
- img0 = (data['image0'][b_id][0].cpu().numpy() * 255).round().astype(np.int32)
- img1 = (data['image1'][b_id][0].cpu().numpy() * 255).round().astype(np.int32)
- kpts0 = data['mkpts0_f'][b_mask].cpu().numpy()
- kpts1 = data['mkpts1_f'][b_mask].cpu().numpy()
-
- # for megadepth, we visualize matches on the resized image
- if 'scale0' in data:
- kpts0 = kpts0 / data['scale0'][b_id].cpu().numpy()[[1, 0]]
- kpts1 = kpts1 / data['scale1'][b_id].cpu().numpy()[[1, 0]]
-
- epi_errs = data['epi_errs'][b_mask].cpu().numpy()
- correct_mask = epi_errs < conf_thr
- precision = np.mean(correct_mask) if len(correct_mask) > 0 else 0
- n_correct = np.sum(correct_mask)
- n_gt_matches = int(data['conf_matrix_gt'][b_id].sum().cpu())
- recall = 0 if n_gt_matches == 0 else n_correct / (n_gt_matches)
- # recall might be larger than 1, since the calculation of conf_matrix_gt
- # uses groundtruth depths and camera poses, but epipolar distance is used here.
-
- # matching info
- if alpha == 'dynamic':
- alpha = dynamic_alpha(len(correct_mask))
- color = error_colormap(epi_errs, conf_thr, alpha=alpha)
-
- text = [
- f'#Matches {len(kpts0)}',
- f'Precision({conf_thr:.2e}) ({100 * precision:.1f}%): {n_correct}/{len(kpts0)}',
- f'Recall({conf_thr:.2e}) ({100 * recall:.1f}%): {n_correct}/{n_gt_matches}'
- ]
-
- # make the figure
- figure = make_matching_figure(img0, img1, kpts0, kpts1,
- color, text=text)
- return figure
-
-def _make_confidence_figure(data, b_id):
- # TODO: Implement confidence figure
- raise NotImplementedError()
-
-
-def make_matching_figures(data, config, mode='evaluation'):
- """ Make matching figures for a batch.
-
- Args:
- data (Dict): a batch updated by PL_LoFTR.
- config (Dict): matcher config
- Returns:
-        figures (Dict[str, List[plt.figure]])
- """
- assert mode in ['evaluation', 'confidence'] # 'confidence'
- figures = {mode: []}
- for b_id in range(data['image0'].size(0)):
- if mode == 'evaluation':
- fig = _make_evaluation_figure(
- data, b_id,
- alpha=config.TRAINER.PLOT_MATCHES_ALPHA)
- elif mode == 'confidence':
- fig = _make_confidence_figure(data, b_id)
- else:
- raise ValueError(f'Unknown plot mode: {mode}')
- figures[mode].append(fig)
- return figures
-
-
-def dynamic_alpha(n_matches,
- milestones=[0, 300, 1000, 2000],
- alphas=[1.0, 0.8, 0.4, 0.2]):
- if n_matches == 0:
- return 1.0
- ranges = list(zip(alphas, alphas[1:] + [None]))
- loc = bisect.bisect_right(milestones, n_matches) - 1
- _range = ranges[loc]
- if _range[1] is None:
- return _range[0]
- return _range[1] + (milestones[loc + 1] - n_matches) / (
- milestones[loc + 1] - milestones[loc]) * (_range[0] - _range[1])
-
-
-def error_colormap(err, thr, alpha=1.0):
-    assert alpha <= 1.0 and alpha > 0, f"Invalid alpha value: {alpha}"
- x = 1 - np.clip(err / (thr * 2), 0, 1)
- return np.clip(
- np.stack([2-x*2, x*2, np.zeros_like(x), np.ones_like(x)*alpha], -1), 0, 1)
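Worked example of dynamic_alpha (using the function above): 650 matches falls in the [300, 1000) band, so alpha is interpolated between 0.8 and 0.4, i.e. alpha = 0.4 + (1000 - 650) / (1000 - 300) * (0.8 - 0.4) = 0.6.

print(dynamic_alpha(650))   # ~0.6
print(dynamic_alpha(0))     # 1.0 (no matches)
print(dynamic_alpha(5000))  # 0.2 (past the last milestone)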
diff --git a/One-2-3-45-master 2/elevation_estimate/utils/plt_utils.py b/One-2-3-45-master 2/elevation_estimate/utils/plt_utils.py
deleted file mode 100644
index 92353edab179de9f702633a01e123e94403bd83f..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/elevation_estimate/utils/plt_utils.py
+++ /dev/null
@@ -1,318 +0,0 @@
-import os.path as osp
-import os
-import matplotlib.pyplot as plt
-import torch
-import cv2
-import math
-
-import numpy as np
-import tqdm
-from cv2 import findContours
-from dl_ext.primitive import safe_zip
-from dl_ext.timer import EvalTime
-
-
-def plot_confidence(confidence):
- n = len(confidence)
- plt.plot(np.arange(n), confidence)
- plt.show()
-
-
-def image_grid(
- images,
- rows=None,
- cols=None,
- fill: bool = True,
- show_axes: bool = False,
- rgb=None,
- show=True,
- label=None,
- **kwargs
-):
- """
- A util function for plotting a grid of images.
- Args:
- images: (N, H, W, 4) array of RGBA images
- rows: number of rows in the grid
- cols: number of columns in the grid
- fill: boolean indicating if the space between images should be filled
- show_axes: boolean indicating if the axes of the plots should be visible
- rgb: boolean, If True, only RGB channels are plotted.
- If False, only the alpha channel is plotted.
- Returns:
- None
- """
- evaltime = EvalTime(disable=True)
- evaltime('')
- if isinstance(images, torch.Tensor):
- images = images.detach().cpu()
- if len(images[0].shape) == 2:
- rgb = False
- if images[0].shape[-1] == 2:
- # flow
- images = [flow_to_image(im) for im in images]
- if (rows is None) != (cols is None):
- raise ValueError("Specify either both rows and cols or neither.")
-
- if rows is None:
- rows = int(len(images) ** 0.5)
- cols = math.ceil(len(images) / rows)
-
- gridspec_kw = {"wspace": 0.0, "hspace": 0.0} if fill else {}
- if len(images) < 50:
- figsize = (10, 10)
- else:
- figsize = (15, 15)
- evaltime('0.5')
- plt.figure(figsize=figsize)
- # fig, axarr = plt.subplots(rows, cols, gridspec_kw=gridspec_kw, figsize=figsize)
- if label:
- # fig.suptitle(label, fontsize=30)
- plt.suptitle(label, fontsize=30)
- # bleed = 0
- # fig.subplots_adjust(left=bleed, bottom=bleed, right=(1 - bleed), top=(1 - bleed))
- evaltime('subplots')
-
- # for i, (ax, im) in enumerate(tqdm.tqdm(zip(axarr.ravel(), images), leave=True, total=len(images))):
- for i in range(len(images)):
- # evaltime(f'{i} begin')
- plt.subplot(rows, cols, i + 1)
- if rgb:
- # only render RGB channels
- plt.imshow(images[i][..., :3], **kwargs)
- # ax.imshow(im[..., :3], **kwargs)
- else:
- # only render Alpha channel
- plt.imshow(images[i], **kwargs)
- # ax.imshow(im, **kwargs)
- if not show_axes:
- plt.axis('off')
- # ax.set_axis_off()
- # ax.set_title(f'{i}')
- plt.title(f'{i}')
- # evaltime(f'{i} end')
- evaltime('2')
- if show:
- plt.show()
- # return fig
-
-
-def depth_grid(
- depths,
- rows=None,
- cols=None,
- fill: bool = True,
- show_axes: bool = False,
-):
- """
-    A util function for plotting a grid of depth maps.
-    Args:
-        depths: list or (N, H, W) array of depth maps
-        rows: number of rows in the grid
-        cols: number of columns in the grid
-        fill: boolean indicating if the space between images should be filled
-        show_axes: boolean indicating if the axes of the plots should be visible
- Returns:
- None
- """
- if (rows is None) != (cols is None):
- raise ValueError("Specify either both rows and cols or neither.")
-
- if rows is None:
- rows = len(depths)
- cols = 1
-
- gridspec_kw = {"wspace": 0.0, "hspace": 0.0} if fill else {}
- fig, axarr = plt.subplots(rows, cols, gridspec_kw=gridspec_kw, figsize=(15, 9))
- bleed = 0
- fig.subplots_adjust(left=bleed, bottom=bleed, right=(1 - bleed), top=(1 - bleed))
-
- for ax, im in zip(axarr.ravel(), depths):
- ax.imshow(im)
- if not show_axes:
- ax.set_axis_off()
- plt.show()
-
-
-def hover_masks_on_imgs(images, masks):
- masks = np.array(masks)
- new_imgs = []
- tids = list(range(1, masks.max() + 1))
- colors = colormap(rgb=True, lighten=True)
- for im, mask in tqdm.tqdm(safe_zip(images, masks), total=len(images)):
- for tid in tids:
- im = vis_mask(
- im,
- (mask == tid).astype(np.uint8),
- color=colors[tid],
- alpha=0.5,
- border_alpha=0.5,
- border_color=[255, 255, 255],
- border_thick=3)
- new_imgs.append(im)
- return new_imgs
-
-
-def vis_mask(img,
- mask,
- color=[255, 255, 255],
- alpha=0.4,
- show_border=True,
- border_alpha=0.5,
- border_thick=1,
- border_color=None):
- """Visualizes a single binary mask."""
- if isinstance(mask, torch.Tensor):
- from anypose.utils.pn_utils import to_array
- mask = to_array(mask > 0).astype(np.uint8)
- img = img.astype(np.float32)
- idx = np.nonzero(mask)
-
- img[idx[0], idx[1], :] *= 1.0 - alpha
- img[idx[0], idx[1], :] += [alpha * x for x in color]
-
- if show_border:
- contours, _ = findContours(
- mask.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)
- # contours = [c for c in contours if c.shape[0] > 10]
- if border_color is None:
- border_color = color
- if not isinstance(border_color, list):
- border_color = border_color.tolist()
- if border_alpha < 1:
- with_border = img.copy()
- cv2.drawContours(with_border, contours, -1, border_color,
- border_thick, cv2.LINE_AA)
- img = (1 - border_alpha) * img + border_alpha * with_border
- else:
- cv2.drawContours(img, contours, -1, border_color, border_thick,
- cv2.LINE_AA)
-
- return img.astype(np.uint8)
-
-
-def colormap(rgb=False, lighten=True):
- """Copied from Detectron codebase."""
- color_list = np.array(
- [
- 0.000, 0.447, 0.741,
- 0.850, 0.325, 0.098,
- 0.929, 0.694, 0.125,
- 0.494, 0.184, 0.556,
- 0.466, 0.674, 0.188,
- 0.301, 0.745, 0.933,
- 0.635, 0.078, 0.184,
- 0.300, 0.300, 0.300,
- 0.600, 0.600, 0.600,
- 1.000, 0.000, 0.000,
- 1.000, 0.500, 0.000,
- 0.749, 0.749, 0.000,
- 0.000, 1.000, 0.000,
- 0.000, 0.000, 1.000,
- 0.667, 0.000, 1.000,
- 0.333, 0.333, 0.000,
- 0.333, 0.667, 0.000,
- 0.333, 1.000, 0.000,
- 0.667, 0.333, 0.000,
- 0.667, 0.667, 0.000,
- 0.667, 1.000, 0.000,
- 1.000, 0.333, 0.000,
- 1.000, 0.667, 0.000,
- 1.000, 1.000, 0.000,
- 0.000, 0.333, 0.500,
- 0.000, 0.667, 0.500,
- 0.000, 1.000, 0.500,
- 0.333, 0.000, 0.500,
- 0.333, 0.333, 0.500,
- 0.333, 0.667, 0.500,
- 0.333, 1.000, 0.500,
- 0.667, 0.000, 0.500,
- 0.667, 0.333, 0.500,
- 0.667, 0.667, 0.500,
- 0.667, 1.000, 0.500,
- 1.000, 0.000, 0.500,
- 1.000, 0.333, 0.500,
- 1.000, 0.667, 0.500,
- 1.000, 1.000, 0.500,
- 0.000, 0.333, 1.000,
- 0.000, 0.667, 1.000,
- 0.000, 1.000, 1.000,
- 0.333, 0.000, 1.000,
- 0.333, 0.333, 1.000,
- 0.333, 0.667, 1.000,
- 0.333, 1.000, 1.000,
- 0.667, 0.000, 1.000,
- 0.667, 0.333, 1.000,
- 0.667, 0.667, 1.000,
- 0.667, 1.000, 1.000,
- 1.000, 0.000, 1.000,
- 1.000, 0.333, 1.000,
- 1.000, 0.667, 1.000,
- 0.167, 0.000, 0.000,
- 0.333, 0.000, 0.000,
- 0.500, 0.000, 0.000,
- 0.667, 0.000, 0.000,
- 0.833, 0.000, 0.000,
- 1.000, 0.000, 0.000,
- 0.000, 0.167, 0.000,
- 0.000, 0.333, 0.000,
- 0.000, 0.500, 0.000,
- 0.000, 0.667, 0.000,
- 0.000, 0.833, 0.000,
- 0.000, 1.000, 0.000,
- 0.000, 0.000, 0.167,
- 0.000, 0.000, 0.333,
- 0.000, 0.000, 0.500,
- 0.000, 0.000, 0.667,
- 0.000, 0.000, 0.833,
- 0.000, 0.000, 1.000,
- 0.000, 0.000, 0.000,
- 0.143, 0.143, 0.143,
- 0.286, 0.286, 0.286,
- 0.429, 0.429, 0.429,
- 0.571, 0.571, 0.571,
- 0.714, 0.714, 0.714,
- 0.857, 0.857, 0.857,
- 1.000, 1.000, 1.000
- ]
- ).astype(np.float32)
- color_list = color_list.reshape((-1, 3))
- if not rgb:
- color_list = color_list[:, ::-1]
-
- if lighten:
- # Make all the colors a little lighter / whiter. This is copied
- # from the detectron visualization code (search for 'w_ratio').
- w_ratio = 0.4
- color_list = (color_list * (1 - w_ratio) + w_ratio)
- return color_list * 255
-
-
-def vis_layer_mask(masks, save_path=None):
- masks = torch.as_tensor(masks)
- tids = masks.unique().tolist()
- tids.remove(0)
- for tid in tqdm.tqdm(tids):
- show = save_path is None
- image_grid(masks == tid, label=f'{tid}', show=show)
- if save_path:
- os.makedirs(osp.dirname(save_path), exist_ok=True)
- plt.savefig(save_path % tid)
- plt.close('all')
-
-
-def show(x, **kwargs):
- if isinstance(x, torch.Tensor):
- x = x.detach().cpu()
- plt.imshow(x, **kwargs)
- plt.show()
-
-
-def vis_title(rgb, text, shift_y=30):
- tmp = rgb.copy()
- shift_x = rgb.shape[1] // 2
- cv2.putText(tmp, text,
- (shift_x, shift_y), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 0, 0), thickness=2, lineType=cv2.LINE_AA)
- return tmp
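Minimal sketch of vis_mask (defined above) on a synthetic square mask; contours are drawn because show_border defaults to True:

import numpy as np

img = np.zeros((64, 64, 3), dtype=np.uint8)
mask = np.zeros((64, 64), dtype=np.uint8)
mask[16:48, 16:48] = 1

overlay = vis_mask(img, mask, color=[0, 255, 0], alpha=0.5)
print(overlay.shape, overlay.dtype)   # (64, 64, 3) uint8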
diff --git a/One-2-3-45-master 2/elevation_estimate/utils/utils3d.py b/One-2-3-45-master 2/elevation_estimate/utils/utils3d.py
deleted file mode 100644
index 9cc92fbde4143a4ed5187c989e3f98a896e7caab..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/elevation_estimate/utils/utils3d.py
+++ /dev/null
@@ -1,62 +0,0 @@
-import numpy as np
-import torch
-
-
-def cart_to_hom(pts):
- """
- :param pts: (N, 3 or 2)
- :return pts_hom: (N, 4 or 3)
- """
- if isinstance(pts, np.ndarray):
- pts_hom = np.concatenate((pts, np.ones([*pts.shape[:-1], 1], dtype=np.float32)), -1)
- else:
- ones = torch.ones([*pts.shape[:-1], 1], dtype=torch.float32, device=pts.device)
- pts_hom = torch.cat((pts, ones), dim=-1)
- return pts_hom
-
-
-def hom_to_cart(pts):
- return pts[..., :-1] / pts[..., -1:]
-
-
-def canonical_to_camera(pts, pose):
- pts = cart_to_hom(pts)
- pts = pts @ pose.transpose(-1, -2)
- pts = hom_to_cart(pts)
- return pts
-
-
-def rect_to_img(K, pts_rect):
- from dl_ext.vision_ext.datasets.kitti.structures import Calibration
- pts_2d_hom = pts_rect @ K.t()
- pts_img = Calibration.hom_to_cart(pts_2d_hom)
- return pts_img
-
-
-def calc_pose(phis, thetas, size, radius=1.2):
- import torch
- def normalize(vectors):
- return vectors / (torch.norm(vectors, dim=-1, keepdim=True) + 1e-10)
-
- device = torch.device('cuda')
- thetas = torch.FloatTensor(thetas).to(device)
- phis = torch.FloatTensor(phis).to(device)
-
- centers = torch.stack([
- radius * torch.sin(thetas) * torch.sin(phis),
- -radius * torch.cos(thetas) * torch.sin(phis),
- radius * torch.cos(phis),
- ], dim=-1) # [B, 3]
-
- # lookat
- forward_vector = normalize(centers).squeeze(0)
- up_vector = torch.FloatTensor([0, 0, 1]).to(device).unsqueeze(0).repeat(size, 1)
- right_vector = normalize(torch.cross(up_vector, forward_vector, dim=-1))
- if right_vector.pow(2).sum() < 0.01:
- right_vector = torch.FloatTensor([0, 1, 0]).to(device).unsqueeze(0).repeat(size, 1)
- up_vector = normalize(torch.cross(forward_vector, right_vector, dim=-1))
-
- poses = torch.eye(4, dtype=torch.float, device=device).unsqueeze(0).repeat(size, 1, 1)
- poses[:, :3, :3] = torch.stack((right_vector, up_vector, forward_vector), dim=-1)
- poses[:, :3, 3] = centers
- return poses
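Sketch of building the five-pose hypothesis used by gen_pose_hypothesis in elev_est_api.py for a 60-degree elevation guess (calc_pose hard-codes the CUDA device, so a GPU is assumed):

import numpy as np

elev = 60.0
phis = np.radians([elev, elev - 10, elev + 10, elev, elev])   # elevations
thetas = np.radians([30, 30, 30, 20, 40])                     # azimuths
poses = calc_pose(phis, thetas, len(thetas))                  # [5, 4, 4] camera-to-world
print(poses.shape)                                            # torch.Size([5, 4, 4])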
diff --git a/One-2-3-45-master 2/elevation_estimate/utils/weights/.gitkeep b/One-2-3-45-master 2/elevation_estimate/utils/weights/.gitkeep
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/One-2-3-45-master 2/example.ipynb b/One-2-3-45-master 2/example.ipynb
deleted file mode 100644
index 8100c4b4309870d799a93b7e930243cd56cc3d40..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/example.ipynb
+++ /dev/null
@@ -1,765 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "code",
- "execution_count": 1,
- "metadata": {},
- "outputs": [
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "The cache for model files in Transformers v4.22.0 has been updated. Migrating your old cache. This is a one-time only operation. You can interrupt this and resume the migration later on by calling `transformers.utils.move_cache()`.\n"
- ]
- },
- {
- "data": {
- "application/vnd.jupyter.widget-view+json": {
- "model_id": "c59dab96c2f0475f85425eb03f2b71df",
- "version_major": 2,
- "version_minor": 0
- },
- "text/plain": [
- "0it [00:00, ?it/s]"
- ]
- },
- "metadata": {},
- "output_type": "display_data"
- }
- ],
- "source": [
- "import os\n",
- "import torch\n",
- "from PIL import Image\n",
- "from utils.zero123_utils import init_model, predict_stage1_gradio, zero123_infer\n",
- "from utils.sam_utils import sam_init, sam_out_nosave\n",
- "from utils.utils import pred_bbox, image_preprocess_nosave, gen_poses, image_grid, convert_mesh_format\n",
- "from elevation_estimate.estimate_wild_imgs import estimate_elev\n",
- "\n",
- "_GPU_INDEX = 0\n",
- "_HALF_PRECISION = True\n",
- "_MESH_RESOLUTION = 256\n",
- "# NOTE: Uncomment the following line in the docker container\n",
- "# os.chdir(\"./One-2-3-45/\")\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 2,
- "metadata": {},
- "outputs": [],
- "source": [
- "def preprocess(predictor, raw_im, lower_contrast=False):\n",
- " raw_im.thumbnail([512, 512], Image.Resampling.LANCZOS)\n",
- " image_sam = sam_out_nosave(predictor, raw_im.convert(\"RGB\"), pred_bbox(raw_im))\n",
- " input_256 = image_preprocess_nosave(image_sam, lower_contrast=lower_contrast, rescale=True)\n",
- " torch.cuda.empty_cache()\n",
- " return input_256"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 3,
- "metadata": {},
- "outputs": [],
- "source": [
- "def stage1_run(model, device, exp_dir,\n",
- " input_im, scale, ddim_steps):\n",
- " # folder to save the stage 1 images\n",
- " stage1_dir = os.path.join(exp_dir, \"stage1_8\")\n",
- " os.makedirs(stage1_dir, exist_ok=True)\n",
- "\n",
- " # stage 1: generate 4 views at the same elevation as the input\n",
- " output_ims = predict_stage1_gradio(model, input_im, save_path=stage1_dir, adjust_set=list(range(4)), device=device, ddim_steps=ddim_steps, scale=scale)\n",
- " \n",
- " # stage 2 for the first image\n",
- " # infer 4 nearby views for an image to estimate the polar angle of the input\n",
- " stage2_steps = 50 # ddim_steps\n",
- " zero123_infer(model, exp_dir, indices=[0], device=device, ddim_steps=stage2_steps, scale=scale)\n",
- " # estimate the camera pose (elevation) of the input image.\n",
- " try:\n",
- " polar_angle = estimate_elev(exp_dir)\n",
- " except:\n",
- " print(\"Failed to estimate polar angle\")\n",
- " polar_angle = 90\n",
- " print(\"Estimated polar angle:\", polar_angle)\n",
- " gen_poses(exp_dir, polar_angle)\n",
- "\n",
- " # stage 1: generate another 4 views at a different elevation\n",
- " if polar_angle <= 75:\n",
- " output_ims_2 = predict_stage1_gradio(model, input_im, save_path=stage1_dir, adjust_set=list(range(4,8)), device=device, ddim_steps=ddim_steps, scale=scale)\n",
- " else:\n",
- " output_ims_2 = predict_stage1_gradio(model, input_im, save_path=stage1_dir, adjust_set=list(range(8,12)), device=device, ddim_steps=ddim_steps, scale=scale)\n",
- " torch.cuda.empty_cache()\n",
- " return 90-polar_angle, output_ims+output_ims_2\n",
- " \n",
- "def stage2_run(model, device, exp_dir,\n",
- " elev, scale, stage2_steps=50):\n",
- " # stage 2 for the remaining 7 images, generate 7*4=28 views\n",
- " if 90-elev <= 75:\n",
- " zero123_infer(model, exp_dir, indices=list(range(1,8)), device=device, ddim_steps=stage2_steps, scale=scale)\n",
- " else:\n",
- " zero123_infer(model, exp_dir, indices=list(range(1,4))+list(range(8,12)), device=device, ddim_steps=stage2_steps, scale=scale)\n",
- "\n",
- "def reconstruct(exp_dir, output_format=\".ply\", device_idx=0):\n",
- " exp_dir = os.path.abspath(exp_dir)\n",
- " main_dir_path = os.path.abspath(os.path.dirname(\"./\"))\n",
- " os.chdir('reconstruction/')\n",
- "\n",
- " bash_script = f'CUDA_VISIBLE_DEVICES={device_idx} python exp_runner_generic_blender_val.py \\\n",
- " --specific_dataset_name {exp_dir} \\\n",
- " --mode export_mesh \\\n",
- " --conf confs/one2345_lod0_val_demo.conf \\\n",
- " --resolution {_MESH_RESOLUTION}'\n",
- " print(bash_script)\n",
- " os.system(bash_script)\n",
- " os.chdir(main_dir_path)\n",
- "\n",
- " ply_path = os.path.join(exp_dir, f\"mesh.ply\")\n",
- " if output_format == \".ply\":\n",
- " return ply_path\n",
- " if output_format not in [\".obj\", \".glb\"]:\n",
- " print(\"Invalid output format, must be one of .ply, .obj, .glb\")\n",
- " return ply_path\n",
- " return convert_mesh_format(exp_dir, output_format=output_format)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 4,
- "metadata": {},
- "outputs": [
- {
- "data": {
- "text/html": [
- "Instantiating LatentDiffusion...\n",
-      "\n"
- ],
- "text/plain": [
- "Instantiating LatentDiffusion\u001b[33m...\u001b[0m\n"
- ]
- },
- "metadata": {},
- "output_type": "display_data"
- },
- {
- "data": {
- "text/html": [
- "Loading model from zero123-xl.ckpt\n",
-      "\n"
- ],
- "text/plain": [
- "Loading model from zero123-xl.ckpt\n"
- ]
- },
- "metadata": {},
- "output_type": "display_data"
- },
- {
- "data": {
- "text/html": [
- "Global Step: 122000\n",
-      "\n"
- ],
- "text/plain": [
- "Global Step: \u001b[1;36m122000\u001b[0m\n"
- ]
- },
- "metadata": {},
- "output_type": "display_data"
- },
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "LatentDiffusion: Running in eps-prediction mode\n",
- "DiffusionWrapper has 859.53 M params.\n",
- "Keeping EMAs of 688.\n",
- "making attention of type 'vanilla' with 512 in_channels\n",
- "Working with z of shape (1, 4, 32, 32) = 4096 dimensions.\n",
- "making attention of type 'vanilla' with 512 in_channels\n"
- ]
- },
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "100%|███████████████████████████████████████| 890M/890M [00:09<00:00, 94.1MiB/s]\n"
- ]
- },
- {
- "data": {
- "text/html": [
- "Instantiating StableDiffusionSafetyChecker...\n",
-      "\n"
- ],
- "text/plain": [
- "Instantiating StableDiffusionSafetyChecker\u001b[33m...\u001b[0m\n"
- ]
- },
- "metadata": {},
- "output_type": "display_data"
- },
- {
- "data": {
- "application/vnd.jupyter.widget-view+json": {
- "model_id": "997efc0ee5c34aa988ee133a6657075b",
- "version_major": 2,
- "version_minor": 0
- },
- "text/plain": [
- "Downloading (…)lve/main/config.json: 0%| | 0.00/4.55k [00:00, ?B/s]"
- ]
- },
- "metadata": {},
- "output_type": "display_data"
- },
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "`text_config_dict` is provided which will be used to initialize `CLIPTextConfig`. The value `text_config[\"id2label\"]` will be overriden.\n",
- "`text_config_dict` is provided which will be used to initialize `CLIPTextConfig`. The value `text_config[\"bos_token_id\"]` will be overriden.\n",
- "`text_config_dict` is provided which will be used to initialize `CLIPTextConfig`. The value `text_config[\"eos_token_id\"]` will be overriden.\n"
- ]
- },
- {
- "data": {
- "application/vnd.jupyter.widget-view+json": {
- "model_id": "665d396a173b4035a736a691051fa464",
- "version_major": 2,
- "version_minor": 0
- },
- "text/plain": [
- "Downloading pytorch_model.bin: 0%| | 0.00/1.22G [00:00, ?B/s]"
- ]
- },
- "metadata": {},
- "output_type": "display_data"
- },
- {
- "data": {
- "application/vnd.jupyter.widget-view+json": {
- "model_id": "c8f706f5de2349b8b1eba79b3416b5f9",
- "version_major": 2,
- "version_minor": 0
- },
- "text/plain": [
- "Downloading (…)rocessor_config.json: 0%| | 0.00/316 [00:00, ?B/s]"
- ]
- },
- "metadata": {},
- "output_type": "display_data"
- }
- ],
- "source": [
- "device = f\"cuda:{_GPU_INDEX}\" if torch.cuda.is_available() else \"cpu\"\n",
- "\n",
- "# initialize the zero123 model\n",
- "models = init_model(device, 'zero123-xl.ckpt', half_precision=_HALF_PRECISION)\n",
- "\n",
- "model_zero123 = models[\"turncam\"]\n",
- "\n",
- "# initialize the Segment Anything model\n",
- "predictor = sam_init(_GPU_INDEX)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 5,
- "metadata": {},
- "outputs": [
- {
- "data": {
-      "image/png": "<base64-encoded PNG output elided>"
zutRaWq1cq7n1c79EC3gUkTF0mneJdGXecb59EdDaNEY3t8TmWy0RyCIibGTbuo4xAKCUcj6fpZTzct7WZdu2WkWk1dZEJDxOp6OZEWFiyJdHTZWDgWOaZ3c/n0/MJcLcc+BgM9/WNVMpkgYe2k29looEbj5GN9NUlbhbraWWQsgAQUxJD4kwEbrZtiwZwcAipZZ8Js7LmqShMAO4MMk8MafkkhgIGc2sClNhlmIepn3bjJizUz/1XoSJ86UBINTW+rqWIknOq/ZSxFx7tqQI5k7IbgrBVOhidibUMTCCmcd7Smfr+L3vff+f/dN/9sGL50/ubs0dLlkfTAAoDAGSMrAARoTQC+wmJEkGXmahi7YB1nVV1dRRcBE3TeA4rZB5sVNmSRMRQrJLtVVh9oymIcySJMJu4/F8CkhNP+Tjbu7eR5HMhe7JrKmOCEA3M+PakuK5aEsgUgZkqkMVMHa7OenJkvIB4afPnp1Pp21bS6lZztTGNDeABDWIK3GKbgMK87qcAsDDzfzx4Ty1GRFLK+G+9eERu2kafQzTAMigPtOuEOBOxNkAAFJCRrXUut9vpWgf+RAj4hiKiNPU5nmqpSZAlgkfl9hoHVNrFHw+PpRS5/0h2R4C731Ft3SVbNtqLMxcSt3MmIUAWi21ViTcRnc3Nd22Ld0sAJCZuJnCYZ55EGRqAKFqhJeUpsJs7sTyvU+++6d//hd/7+/8bYggJIeBQBnu5mFiYwRRsomlCAIB4Rhql+gdyjBiDDAdREjMbiYirkrC4TF0JHYuwmGuMNydL/oNRwQhcrWA4CLJDRGgmoFdJFc+POfz7DUtdPQ1rWfhMVQRKRFCYWHEvq2998wrKaUyM0QQE0tFJGFelkXVVvOcZDxi3u32+/3j8VHViCLzTdScEGx0G2E+cnTYRoanu7unBCfRdlf3UAgrXHIgaqVufcskJQMswmMMREVCs56BRlKkh+voNizCKRVzRNPUVM3MRTifHIxQG6fTo5ulVYMghPHh/m1rk0gRcUQAd0YstRLR6CPx68TQGZkQ3Mwu5jsyc8TLsUi4fOiAS70gqXJR6ccl+ycYxtAkf0zVI9bz+cUHH3z96vXPfv7ZDz79bvLzZqNgmghCAJGF3AMUImBYTxEBuDPTNwSnpikFggiEcxIGCCAiV88bqNXi6hEuRKYDMQPvYYRPc4uAUms2H1IKRGyZmGlGSJmUgRkvTZeEzSJljC7MahlaSO5eEFUVfKg6AjoRI5+XBdxrq9M8uxkhBLJ7mI/eR+AWEFVYx0gGWG0IC7EUYQAnJE2RToSOQRGtFQQEt1KJuKiF+fBALhXftziBVEQ8nIQShh1qKTrI8H0AGO4ulkjPGEYIxDz6FmEphiy1uTkgSCFC3O9mHcMscytMgW5v70rN9r30vgI4MV9UDITZ8qcIExAy8qu8j7vMhEJW7b2nJHeMkRywmY+hKevRMYrwphoQhBTpd46ojNq3abr5vd/90T/5p//82x99uN/N4UZU0itCJCLMcMmZ70mLAoKpfUMUI2PSv8xEwun4xHDmou5JNuXdGAHqigGAmJsoCHG4khEGknASvExkqmPrKbUupbgnLo4XGw1AqvkvcZUQUy3h0fsqRdTHpVtDQEIE7X3UwhEozOmYQYDMLlIdblbaZObdNcwzlJ4ECYEwc5O4Dw3T20N5vsenV/P1TFNBpoTswQJVYR3+uNjbk7056ct7s0DVzgilVkB0s8dtQ0IppZVi7hdKBxJF88SZmcXdIOx82hJzhzC1TVhC3QBYuNaKxBahQ9211QmRdAwdw0xVB0ueD8ggbTOHyLRrxySeiaZ5Op/PqQBmIgQMBDNrrUJAFlekpFY0tZfElOQxEzt6+uwOU9OxSSmffOfbf/jHf/J3/ubvA4C/F1y4DiFOYagQgmb6sCoiFBFi1tEveVaAanZBSCEEuasONYAoIil7HTaQ0C3C9GI0QxSAWgsiZBglAro5YAB67xszgQYRuxlRJOJnepkEI9S09zHmaddaA4hSimrPM5ZJsRHwTWbP1nuqycwN30scmRDCIcwN3C3CWAoj5l9XVUf7zq388IP9BzdlLh7hhAwYCEEi+UwJUxFOpYWFvH4Yv3i5/umvzq8eFQFFmLC2FonVpu0kp/SI0KFCuTslIGz0gYjzNA0dESZUbAwbvZRKRJ5J0BBhLkUIxVQhPKFzYWEpEH6ZWCIyLCuT+hMbS0Zo9F5KcVKPWLfNPEMBOSHibC3cPMyLlHQC8HuFOwDM86SDex8QMLWmZp988t2f/OP/5uXrNy+ePhFmYoYAI5VwV/d1Xd3dzaVIRlMTARM6EwuP3t9LzGLpQwi5VoRgyiJGqQS6iDPMkICJwx0QkkjPYD0PGKsxSyrkiYhz6MMID3XVMUqpmY1n76XmRQTAAaK1aqbb1okYiUyNmZAo0blLDBtTxGXFBwUzk5mqjeyxmLHW6ZL0yBQRAPbXPqDf/97t3Mh07b27udQJgOapEpFcFrgQMQWYBDYphw/Kp8/r3/r+7g9+sf6LnzwaAGIQApWSDorAVP9EhAshEwY4EkZAbYUQA6AIZ00xVWZWU8EiRJa+r0vCEKXjxyyYKcCZSaS4qoVrH0SXMWqMkUjLtm6J3ROhm6p7dmOE0PtAjFYnvNhLIj3dkSNMXHyPcZG+YCkFCYkIVAPpN3/4/T/+0z97/vf/OxCxLMs0TaVUUbMIRwAEmKbCLIAkfDkMRSQd/Kqa3DuFI3BEEFIVcnNIlpSICdUMMGqtAKjqwiLCAX6BFyMCfIwtEIR5mmrKP5DZwRHAwgqGm/YxzKzVhoStFHfXvtVpAsBpmvMiRAZmzskOAdGBLhIKMLPsnDLczd0IgZDcTF2FOdHeCHi2g9/99FoymRRR1R0ILQhjqE9TQaBaZ2ZCMCEBcEp5MvHNVfsHv3u4PZR/9EdHB2SmMXSMkWxPDr2MSMzJiUVkKo2m3c1Mxxht2nHFxMaKcFICORNt21pS6J1aoiD3SGUVIhLQNE35qWcrPXSEh7AkukZI6gERzJjvvRAFQjYkiccWKeYGQHnuiwgTve/uRXUQUe89IADoe59++pO//Ok//xf/kogPh5taJcLlomHiiywXERPpUR0AkP1a0l4ALiLLsiVgb5fuh5lIx2BmtehdITwJtYv/kyA8AuGbQFYPC4vNNA1gIqX3Hpenljx3aHmS01BrERInx4u72+my6UfTH+KmTCyUebL5Rl32NBBi3zZhaa1mKo8hShVhTml9ABY//vpnr//43/zZpz/45Hf/1u+cj+e7p0/0+Oaf//M/Km36+//9f4ClAUIAMjGCEwpJI0rZv6Pg956+28t40Oqe01OYalZfC69SAwwBCDEH43DPcw4I8zwLcyCHezbe4XZ5oWHCnBA/BFj2gx6BsK4DAqZpYr7I4zNRtZYaECnZTyU8IjJy33p6KQEAA6SIDr0I6dxFSspKsvcHxloLZvqYiLmt61prXdZ1nuqPf/uvvTvpb/7w0943095qEwgQYSZ0h7BARmLKJXOulm0NMRXkTPNN6CXCIRwBswevtbi5eY
hwBAHAbp4B0s5BEJ5Gu9TSCrHaMPfM8ErdZtrhVL1QIGEtNcJZMgGGMNDcLzEGCO6BiJcviNj7MMQxRiklwpnZTTnXjwgjsaoiRLhejjShBUop5n4+n/7gp3/46vX2vd/85PH+zf6wL4LT7dN/8F/+51x2dZoRgpky0I0uwaaceCSRRMS8m+6u5O1rhX8vBQEPb60mWGDmiGQ+1CIAmCn9l8x8aQcvsDirDoCQNJIS5ijQexcpCEnscHhARJGST3ha6BHSxOMXBaa7m13g7vcirIgAushzPTzCpZTLqUIq5QLtmFkgAhhR0u08z7OpEeIY+q1vfeun/+xff3Vz1TjWdTkc9hTglNpMynYELqN7gLmz5PXrqYpn5nlqbWpIWEp9L/dJX0C0JszIjLVeFlxM05TgChHWUgAh8dB0h02tMYuwpN+9lTpPk4MTAoSCX1jSiDB3IrIM61NLv5Wbj75dcgADkvdIuWZr9ZJ3puo2ks6D3M2WcfJwUbiuLtP1/I/+1Wf/u//zP3ag/8v/7b/5P/wf/2FwLfMNlwmQkIS4igiTEBckjnAARfAAjwKteq2FkImplMKMUjLeKdWH5uHMWEQSjq9SpBRmBoRSa2IyUoRSIoyUnzkzpzgAMFXdkeHUqc9832OBpMpWJD8dTOciXfa7JchJTKVWKUyI4QbguUcJIW1GmgMjInoYQSBEttVm6ubJl7dWixRA/vFvfe8Xn/8iAJho3VYptaRZiiUZNczKZ2rEVEsZY7Aw6rAwgFAPjEgP7+XwEKWnp69bOnaZ2UwBQdOxbAMAmSsjg4dDpABmmIlIhDNiqmPBsRZR1SKCQgFgOpDJzcfwRIkYIT0VECBS+lCIULdcqJhL7PLddBsA0PuQIpT7BAqNbomULOuKiLPU8+n8X/7d57vrZ//7/9M//L/+wz/5j377o/+5uQMRMRIDXlLXAdwtSCQzN4FA4czru8e3j1+84QRwmZlIzJ0o8P31wEzuRpy5OnnzWV4JyGSbDdUU9TJy5jQiAjhSmt0JIy5ux5x53V2IEiBNjayQEGVAJ8HFF5miHLBuQD76hpi9IAny6hs4DO/hgXRxKgKgm2YfBhBulpgQQUayGLgvfX3+4tlf/uyzP/jDP/nrv/l9fq9WRkRwU4DM/AZI3Nt99DXlU1iEgsfo4SHMLDxGpi9EOAgRAiQ8mpZvN2Mm0/SOFVUz3TJHjZCGjQCQctEN9tEpskBf9gP1PlprpUg+gpJ3XQrKBMkTUQUzRICMqhjDaynCAohqWjCNjo4EiFiYww0cAzwAl3W5mfCjK4P1fH/3bF/xB9/affwEbvnjH//eb799+6oua6n15cs3+53sDgcptTEWAYxNmMCOPt6J3lfwLx5frHZbCjJhYieuRogBUUrKYFyEiVjHYOE+elKVyWcV4VpLuFs4Aow+AOMC6gTWtFsxY2rUHCJMKLWqFhCMlDEwY3QPZ3h/b7330qdD8iJ5YHL3PjoRW+hF/U3k5gHJ+OLQkQs9wiEpNuFLlQWIWksEfu+Tb//X/+ifPH325Lvf+kDqxWOQXsHI6AJmdu+UpzICTN39m0QSZnY1V02DEYsgQIQXKfR+fIsIM83ytw271MF8ltGJKJIrArz0SeC5ze4yLalN85TnL8nd/0Bl5wkCqep7XV/1jCpPdxaEpYACgUuJ8GmaIEkooggV8O/cxlN+fPvyqy9fvfred5/uXlwdl/s249/98e3NzTbe/DlUXmV++Or+XZl894zqfqos3l+UV3s6VbS+ngDp9um3n1/vfvSB/dmX42Gz1lotJdFVInKzCBARQkzE1dTyO0tpbECojjSP9rEVkdw8h2hZui4FK9/PHOtYsluPYCmCgOROhLW2tB9GXEaQNJKamaU4hykAhNjdhxkEZkAPAbBQQimllE2BEJnFVNPvphGIkJJcEeljfPyd7/7e77798uWb50+fSOLI2ZZ6qDAjcLilC8fcmRgYtr7lIUuR+fl0FsmQZQo3YiJmDwMHJkSMhHZSMpHU92XnQeQHicno57JKRFq3XkphYgBfNy1FdHQPYGZwGzrMgS9BMxctTmrE8mwxYQC7+6XmIkZEggjvfdnFTddlZfTv3sIc969evVb3/X5GUBGa7p5A3K3n0+vTKR4WMNscr8T+6rViu1dH7X4+nr57sz0/0PMPrp8/Pbx48Xy3f1EU/9Pv248/PvzbX9q/+/njtq1ZBSIzusPBECX37SERWe7k8yB37dvoW1errdZSc/zOqaJcFjkmVUwpEcmvzFLMLaEKA2fEcHMAN8tq4Q6FORMgmMgp8ihn+57DHiLXkifbU7kGEFykXQonKkTS5BFepdC/947C8XT6/icf/+KXX5zWVbLXCbdcnLYsSwZN5Da/PFh5yJg4NWVEtD/sI5XIwm7KwpktggB28bUF5Wbhi8oARETNLh4rAGFOvX32bqVU1eHktdbD/oCIqiN37eSulpJRhJbreRL6oZQG5LKSxNOIsUDJr9zHgPAASO09EdZan+36vupyijLvG9HOJsQFIQiAhG9ub5luIHTTWLbQHs/FzNEUCHH3nWc3B/zwZnzrjvbzVISRWXgiGh9O/l/83s23buv/5w9fq6m6l9JULcKY2PW9Kw9CUshbJS6GO2gTHU+nvL+llFqLqyVKnp0yhDGxXFAiIyK7kINoQ7+5oYnZ3Bk5b4TEizFntEsEAOjQgIAIYXQzKaVMzVRzsQGLFMEMh8iJLOPY8kpAhHVdAUKkPHv69MNnTz/7xa8FEYWYCmaCnzBn+gQiARh9M6HRxYYSxLUWQtTMlMrlcx5DRybA5VTEuc3DcesbpOAwLpJogDBzllJrSVtnvikAwUyjd2yTEE3zjBC956b61sdItIMYkxWRInhJonIhTiY2dbGXBAwiN5t3u6Eabu7Evt7spZW5Vhm969jCKpcbw0VgpYxdg0DCqfLcEJAcmBCIgJCYYCfLNQ+hCZECBepB6lX0bqrA/Ld/6wlL+a//9ddSZK5Yd9PjaTXbKoPF5EAY6DbGJSAmzDKlz1urGEHEYRpISAgeUgoCJEhNclnV8P6TCGIYo1+I28u9lRC+EXMAQNC2LmlMiPf2vctvMBJCrtykoIhMqWs5T+em5eBgtyolt8dfFkUU2dbVLR7p9Ls/+mv/j//X/1eSuWWk8HG5aZiHDsnND6USESK0WsfofVMkQ3BK7Ahw29bp6qDumbaietHkE5BqZyIQGUNJEIn7ttElzi0txTxN07IsEa5qzExMqQwxi1RBZguUM0XNvKa+JZWxrT0JMlMz91YLI2692xilFvdAwlprLhVcR1fdnux8rrsqCEGttjGmMTozlXKLsJk+MmxpcwcwiGA0RiPMYDFicIENmUEaTi/o+hPYP4+xeH8bVLjOwOV3ftCOr3/5+It/9fGTmw8+/R2bP92G3R6mz16u/+SPv37sesFD3RGRM6+NMGXMuaC59w0BE9O6xEliqI5MrSDGcGhTS2+4W6RRVU23dSHmVEEFXJYqIWKC44TQR0egZIQigDDcTNNuwRzu9F4gn2AOAlg4BAhRQNhwC
JznWYdu23a13/9Hv/tbEh4AcD6f4CKoLgCARKnbRYJEq9wdMEqV9IhjOKKUIrXU87LkCkgAzP2Pbm5qSJiJO8y49Z4VBBAuJ8h9aBehViQyvo6wlsrE7o5EEE5MPgIRw4yJ1VREBiIxFSmmmg4eQBijp7U0QzAJycPDHJBMhw6LiLBRYhS+ynD0ACxt2vowHYBApWK9QnBHA+/hC4J5mKARIRIIU608TdN8uJ72z8r+WXAD01ADrCQEWAIrV/hP/ub3//D0By9/8i/GV3/40Y/+u5/++L9wlN/eb0/28C9/8uZnL5VrI8KhWetMSnmPWo3sbLIoZ6mKuAB1KJIOTMi1CRfgMZFAYOLaWhF5P/anctYBcZqaDnUIBMyIiKR438c5Yj4eeSNmwMbQUZiZ4Hw+EcvhcBhjEHDXXOIWEDDUvvvdj8XdA7y1XNaXOkgDz3EsHxW7OFDVs5qkfNjDMDD8spAQAiFXXw4zN5FcqAhICA6FKcN9hNny4WMipuV8jog6NXdPyER1IJCNXktZ1xUuYQORXiwzyysqb46EWU2t1ZZy9CLSWp1qu+hxEYYZERAJGDVyDCOiQPaAVPQEN0Aw0MJETJUxE1ccAgMEoxVq7FMFkbKbD22aEMkc0BawERFYd0gMkFEbEfXqt//+/+gP/umzr3/55+d/84+VDx//7f+hwenFU//vTfH//Bdf/uoeBDjy2r6osLG2kh5fRNi2Tc1rFdU+T3OGQea7SkjqqR4cpsacvPJ7MzVmgJJjgKq1Vs0c6BLFZ6pmlnaqUgoi9tHzi2MuuHW7hK64n7aei6Ii/OH+XQCez+daSjpciHDbtlKKpMeCRcDw8fGxtqm1dulwiQIYc5IP7rFlKVEbZkqIQ7W1BgYpeg03RGKhGJ7VtNWquUDVdFd2iOgRmXMICBDgoH2MvFE4l3+pJmgLgMwS7gEBnJ04ZpBvYmLClzyUBFsBYOaZmZjl/T5sc+aMUYOAfYmrXculk4kXQCAyFgBALIyVgtELRRFOfKLkIuSSODQy40X+F4YISIxF0jcIcQlwCB9uo7TDj/6T//y//Se7N5/92+mP/9+33/sbh49+683PvtbHX7wob38ZL9S0VgGk9+u98iWkSoukELOzZAsMmVcB+TLe13QAJ8S8d0uRnN4vGbbI5/OSaXmcyuWSodXd7aJEqKUOHTPPyYSkDMjcpHD2mvv9Tsd2Oi/7tmOq52U57HYJprCQvKcjRXVkCrF5sLD7cOO8YDMsgYl1dEBqrSXlpsPgErUZEYGIAYD/IbdClC912zZi3u132cxcrugmRKw2IJCEZ56RSIqISLjP06x6kQemZyzljiysF5LSMNA9FCwzmAhJmLdtW9e+O+zCPVItBACZ2RDQYP3OM7m92bVaRYSEAYjz8kZiQgYrhMJQGISQpZRSipRc8SfEF+vRJRvMCT1lb0jkuY+iIITG8gpJnA+7Gp98/wd/9Rc/rfb2+Z/8w6uPfqPxu+Obzz+4vYOvA5kckC/DqQJEvsxaa6o7DBzCidgvhhkHC4Q0AZqtNnQULokWQmYTuXEKrlVLrRB5GUFhLiJFWGtRNSb+DzSAkTZOFt68uxviLvFPNZVSbq6Lqi3LUooUKYS0jY7E4bGbd+u6ShHuPR3a0mpJn4OZYUDfNgBUGJixU0i5A3We2yWFgyKlIEWSnQ4EzDRhvIip+WKZA87kaMTMa7LRNf8uIBUiJhp9y0hKZs7dke+3Q+5UDSCktXDL8NH8kRgPC0d4azWHuMd1OVxdu6O7AeK+lZvav3V3fXu9o1KFBXNIAUytASASBIMQcy0kGIwgpTARc7nI+SJV9gm8BZEABrgjFSiVAJ05xiM8/AqXV3j1Haz79d3bD779Yn9z9+7N65c/+6PnX/45gFTEVncXKeoYQZjZs4DofaTAwcwgFDB675km01rLfCpzR7dSShqqpBZAeo/4EzOmNdbNAi9xHwnf6tjykNUiI3dluKX8CxB0KBOVSqo4Rlcd2XoMs3Xbkm0gpjF69toQ6BB9dEmuoNQCAebm+s1tiSyMEW526XwxlfVBgVlK25Ri3mASItq2zcxUlYnNDN2DMFnl8EsiTC4cFZGE/oiru48+FGPoJeeFkMI9E0lTymgORDjUUox8+UQJAqLUafRhI2M3KPPwU0sJAOC6o+XF3u4ObZqpTW2aZ6KS9NY34IRdFFGBhOYQiE4AasGY4dUp48Nk8TxKkdzBGWSuq+spSh3v3tXTZxxbpPij7Mf6Wk5/9HS3ffkSjsf+7vN/fX33bfP0vyABSilD1RL+YGai3jdT2x/2uTmQiQAxLShpBSfEPkYfY7/bMbu7pYGYkYpgBAbAum5ElMjWq5dfH66upjZnlHu4SymliHssy5JWmQRliMndEsl0h/RdpVUyE4ySocoYBbVw89N52U/T5QDmsQVEYdahyUrW1rBWQoAIYhx9eHgifolRUlro39/Db96+fXr3JI8RIkaAqgkgMpVSeu8MnF9ZpHxDLFNDQHRzpku4FxBPraZUKmUMyUX75eV5ay08RMTcqQhzDYDRh7u3WpFo27Ym9vHV4wz3tVSRup9L35bex4sPXohMftnfkNn7rjpGX103dQ3AdP2JcB2bsABAFSbIZasqjEyEsca4H8s7geFBrTYqF0oyfEB/2B2u9WG9m+0tuzucXn99d3NNvv76qzcI30LMjFVzDybKnL/D4SpNIBf8t7YUWCY7yZeMwLkPzb7HjJioR9RSsvDl9iNKyBfj6rCLcMRARHMvUgApAohwnubUBxGBai+U8stCiAQXK3ZrZZoqIZtZ2oxUk1+jNFwMNyHiUmDLBQCU8DkyM3Ht28ZFEkKguOBXGTqRhpt0sgZAxoTd3tykFDizldMPambotqoxY7qiwGmMHoBTa0mM9NEhIOiyCRpTy45k5h5RasmTlEFxbjmUUYrL4JKkTEVKFvWtj13pH8+/ui6ru4LB8u4txfnb3/1th/Llr3/10bc+3u0PFxcIXJ7MlWJduvdH65uDA4AidmIkClOmIC7btlZ7a9s7QprmMosXHAAA1Nr0HD1AJgAC2+L0xk/3DPjkSl7tSId/8cuv277+2a/sr7aDCLvp1nOdNrNwTppEmWtmEMHEGN63XmrN698hmJLDDjMgomybs2tkZhHJ7p6YTLWbzruDmQF4KTV6zvyOgDoMEFn4fDq66W6es0yXUtw8EV1TJS7v9TbEwq9fvz7sD6VIAIjQRZKVEfkZdpGVCFMgHp6hlqam4ZrJARppY/D3433voyK6x9DRas3rIVM+cqAAwD46gHpkqHZ4+NZ7q01EEIkohLnVmhDWUBWUImXd3ufx4CWJl5FlEoggwot3A4OY3CLlFFnvatVvlS938Rq9ECBLaaCxfPWLn46PP/3xJx9/9ItffPbJ93+43+1TcYUIEciwI4QlNGyz5TF8RIARZzM3IiyAfNmX0zQXHR22DQJBgESQEUETUwkHcB3L/Vi3GiJ0inG0Mez85vP77/+b84/bNAnhqr2Umr1cRizmfNfHgIDsc9WURbLhC0i5HyCSEGbMHjGH
2zAlRgACgAu0DZ7rp4cOdy8oZt5a1Vyu2zdmUe2IpYigSCk1da7uFyoimyozh1AASL7s5uaWiETYzZCIk0ECAHNnJs8l7xnmNQZAJF3Abo+P523rh8MeEbdtI8xsPUSA/X6vqgkx5w6YmhKTixTc0uyMAB6W70StZZ53fpFRAiDO0yQsA0dclKzhEVNrYwwCHNt2icQMD6f0MWajDQBtmgACMHSYqprHs/15Fy/BVioF2p16rbiynWPc/+qzP/vuD37nB9//9Bef//w3fvO3W6mJYQZQKRw+RdwCEoRv59duGq5M7JCBMFSpYy5acE8nAQAjImMwOGJJqbiHo49amLWo9vX0uJvr/mr68/vnUpqNYYgZ3MYilzWSEa3VoRoBfXRhAYQwFcAIIC6uJiKmJsQOQIjgkRFWiEFIOjSXRvklZBQu+miBWqqqLuua8TGEyIzCDQAckQnTs7BtGyAqpLKZksHMAf2iB4ookiJaBsDwmHe7S3t7EaTZhQVLt9Hoo29ryp32+13qCkoptdacDlJ5k7nxagoIGQuae41T/1pETJWZLvF1iGG+rgsACBOEC1G4m2ru5nkfhoKe9D0kj0uXdS5ueXOmJZQQMwRiDPU0ZQu3/qv+8PJ4Wjbk6+vdYV83xXMnV49x+sVPf+IR3/rWh7/6/GecGyjzBzIXaa1N89V89WLaPUmbMIQRBFMQKAL46BRepby/GyIAVS28RyhkhDoA+oq6ANFyWhhM2u4X9KOvTlNYT7c8RCByWHDGcBC7BQLW5FfD3Wy329VamOCiiYcgwD5W17FtHQDAo2Qnbin/pXj/IzGRhObHGJnSKsxDR1wcZ5aD87pty7Il5dzXLd3+icL0vp3P5z6GWUZfgma41PsIh9E75UNDRKVImyoLmxkhqNm6LafzKVHOIiJErbYiUmottRBSreViKyZMGSXAZc9Ibk4hRO1d6JK5qWruKcBrRUSEayksTKVoRjeEz9NsbufT2cxECiSXHtBaa21iuUSVQYCUkreum6f2T1iYrMH5oZe+//jd1n79689JXz+53aHsBjQ11LH+8Z/80c3tXZvam7evOAfFhA8QRXiqZZr3h5uP5sMdS1rigMAJsr+mlBa/T9cCRO6WkedZRxDBUU++vuodPv/i9HLc/aT+x7+y74dbXvuZBeum5pY+aiLa+hbhfWwAkO9MfpYJEAdEhoREIDGsa36uZq5FShrKzssCkLGnYGY5KLj7uq6XdgIx5edIxMzvQyxLa5NalNpY5O39/RjdzXvviFRbOx5Px+PpfYXtAEDErZa0/gkiEL1PegIUFkdzNybc73Z5hJO6IuYwA8u0jeZm6WWstSJga5OqMoGBNyosvG29tTrNc7gT07ZupYiZz9NkAQGRFrsm8+hdpKxj2Kp1mpilNkCAMQbTxWqZK0JzmGytZdSGeQizeaiaLuv9/cOXX/zq//7TP+rcfvRb+7/ze9+/+uivvx1vDsuXz548X7QtWxSy48PLv/izP/3R7/7+5z/7q7vbW0RMzJ6IKMAhptqMkOkDBF3OD5npw4gAbDQDOEduVk+6nna7iUtFkkhXka6w/BqXr6E9+3X9vV/eHRiu5X1CFCEGBnjaE2BZV2FGLK1NhDisE4WZqlmttdXmHuu6lVIkaW8md9/NuzE0MN7vertYoVMKr7kKDiHPX7a96SgvWFSHv3ck5oWEgHmSmfnVq9e3NzfZ1AODql5fHXKAR6RwcMxYEgKk8BAiGqMTESJJyesHt62nviIAlm1rpUBEmh2lFO+hYxChvWdbLgmS798gQhhb1zFqbYFBTBhQ8u6qaG6FCBGHmtQCGcqho9V6yehkAoNMMhQWFvJ+kWy7KRHm6YlcKeY24/EH32Jdzid5/Z98fD3+1n/8xdvHL1+e/s2//HcPn371e7/3++pwfvOnt9/5ezf1aX/4Ndny+td/8fDd797c3b198+bZs2eptU3BiUiBMGTENuPtRxG+rQ/Jb2MEBDoIEhpayiUJ2FQD5vjGwdXPdvrl4/H8J6dnb/jbiz/ubKAweZo0LPfPr+uWnOhQBVgBQ0pu5QV3KIjJK4eDiOQWB5bLgAoARBnAyMu6XsCIMcYYyQdIOAJIKX10Jp6mauoXRBvA1AwNLgpPyHDt9Mz8td/8jUvsJF82a6U+5JJkR5FRdIzsACgoaRsoSBG+LSsRW9gYI/OXMaCVizF0DCNmVE211zxNmSDhENu6TNMcFkSYEHYpBTnBpJqgkZmxOJMAcRExNykS5gZKSA7ex6i1zvPsEcMsA9SywTqtp2Vdp9bU4uqwT4Yv5U635fTh9La5yg4Oz5+9ePH0xZODCITam/vTupz2+/75r97Qzfjgdj1vD2/tWGGB/ubzn/yL3/wb/+DLz35xd3d3aWbAETKAN3tkoFbg5ulDjG09Qv4ipkcS0x9EzHmNI9LF0YLo/fj6fvlHn//wZ+ePmGxqVaSKlHwfclcaMYuwmRJzYj+MbGoiMoarWasl81ZEhIEAcIyBmEHssG49g6D70CKybT3CMz/aVANh2zZEZDKAsIjzSUUKOkYEEiUNVWoNj+5jXRcW2e/2bjZM5bIilyOCLz4oHH0gQkb0lFpyCyUhyzRNW9+ST3GIVgsa7nY7AiSCvnVmPJ9XqdNu39JyBeBycbPzZW40cLNwcyAbPfvfWqqq5UcdEbU2Mzfdamu5lKRIyai9ZCrcfahKKdu2EeA8Tx6uI7fdUIo+s2MZfQCAe4CuT6eXghYRY+jrN+d3j6e3jx9+8tEzkbq7PlzdOEN/+uThJz/54vZDuruZHl6NcIV+On7x5+8+/q1pP92/e3N794QoQsPDEAwwebgAwlan6+ube1tHXzM3GQGEAiEYAi9GxwKIwBWQAPztu4d//fW3X/EPb24uCq9a6rotZpZOK059J7PqxX7l2e+Y6RhSKjF6gA/NPzNGZxKWFJ4CIrsHFgyAcAPmq6vDUD2fTuu6Xl1dtVpzWE4BTKKz5lZKcfV0q1ERRArQwiyHg3us2woBrTVOBwPm9I4iHAGSax49xugigkyjjxCXS3vlns4eM8WcTREgoraGELs9z/N8MYtd9qGYmUqR9OeWUiCisPTRCZkrmVut9UJTp72NsLIgYoZw57onIlRVkZLqgtqam02tZShpIlKEVIqYGxHm9VuER+/mcWhbpQ0CEQaDOmLfjl9++euplg8++BBwMgAmPDyp0+HtLz/72Y9/7/nt3d16fFAuvj6+/dWfffT933v3+uXV9Y7g/8/Un/ball1nmthoZrfW2nuf5vbRMCJIiRQlSkxKylRmZbqpMiqrXAW7YMMowz/Of8CfqmAbhoFyk0glstJKSZRIUSKDwYhgtLc53W7WWrMZY/jD3FeojwQvAvfcs/dac47xvs8DhALQzIpZoy4GkQJWoqdpTPu6ODLQPv3rkfcGhuQDqIF5gHP89+evtn9/eEpRHLtmqqrLspCjtq6icnlxhUilZHTkPTrna63Mzjmfa+4Pv+BDq43Yee+lqRAxk3OuLxnWnPvyUVoBRGZZ1xURhmHY7nZSiorGlIL3fWE
DZqUUQPM+EFZESilB57G8xQv1/9m5sGfvmJ5fZGrGROcqsFnwoUfa+4PTdcOS846IliXnWlPw7FiaqIF3rGb+PGtHMen/uRgIMPSgWU/wIwKi70Hdnv4nBAXNpfRXu4g0s1qqC9673sJGU6uqbFbP8i9RM23NB99a550BESO44+FA0amp9wyq5Hh0IdEdQjYxwMZcnRmgL+X49bffxmH36NEWTECV3PTs+Ye//oe/fPXq28dP3rt99W3NSdbj6ea39v5HtRzLekghgKlpAa0gItpUsrZVZTXJAepm8LVkYkDUc1SLwLFDckjn0lY38tzpU8UC0rIUFWXnexFus9muee2+4jNpj93bpYS22udb4JzzLkpQABNtMQV2b299yERuSGcGQKXSEVsiDYm9czEl6SYNAOe41saIQDgMqQeLg/fYbTkICIGY7KxWwhhDx4mwYwHprpZaah8vE/U+jIp0hQgyUx9BY2sNmFptRDCE0D0mPezcA3KlVAgA+ja+rQBgIURw/ZyPaupc6C7q1gspBPPxSIjDkGpTOmMYwQdXazW1krW3AhyzmmKD3lcKntUE0SNiLZnO6wbwnvM6t1p9CDElIsp5Vb+ak9bWBkqDS9ySmZlbjjevX73cbnYpBkNgs83menfx5OsvPn307KPd1ZP19FDZlYfX88M3Y0zz4T5cXmrLqFWtqTSQqi1rOUI7gYqIOQJkklZV1dDOLnZiADZQQOt1JkBGwhhjaR2j7JCI2UtrAMDUdS2+lAJg8LZVg9AIAzF570oppa7DMLVWAbmzRBw7AGMDJjQjaeKDQ4yAXaneR2ywLkvHL/a9QilVVc/tBiQBUVXumWszdtwPD4jntgb3z0o7rylTSj292vufTZqpMTvoc1UkNaB+Fun8WUIyEQQwgBAjOj7jIB2DQmstl5LXsubch3qm6p03VXa+J1/74QvpDEZ9c3vbRB0hmXZoUPQ+eocm/Vojqq2WDuMlwFaLis7zvMwzI3aV3TzPrZbj4fDVbz8/Hfen4761lnNOXK53KQ2bmJLzlEuZqxpDDMDWbm6/vbt9DSa9VOW9v3j07ul4un399fby8ThOPqSW1/vXXxOc5oevJB+sHqwt0LK1BdoRyh7qAWUlyaracgWV8wfazKO5/kUBQ2QAAmsArbcpWquE2Ko00ZxLKbm22lpz3qkZYTcGFdPztdzAeiHazFx/ILXqveu7uv4JMIOuQmu1ImH/FSBSrQXe7vWC98Gfk6+qElPEt02HfvRqrSr02Zm21uB8ubRuTuqfth78KLWU0mOftublLKdsrZRczgb7ZiKdlGDO+R6v0beMKmQys7NFmNkMyAjMhnFwzAbQwaVImFISVSbUYwVC57yBEYI4l4ah1RaGgRCwgqkZWQyhi1SbqjYjYnasBnmZe8Ifev2lz4GVx83EzEDue9+/7Fl0NRu9frQ7PN0CYVJlqaGWrLLmis7HEO20nl6+/OLqahf9AKBMth2jD/HVb/9+/MGfDNO2ljWf7ub7109fPLmfD3l+Hfv0VqvVxWSFukrNBNbZen2V26OHngmRgTyR68QFAAUgMwETNculpuQNjIldtzMjOe/MxFQ7biF4JyLkuGMkzKyfuHuOhYhqO1cu+z2jSWsNvXcKYCK+37Fb7ePW/jertYI5co6g7wawZ3Uc+reVUQeIfejvHJda+yGhn0Y6If8tzhZUhR2P03DY70stvdZIZ7MnMIGaOTCws6sRvHdZe6tTrUqKUVSltf586vsOAPAhAEDJmZkVVETYOxN1PnjvvQ+lFDNL4+R86CwYAAwx9oaAEqWUVM20dfxbrWJq3a3Bzm0dhxD6i9YHbwB9FiKqrTYAE4WUxDPcP6wEx91uE8ZHYfKm7TQfX93cb5Ohyf393d3NzbNnz9AaWtG6qGFrZRpd3V5+9dk/zMdDQ/flVy9/++XyzpNLVQBrpg1qBi2oq7TWj5isriqZqQAidvrrWVFNgAjUTcv9FX+OvQIEH9QMmuacYwz9Et6a1FZVWj8h1FIArLVeGyIz7Tdw7ZbxJoDgneNzOkzmeQEAaU1VvQ85Z1ULb5OAjqi26gDWt405U+2slR4iI6IYwpoLETp2uZTSWvAdN4Bva9c9FN7r1MCOp2mzrAuYIVCPrveDhffs+ngbALtluLuVaqt9Oo4A7i10wQxraz3Z30R6DJmRmgjURogxJjMtJTdRRHRA07QRaQC4zMdeq+48oVorsUPkED2aEQI5brVJE++8mZZac84xhJ4bbK1Yt+wyBx8AydsdE5aCCOnNm7v3vvOR0jbP99LIMczr4tDU+Ob+7vpqU9dlWWZi/8/+7F+kYdjf3+8fHtbT8XCYR45f/f3tzf3pP/3n322lku7VyKShLdgWD6bmi1AzMG1mauT7xKc/88+1SXLQCeLYfRsYY+ofNiKstfVkXCkFsUdgm6h49N772mrHtPWwjDQpObvQFbpATK1WcJ6QrCPnTUV0nEZTq7V47/htYbTWCszM3F8O3T4gZ+02dLPAOAwigqAi2NqC0EO00q/GBIYI580aMnZSVG3901Zb7berdc3OCTHZCh2UwN777poAAHLswEIMKl2Vif0WB+iWde0PciJSMyCQKs47fisW6Z00dtBBEIDQ31CtBTVAxPOclKnW6r07f2tViMh7750DVEJy4DBhL9mv64LUF72OiUXVQBAbAsQ0mmkr/Or1w/W1M1NmYoLatLbKGN7c3HhsL56/e/X4HS3L3def/exnf0/MH370Xq15XpobWhjht1/e/OIXP7t9dfvP//C5cw5aBqtNVMwJooBrJqoiQAoWmAzJ0BAVQQEVic9XsDOIBwmBkVyMJVfPnQx2hnr26RwHMrP+xqceXmDqRLNWa2eNxRhrrT3LsK6rcyyiHXshoj2j2D9T5/eDd875jmUKiIqw5tU732Xh/ZR9PJ1qrV1x0V+XfdfZmiEiO4eA0llWqN21QMSdOsfMgASACUlU+urdMaGovY1zi/brX6kl91qgP5+dWq1NHDuffD+i90kRemRm71zOGayj7Lrm19BTLaWJbsP2Yb+/vLomoBB8KbmW0h9pgOf+kNQzSV6q+NALKiCmvejvnYPQqfhccilV0nTOhCMi+6i6ojWi/oDjHjUxKTmvn39x8KhYjvP9t62sL795vXvyLhKXYqesU5Pf/07Y+Gegp+1AAbPko5k0haZcVRSrWBNwAkQh9fc4AzhrDIjEYAKEQM2gIAQDBwSIeppPoVYXU9fsap9WVfEhQseQIUrrjzb4x2OHc/7i4rKvvmOM3FrwwbOr0sQs+YBDEpUYYq1nPFePTneAfA/fMlmpTVSAkAg7z7D3nd/CpayXq3IuZgYIKca3fFVstTnHoucPkDsbEAwQe+RLmjh2wCyiTkTY+X6B6n+IOYzjeG6PtmqqrZXWWv+kW7FSsnN+nKa+POor3z6EdM4djodW22Z7QchVG4A+3N8hYl1zTKnWQgSIvq9QWqnB+Q44653tPu9HwA5bB7MA1iew/UclJjYqDRHQQB7u7zbbSwCuZRHA1uq6rnldpxi0VXbVkR5ff7
odh93l1VLq5aMsZq1UFV2LttYCth98MPz8l7c//uiitnb2bQHU1pqIWang1DvywfqtFYBAGBpYQPKEYu2eXABy4CcEX8tcq2w2WwCoJXPwSMyAzrmLi808HwFcrbWv2c+WI9O+5XKOc2khhhgCMXdClJmpaBpST0EhqIh2WjISntEAjqGfwAD74Di61EN/HTl3Ljw1QYA+b2MkImxN5tPS78593dlDRaYm1hDJ6MzkijER0vk5i4hETcSpChkBQGv9aN0jY71TgWtu3jGxQzWptUntPj01847730GlIUHLlZiRKOfCxNLqquIdq1qKyQePvUnbt/EiSIgG7pziNCI0QPYkTQGBiUqt/cmsZkRAzMu8dAJrq/Um49MNJ5Zps0MEVSh5ZceMttls0GrT1s3Gu4uLJztPqKVZywZIZlpyvjuU/bE9eSQl19NxbVKCg2UuHZ0hYq1pE0VyGifkpEYMICLUHznMAj52yg+RDdcwPJO8cJ1/94ndnswM1ryAGjuHrSMQ4LC/B8B+7AMw7713QaRJ0yaaYugkuJ43DwSmYohNGxHXkpVdLqUftjsKoNamZ+wGSqkIMC9LSgk79pUp59onBa0ZvLUdtCZ9WCda2fHF5VZaQ0XnuKdJzzAdsBDDvOSzmHZdzj6uY48pSoy9olXPzhtEaCJk5n3oE4JpGG9u39zf3+22u2maEM+YyM120y8FnQnl0Klo8KHWst3terHV+9Ba0fPVlFW1lwpOTQDAwELw3vkeVxWTFKIBKJqBnZZFRGII65r7ZrG2pSehIEZmbpa+fNDvXiE5h2aErKqBSZUdc13raT5dXm49oDRRIQMrVXKWXMwFa602xVx7HKz+7cdvbm+WP/kda6WpSlPtFUdg7zdbDDvDHp0Wx+ScJwYiCsFz3Kjb4fappQnKAXO2Jv/0u/HFNfyHXy+fHKuPSVsVQzFIkR3zaZ679E5VmUhNgndmAsidT+jZocdedgDELv5trXrvEaEHwP+ROyitiQE65oYIoGYxRiRuIsyUc9eumQEGz4bnUnkf1zHTmqVPm5iplOqcc64nfLA3c1TVe9cPxH227Ng753PO/cHmQoz9ScjkAW1dc2/nA5BIm+dTKXkaJ+fdvCwds7rZbhFxzSuYppSIcM2ZiUSllNKanOflJthhfc4RUV9m6dvIQ9+n9vRCrRUQ5nnue9Oudg/e5TU7x6atVmDnzOzhfu+d896r5JMM+4oXYQYzYrp/mOurPZh99tm3n395u936P/zRJg3OVEvOgJZzm5e8P67vPNv88vPb//v/+PXzDewX+2aP//av7t7b0f3DqbVsb9uuPqQQL4xGBPPMCiTIPnh0ZIFx84SnR+omM2QjzBlEgSZzidg+fG7Pr9JvXk1/+enp9QGdC1pyKcXMI8Cac+95pRBVLbfiQwDANWczpZi0Nu9ZTMnIx1ByBYMmrYqkIbVSO1OwiZhITMlE7u/vxmli5loKmznvzz4G01YFiIVA1HwIyfsOLFNtMfjaWq3SS2SlFpHWT+L9Jg+GPcnfUWBUqht8f3b2UIrrGWlpjaOvtYbgW2tNJfhIzE2FyXnviDrxmMdhQICSl2VZenXorS+XSqml1Js3r1rJ777//jYEAFNpgqiqeKbRAjOrmL3l3DjvnGPv/fF0VK0Bgxn0EUXfvHoeci1NWorx+bNnesbBICC+WceIS3Kqqvd39x9//Mmvf7vPpQ3JTxtfhKp61LasImZrbq9v7h8e5g/fg//hf/z0bz47vvijSwrjLz+7/dc/ufjwmb+924sIonrP0zT54ZJdULDjaf36EChdProYXozEw0DpQtNlBo8NEMyQHLEhE4lQ7lakxvzikf03Tx9/8rL91SeHh33x3mH/R0RERBFdc0YEJlryGrzzwasxgLEj0Z7cwHVZ3rIACQEklz565nP7i8q6MrP3fl3XlNLZBwpoZjXnYYhFilYRDEBoIs1ARFXPk0N8i5QDAO+6uL2pGhFpq4iYS1a1IUUwrLWZzd4HM21LUzWnouxcfxgSUmvNsUPqRxTcbTYtNUTwIaBZO+NRBQFTDE2k7w8Re6OxIcDjR48RMYaUS2FmA+xTzp74t7eeYutMibeUdUAEgx5Q7ulW5xhMg495nRHJszM1BDTpiDh2jheRb07pku6PJb1ZL4zSe8+xiprJo0dbpNAUpOmc11zl5mH52394uZv8q5v5489ePd7F3cV0nFeHp91l+PJlRtDg4GLjx2FycXPSoc5umujNSZeCHz21MRGFS3CjUSxNSaRj4Tq4jqC+fv1FFd1dvQhxcFxM7r3HH38Yv/Nk929+7v7+0zsRSUMSkWkcpbUQPCE5700E0BCwHx/VrBN9KXhm19HaAigqquKD19Za0xBDqy2mJK3tdru1ZEfeOV7WFTufFGxZVpE2DKP3znm/rBlEOrrZOd9JHT74Ukp461roswZEdOSaSA9KtPaPmZxWivSKqjTpMYkzlrZ7Zc1UBYqUbrbtPG9msta6g7i387VpzoU9mUF3Ufd3ZKsQYxTpJENm4lJr3w31ujEhZ1mJGQFrqcR4DigRDsNQckVC731XRoopsCOQXmzt3Z1WCyDWZqb6YOm2XdeqNL4/PSvvwleAJgrTtI0pMUg+HW/2959/e/zbX99Kw+999OT/9m9+bWLvPZ7mCl+9Xp9u8OMvJHp4fu0vdynEgC4Vc3cnHUcNHr/7PJAbzAXhJEaAZACiJiBEVEVJJZfT3c03adg9evbC6wGOv8H8jdMTyR7MPfLb/+YP3vnOoyf/w0/3fRsjZ6czNG0AvZ0NpVTmcy0LzvuybGfDdVPRJjoMQ7eS5VIsZzSYa/HOretKzGbamrVW67r2t79jzz4goopmyaYm2pgZAJq2Na/O9dI7qmnN1XnXuzfd90iICNhPhDFFM1W1JtLn8GkcnLQmiIjI1PnWVHIhxo49fzukglZKLxyua04p9YRPHAbHbpnn3JcV3negZPCsZweyqUoM/twQYBzHqVUBgBijGbwlRp4fKiWXfj6q0h9a1kthvR4WKLZSAIDZVZXzowhZKXrXallh82GF8dq9QmsxTQj2zRdfr/v7h2P+7NtZDb//wcXn397dH+t3X2wuJp64TYlqo8uNe3ThtlNgH4AckvNE71wheWdAFQeAEXRAcqK8Vu12CSYjMibNy522dvHku9sosP+5LZ8znMBd2OYHoK/h9qeaX8L88k8fvVt+/3f//ScEaHldAcCH2AuT69qc4yYNCQwQWkWkGJJIA+ReMK+1W/oE0GKKb0mm0tc+HTfVN2veeyZ03tcCPgTn2M6YeyDCJgYAaqoivUl4PB37ybKJllpj8Jtp00RUmqhK6fy4czuszwukKRGllDqPCFS1ljnGUEtGMN/5/p2qKqJAPVrb92W9UgAIjpy0GlMQ0WWZY4g5l+1mqq06x4hOTFsprTZTHVIkRwbmg+ut5x6vJCJT608sM6ut9rVhB3mZmXfBOVdaZWQCrK3EGE1aH3j2xgiH5Jic98RXB31/Uz85Hl7lJd/fH2OIT15cP30HwArWOXH4/rNLBHAMCOA9D
IGHyGNyzgd2wXkGcoDclBAYzKG43rmO3hn5qk4FvaOmwGAPt98g4uOnzyb5il/9LZWXRsGmH0r6UHhD6XdD+sjm1+h3hvCT76Zv9/DxN4cYo4p055X3zgABXP936CPj/pRi59Z18c6VXPrxFNFEdZkXAOyYgL5pV1F01ES8c2bmXTKwEKOqdndAx7ypNO89AoBhU3UhtJa5s0eQQuRa6rKsTcS5M2Bvd3U1nw7ruoq0Wo2IYkwAoCa1ZEeAHSDiY3COCVGkc2qlrwrZuz7Zs7c1EecdIC3LApaHYUBEYJ7GjWNGPkf015yZ5Mz4eYuZba2FEPoA+dwqVHXe1VbLmsdxAsLTw0lVVeRtil5rruM0GRi6cwirijjmmnMuGYm4AzId9wud+c2JXzzfHnQI49WLI7/I6t3+l0nfaNPdxtlbxxkTMmP0NEQ3jNE7R84hs5Gr4BWCViBVT8bk0cVmxErYA5wNPPPNq6/YhyePHo3Lr8L8t1gfSninDj8U9wgsJJfQbdT9Ae4IyAwgyfJf/6v1118c/t3fvjkaMREK9yhjX0/mXNgRIwH3B0yttXTxlBmEkETFWoP/CfK2lsrM5DiXHEOUJgpG0DmKBAjrujIxdnbbOSrf9QHUtYJNO+oOIsWORGrS5tMppdQh4tvttrbKSABgAB2O4NkDghMz71yH4q3rYta13AZmSNBqA2gpJRE+P3gAa6khhHGcOoFFRErOTQQBx2lAolr71rBXG8V7F0IopbbaxmFqtZh1tIp143iMQVotJfsQhiEhYm0FrM82W8mlb+Raa57ftvKI0jCseR3T0Pd/H//61++8eHFxcSG1NJu+ru8JDhqv2XlstcVnsL6pYp7IM4Bp9IyIosbOkfPsnfOe2ANRVaoqQJV8Ih8UA5EXJWlgIARgxJ7w9s23BO76arcrP4/LJ03xFP+JDh8Cua9e7n3cPnk0XAYFtFoMrEpZoGUC/d4zf/nPrv+ff72/O9X+l+/fK2ZyLjKTiTJxM1WFYRh7ntUUkNhEEBEJuzozhmheu18AzM41va58AC25IGFrTVENrDYmPHt6eno119LxXznncRgNoHMagGicppxXUnm4v3XeqaiBnHu3yMhgZtEHqq3lnDvjMoTB9YAYOzVAZDXtB6O+7XPOE9O6LqfTSaX1/6u11nNnMSU7P1Hd27sSxhRVFQmJoMcI7Wz0hCEN/WqaYvTeE2NrJaUYQ5jGMaUUQvA+hBj7Kd57Zs9A0Ecgpcpmu+0TepH2cP8wjCM7JkZEd+JnK2x7jF+lARKzGwePaMTsmIPj5Hka4hBDjJFdMHIVeRXMtdOPHLqoFAWDohdDNejwK2C6v7vJRa+vd9ftF6G8Wty7+/FfyvC9pjaXQOHy9n7/yRcvP/vtV/u711qOKlVabtKAnCk93rj/9Efbd688IaaUVAW7PxKxf9FzzczOndFjuubcVJZl6YR/U+OzmqmPbYDZ9dw6MwOYipRSz3Uw50QFCWsp3QLRYx4AgIDeuUfXj3/vBz+chgl6hcgxM7VaU0xnyj1ADKGUCoDY7QdIfT/qCKC1xo7XnM87iq6ZJVLVGGL/cz1G5LyvyzJNm3N4AOBwOAzD0FMfjtE5WtdccnHBmSp6D6YdwOa9d+xKqwRnTrtIK7UYaDmVnsCX1no1TEWQmJixSe8PIQAyiolDUlVDkCaMAI5TDJ9/+/Lq+mqzmU6nU6211jqkBIxm0oo21dSxFUQcOtMaiQmRu/kRmQVIgZp2VQWRC+AGo4gUEbkJIiOpACEYPNzdnPb377/33pX8quU7vfiRxsfc6vHwgOGa0+4i6O3t6+X25njafvPm9p0n2+vLq27A6ld09vydIT25Tp+9XH76yenVg3WTeme7eO8JuEcfa83jbidNXr25GYaRGk3jWHoAnh0i5FJ6w4sAzbQBEHNZc0/Ot1qRzxvu89aSHSEanc0Jc16vHz95dP348HB49uLFfn/f15EcHQEEF0IMgJSSZyRRAWNAVJFpmoio7+FcnwcqKDlmZhADOifcusOrH3F7KZqZEGkYhjQkUe2cVI++T8jSgN67vvrHc1DO8ppFmvehX1adc0g2L6cmLTivIodlQcQYQ63imEMM0oCwT7pMWkXHptCPYs5xEx1SarWsywoA33z77e//8IfLspRSOgO0i4/Y8f7+gTqghB2bQ7IuiW2iKXlE9MGxc6JWW1UQ9oFcQh+BA1AgDmIIIozOCEsTqPPDzZvnL54/ws+Wm0+/OF5tzS42p5ef/iI9/cEw7RCpil48+c7D7cvj/vVx3yLn7Zicn5q2eHnttxeKlk8H5jc/+jB+753h3/3d/uefn2qtTJjScJaRM0YfpnE4nk7EVPIaQ9huL6TWlAZVvbm92W4vUgrrMvsQkJ2qOSYz64HSs3oGyQ0+pjHnNS+LmYpBv5cxu81me/Pm9cPt3Yt331WV3FHMCM47beK9G4ZRVGopXVPX44SdTumdd0NKBsDOQS+nm7a3duoYXBFBxOC9nv/Rm4HVJik555yK9jRuh67lnIchnU7zdrNp0jxTT8211kKI58XvPIs0M+t+ux5IMLBzmLzUXsDLuWj3rqgCaEqpdhApIHtv0kCbY1+r5ZLXdUkxXVxeaJNxGAGxloJgrbYereoZWWQHXXSsAIDsWQDG4IFYDJtIrsIhsIvkErpkGAxdMwJ0SF7NylpaOR0P69Pnz58MB3n47PPXmtMgD/cf/+WfX773w832uZqtDQQgRc9P3hk2V/c331xeXAhvAcP2cmsuPtzs27oHOfn1mwLmQ/if/95lLumTl9QDhGqqpsH7OCSp7Ztvvh3H4fGj69ev36iq93673da8juMopqLS057nWLqZc66zTXpMGbSW2qbtLpde7+oFDDA1ZELE0zw75vu7N8RuHAZVBQRCdN4zkpggADG3M5pIEKkHREsprpaCiLVU791blnmnHWoTSWnIJS85/6MkMK+rcz7F0KRJOYPT13XtyaacCyHO8xxjEBERGYahlwZ7jz2EYOb67CnnrCreB1Xrw3hmJsRlmc2M2XnHxF5aldZKKURkiB5caZWQutz++vr6q6++fvL0cQfvEbN3nmNUE2sNkXonl9Uce1BHKgLqkIYUchFFQITcmrTqnVdiBTQkA0fkRIEUkAgNDsf9z/76Z1988c1/8V/8Z9cbc/t/+O2XNyd+Hv1w+/UXbvfe9Yc/QrQiKNIQxFQd2eXF9vH1BQEgB5+GUk3WI5ix82SNQMi0zTPkhx+///TzV1AMcl47/QkQS6lgdnGx2z/cI2Ia0rqu291umsYvvvjt4TS/8+KFOVazphpDGGLqcXdC8j4iWl5zBc213Lx5k7q5kamJmFprtXMvh5SaCLHTs9sFmJkAU4wq2kx7ghkRY0pMvKyLqhiC9Ey/SMt5LTk7JiIw1VJqCL6WWmsDwLNjHGBZlmEYai05l1pqa+3MogE7R4JUe11IVVsrMcX7h/28lGVZaulEwLOxoOQiIjElBAwh9qmUdkEEERK1Jsd5rk1KqblkZgohAOJxmYOPKUZmmsaJiK6uLp4/f9YhJl0E
k9f1dDh1m0fwgQlBWw8IADkiIgY1Qab/7t99+/JuFYWl2JIFkQ1QVNVADATYgNRUtUVHP/7JT/5X//o/e/J4O5VPX33zzd3iwKWasyE/+92fMEGptuZaSl5zTxv3smlzIfgQWqtihkgIAtZMKqihNWYS1Qu6f7yFJgII3jvvXS75eJqJ6PmzJ8GH129er+vKzLUJED19/PjLL748Ho/L6WQqt7c3RKimgCAi67qoNuccEm4320dXVz2afEbLnRULSESOzhHSnNc+LTSR4DwjnTE6ogQYQgwhOOcMzPlQSy1rMTUXYgCDgZ2ZneYTIsQYAeA0z62pC4AGHTUCgB2i0IHIpUr3m+RSEGw57kH16skTURIVBp6mzaeffvrn//bPT0v9X/+X//mzZ09VBQn5bFOQvgkS0v5B7NAWMPDOg7Y3r1/l3AxhPp7MBAFLbd0q55zfbjfbaay1zPNcSw0xDGnoR5/WWoohpQCGjkhEhnGk1h3NhkjEXJv9f392+89///r9F9PPPj/8J7/fyb4R0RFSF2MTGgaybtWTxs6nafIed+5w+PbTV/fVpufoUm169cEfheBFbMnZzgcBBUTvSarsthtHpGrEAcDAKlgBLWACSKCCyIiMqE929Ns3GHwAAlUDbbXWlvNuuzktOcUhBl9rbjUf9/v5eIjeO+fiMJSSnz99xsylFO+9ZwcGHdHSlai9uNMFYW/32crEzrvWZC1ZVHvYkgjBWJswU8+2ImOVNsTUAQdmBp3WQljW9fxiIuwnknPrr9aqYIBUco4h5Lx0L07P5253u37oMbAYo6iuy7wcHo4Pd94HSkO3LddaU0o//P0fXlxe7XZbFSFytbbp4lJMUVREvfeDc/f390S85kIdSWFGQB9+8IGqsPNEZKK1Y3VazaVKk44K3G6m3/zm099++c0/++M/Oh6PzO7u/n5ZltOcd9sNEYhaWZc0jD/+gC4uepyRAcCavPN4Os7lg6fxs5+tf/43r/7J9y5cDAZGAGpmKtR/CYad3hZ8Wpd1isCnz7+52We6Smmjxuny2TBemMpxKdBj9sgAYIBVaTtOjJ1bHUwFrIEWtIK6kFViBggGBgQAloIxne1NznNrQkghhlKbNDksi/esYoeHh8C0vbj46HvfZeZxHLabTX/m9b2VmJEBmtEZUIzeh7yuepbGRQCsraqZ1dY7Dv3CD2ocXMmnFKOoGHRNmBDRsq79G66q02baXly+fPmND8HBWzpfa80MpK/zWxuGQcFqxbfWtP4QOtvdcl5LqSlFVUOkGNPF42dP3nkf2dHZ+2oC8ujRo2fPngPAuq6tlpzXadrUkr/69uXV5dU4Jue4nOek4MyZKpxtmucrvTYBj6pi0giJvN9sJmlSW221jeP4+Rdfffe73/vwg+9cXF7GEEstZV33+4cQYorpzZvXa15O8xr9EdApsHfMiLXKH3y4PS0ZAP/FD8f/8Ivjzz59+IPv8rPtBfRNAhmYgCmAQKf6IiHqhh4eXn910si7KwXnwtaPlwRlWa3W7tlhoN591s1mGFgD+xgG04oqaBWgoBbSTKCEoNiRhoBmDKZ2rndbtTQkVXXOO3Y//vGPVOXm7q6spdQqatF5lZZSCudyjxJTDPF4Oom0aZzorRtKTbS2viVVVWnNOXbslnVlT845H3xtIq0ty9K3n6VWZg7el1oQ4YzdFGkqCHg6nWCeVWQVcYBwOp6GYeizrE5HJKa+YwJRBUopiTRRJuJOVV6WdRiGXpkDBBEVwGaY+Py3HIbBwESkq2KY2VGsOau0/fFkrfzyl78cx+litw3Re+e+ffXmxfOniD1r2z1nkIJ3IdRau+sOATrWQkGZeNgNt7f3L1/d/Ff/5b++uNihwWH/MAyDqsY0EMI8HzabYbMZLrc56kpEaIzoEHQaQBVKNUc2Ovzh++Ovv83P3nlve7FjkuOpNlWFRtSQUaQxx5zn5ETuP304NRhe+LgV85yumKCUnBubmTVQNGZnxDGNyWP0nIbBtII2sIzWUFfUlaA5E+hkr55lQatVpmlSqfYW+9+BJAbQD5DTNI3DaF3jzvw73/seO3f+F3bcd5rBewheTWMcVLXWMg7Duq541h2BtDbPMxIPKRqAqDDQ8Xi8uLjoG1nvQ8lraw0MHLue7shrZuboQ2utSVPRGOJSskMA7wMA9FmP9/7M4GRmJvOu1ibSRLXWypF9x68QE3VcNxIhONimbT+xHg57Ym5vmeKI2OtzhMjO5eX06a9/uVR1zv2//sNf/M53v/vinWeneZ2mMTgutdYCHIOqpmHozRUAoB4tIzYT6Y+j1mJKf/8Pv3rx/NnTR4+W47E2GabxdDwiwjQOh/3DV1999e677yKISunDAgAwQ0MEREUrAqW0KfGTC/q9H/7u1dMX3tZ69yXqCBANSVshMDBF8y2fAuXj/U3lDacLQKduyz6iLHNBA+lzECQFRMcuBkoO0jCYFLRGIGAVdSWZ2ZTOg2DDnupQAYIvXx1e3eRxCH1SzERolrwDQFVlYzNDZz4EM+i3fWu1o2JCStIaIDBzzrln/ZCIHR+PB0AkpWLmU/JnlPYaQnDd3wwwz8t+/6CiSDSOIxggWi65n4v60WedT855MxuHVLGZASM5ROqGcyZ3dlOIROd6TwMA++MHEbvbsB+GhjGZQgcidalsbxjN8ymEiAS11vk0b7fbfiPrqzXy4dnljsD+8me/ZOb/9n/3X//yl5/85te/efb82dXF7nia+3VMpPWMVdEGJUtrojqkAbmLHXpgyDmCX/7y43/2T/9piv6YT6fTHgkcsyqcDvuSl+fPnoAJM2P0TtgM7S0IoTcLHl+O87yUUi4ev3P1/INN0OWb36zHfWMAz0CsRm2tgOzYE6qeXi6VefvMpd085/HJI89aqqpy14qbKiCBmWdLVMe0QV1BC4KCVZSFtDC76MjWtatS0dRAEVSM/t9//td/8cs3hjamJNJePH96dfXo9c3N5eXVZjOYqqimmB4/ugwheOdazYR0cXkZgg8+AgASOUe9Et+tAc5xdj7ntWf3TATZIYJ3ngljDAi4PxxSDJBtLuX2zb2ZfPDBh6rgHRGzyRki2zMC65rXNRNzNx847yMCeOfA+3WdQ/Ct1R6MXZZFVMdhOPetuEMhzo+okovvuN3WOiIJiayJqvYCeDfKNtHog/O+1VZKfrOucdx8//u/U5u+9967Q0x/94tf5rKO4xhCHMcEBgQmqqWs0qTjOCOz89FMGcE7bqIA9uUXXy7z8tEH7+d1HYZEhOTcw8PD9fXjWrJv3ge/rmuMgdSTETEhsLSCBs45E1Mz592SKw0XwftPPv75//n/8tf/x//Fu5vr7N0A6gFazc0Ao3cOynL/RsJF2L44Hk+HRa5jkHIq2i2AaKaiQsTB08YdLsaJbQUtBGqara2lruz8ZgiAjXzQuoCZgQIqAJxa/J/9L//zn/xZLVJLLuu6Ohdaa1PqaMBwOh7X+XDaPzzc3jSR/cP+tCze+xhjZ0AT85BS68ByxCFFAEgxOu+YKcYYQyDi7W5rqtM4XlxcmMHhsD8
eTxcXFwb28S9/KWaPnzy+vbmZc7u4vHx0tQNCVKil9PJnbVVV+va9NXGICG8d0ojcmjgO/cXEjrVqLxRL66A833F0rbYQQp9L9t5hZ0z945KL2Y0DP+wfYkzjOJgqB99nzaJuu914H2/ubsXsJ3/848+/+HJIKcRIiAjWGQ9MPG7HDvroJnRp2hvb3jvH/Pe/+vXTp4+H5N68fj0MaRjS6XDcbTcEen7wIEzj0AVsJgs6cIxInkxr6yVwYB/WcqrL8fNPP/3zv/zCyE8Dt1p6+RRUmeA4H5jgycbuD/fVYXysn/z6s3c/+gG0Uy1V6dwQVQNizz46vY3UPD0GyQhmJtDmw+FoJoOn5iNPV422CN5Or0QVEDzhq6NHlzabYADzfBqHYRynENIHH3zH+wBoIURPmMtKxIbUatsf9k0UtCGic76JhBBP87zM62+/+IIQ2fvD4fTwUI6n5XA4qoqq5VyOp5mYd7vd9fVlXZdSy6PHT3bbzeHh7tmzp6XZw/H4s5//XVP70z/5SVc/b6exJ4GmaSBHjtkTrOvq+mIBBZ13PZ7ho8+lmioTc2JEynkFg2EYAAHADJQQvPfSutSNkKnvL6W14/FEhCmmfjAPIXa0rIjWUhjQdbJ9ydKUnXfObbebVqtnUiZC7H1uQlxzDsGrEnXCG2GthdgRwDgOn332xT//sz9ZT4fgqUMdpqlTTVpf7g5D6jI8qTUFMmuq5jukEzqlRRzbdjfdvvr2L//u/s/++L3ffQaONIuxmqERgoKlNDrPst48HLPCXfn1T4/7dnG5a+VUGxu1rvoCM/BRtHz58V8+/ZOfoM4mFQDQyv3+dHt3PzpZ/c7vxnY8nIoFf5FckfLKMarKlzdWS07J19rMjBFV9XjcO+9bFSA7HObgPYDGGJflMIxjXwTd3+8/+M77rTUfJgMcUtx8Z/r+73zYIQprztNmGlKqefXB5zXnJsuyltKO8+l0PIGpqOVSVO3F00dMXJdlORz+4Ae/89U3r3/1D796OBxvbu6ktc1mc3d37z07F32vsCG4WpuKxJRKqc67cRhF1ax0FrX3DkmncXpLl9SmcjgcN9Om1tpaCzFC37SLLMvSfzOn+bQu6852IcQmzbEjcjnnDgesrRExOfLeI6CBTdM0xOQcm2kuGfE8cIqOQREIS8nGPZMJABZ8/OKLL733P/r93zs+3DvnSynTODl2LrCoDEPa7w/zvLx+fbu7vGAV0qMBKVoTC0yOvVgDI1O73I2Syp/+CE/zenNbodZ06ZyAYQVygAQgjK3Nx5sH8VF2bv7wo+86wlpKU+6fHCKnyIAM3/7Fh++9c3l5DW1GbWRyWk5ff3sjy16m62ePnrAtpxrMjavgyu8PQ3Dl830JmR95X1KMQxpO87I/zO9sL8YhtlJcSOzcWtZOVjBTdq6U6p1nondevCi1ecdodr8/IFKtbRjHZX9ExOPxtKzz40fXplZaAQVGGoc0TXxxsc2lmrQQo/exfyK7odcMmtTeNCKih4f7w/1DKeX+/sF5X5sC4mle1mVx3beVYhKV1mqppdZiIszExOM4HI6Hjk87i9/IbTZTT7qGEHo0rk8XmHB/OLQmJedpmvqCrIf8CdE7Bj03LohZpdUq5NgBjONgosScS0spOebWRKSxY/asCkRoaOzZDBBgu9n89//9//WD73yHiba77XyanfO1FUccQmitItFut1vW9fL6UlVLbaFZiAhWibnT/U1JOpsL0Tl9dhXnue4f+LObeil1vGhardlq4ADMHK/LsmRRbIZ4eXVldZZaAUdgU2lq2Jh8Pbz74vE7738ELaOJast5/uyLr29vbrYpbK6ep+hm3YIL2iqQA6Ps3wM3fvlwXKXvpKXUdbPZdm51q9nIEUKreZlP02YLgKd5Dj4QYysVAPwQl3nOpXRkeM65lOpcdd6raoiplDKvFQFrWWspIQ5xGO5u7vw5+8zLWtZckZjZjcPAzrNzCUdR8SEigvYhHOKTZ0//MZsKYPPp5IZh6OP/2moI3QklueQQk4mUkoeUACzntV8miYidN9XOjlDVLq8QkXXNec3jOI7DMAxD7zJOw1BqRlAEULHamnOOnSu1OM9E3JoSqHNeamUixw4RARWZzc5teeecndkmlsbh/uH+629e/Z/+2395POzHIam0bgwxtXWd+0XMe99aIaKiEmKSkvL8OgRPYdPzwcRopAyoVQyRnd9t6cPn+nC0bx705ubu8nJrKvuHu902ZWSZ13UV0WqPmdiBFrWODBcjEyE1TnzcXT+HmhHUTKXm1zevbl+/gnyKj37nfoXQNtthMj2jSoyctnL3cO8YtPFhWaZpXNds0E9+WkWYcF1PojZNG+/CMj+IyiprGgbyTED992VApbSefnfeq0EHtZpaGgYz8CE470u9J+dFLaUBEU0t+HDYH3a7i1JLPc2tVgVTle1269gRs5nV2vJa0pC8D9KktWKAtZZlXZ2aISigee+ccwAoTZ2zZVli8MuyAiAzqFrvvfbTCTCXUtZ1MTPnOioAvffDMLDjzbQJIXRvHDN5c6Wn2lKMkMyUkL0fc8kI4LxL3rdaW1l9TICwrIv3Pvpw3q06J60RsfeuM0f//u9/9fjxo+uLSaWq2eXl5ZpL5/GsayHElIZlWZwLZpriILmEwFKn1laphbwj7Op0kFq9c0ZoIqh2tRs84/3p4Ztv7lutl7sBQFQVFFTkMDdXQcEZqLZqFk2zgTPlBgw4j0FGTwwCJq21/cP+5uXrcnjYJFcafP0PP33z+u4P//BPxmnnGA3Q2rref+J0f53co4kUptpaqTXEIYahNo1xWJeTY/YhhBBEdbPZzKcDEz/c342bzfFw3E5DSkNTW9ZFRUOIALacTkDYU78DjzlrH+0N4zRNGzMLztfanPf9YyQiwzj041eMoZT1zatXa17TkB4/esLMm+2mlLzMXXWAzjlTGYfR9aJQSqnXAMA6C6ztdtveZgfAUrKqmDGeqYBgZmteich5753Hs8/MpZQQof/iEbB/TvOam4gPHgFjirU1ZFZpjjmloc9kfPBM9LDfD9PYu7QdVtTp753AJyImMo3jJ7/5/Ic/+F1pFUyd8ylGBOiMaVObT8cuqqK+WXC87OfYTi7sanGKYAYiFcG6PdAQXUgmgoJLm2NyH73YfX0zHw9rXvM4Bs9IZqI2Z4mAomJSAUVRQBXQlNAsW324enbBoKoCWufTfLh9ue4f6nyE6UmdH65jIb391d/9/5699zsXj99DOfnls6inpogmDnMTriXXJkR1XZcYUzGLIaiIiCzzyQBCiN65Na89HF9KXpimcVCRIQ211F5eV60AKE3GcVSzUksHMJZSmwgTM+M4bmqttdQQY+8xTsOo1qGUtan08/vxtEzTVHNRVQOIISCAc26IodbiuhJ7Ps0xRQBstYXgU0q11ZzzNI7MxJxMlYgNLHhfSp3nuQOzneuB+cJE7ZwUY8+EhOuyqoiY+n5WMrh/uH/E19BZAHBOzXY+mjZxRME7qU0RQ4y9Tt9T4k3bOaRH+NVXX3/51Vf/6s9+MqRYSl
GxteTjfNpMu1praTUOU39DAIL3YV3z2YWj+5B2YKcm5qytyylN21ZbNUzJiLiZ269m2q6n+AHBb1+dyKG1+tU3Nx88u2gNCNEzh91zBG11MWYkVq1gKGJToM2QehFqXdfT4WE5PqzLfL9faFjgsG4vrj0C6PLVpz/7+JNPf/zRtJ0sW0UX58Zfvpbj+rDZbszAB+eV52XG3m4+nQwxxqRmOWcC8z46duu6bqbprKE9Qzz9Gf5K5J1DpA5rjyHM85GIY4yMoNLWpeRcxnGqUqW1vOb9wwMxd8ZBB1ut62oGCtB1hQBwcXGR0lBzrrUgkvfBIaFjF0JU7X7uMvrYmnrva8m3t7chhHEcvPe9QcHsfMARQFpLIZBzvcwFCqLSQ93DkFRss9msy6KtikhKsZQ6bba11nEcwXSdl2EcAayUlZF8jKLa+2gdAtST0T46FXPk+mQyxvhXf/U3zx4/GVNY8uqdR4Ql1xAToXlPTTyeTVPs2CE7gJxrC5QGfeMwovOk0vKqZmAarKKZFDVQI7fbbUmrs7Ybw3vX8uphWRt8/NWSGLfO7QYOg58evT89erI+vJT5iGyIpJKb+N3VOHgEyaXkeT7l+ViLzIv8w2/nH6X5vacbdkxoRCqKo908ubhqy5ERYop//xmu6lOEWvK65JQugPBwOL548RxUDGxZsvdRVKZpfLh/gLXE4KdxJGJVPRwPwYUYXa/aiQphl9RKLcV7h9CdTBy8ryUb8DhN67qu60JMSCgqSBRjnwxXAJg20zCOznVMMZecnXfrsppaiBGkdWKfQ8RWxUwAAMlijJ0L3CXpKSXvHRMDYNNGSEteu1IvjQEBRNUIUxxO8zEE30v5rUm30K3rQnjuKfeoAztXa1PTaZrWNTNRDEGq/GPgutuRDaCji+d5DiH0F5moLsvpzauX//TP/iwNqcNWqrbdZrq9u5O8JE+aq4+Dc+x9YiZDvLi8hLbX2dP4SJbbMG7SdNFiHLSpVClZ84nihhmGIQKYVhEhEL7ahsHbV2/md6/967t5fL7xwZ+W+ubTnwL98fbyXR6W+eEbaYtAJHSja6yl1XU5zXmeW62iIMAN0t//Zn+9jZup770ATa63YYiB45W0/MU9fn7TI7g2z3OI4bDf7w/H1jSX7Hq5gBAJUxxzzv1l3TP4xKzapmloAgbYJeVIlNc8LwsY9Eh7CCF4pwq5FGnNe1qXmQgJof+ChnGQpghYWt1utrXVHq7SfpEm8iGUUnri9gxRZBZV12rrnw9iBART6BcxqbVH+NKQOmOxtVprA7MQYslZBHrBgAAQDQw6fK8vWadxkNaIoJbimIgopdg95q1K98Z1QmVr4pgJAZ0rubjghzD0OCx3wRRaP4rFGL/8/LPa9J1nj4jAgHNRAKslDyl2y8SGY5WGSLVWNfbeoQGDTlGBYlamfGyg6CMxkxt9HDRPpspWjZhMEJGdB+j0E/7Ok3Qx1K/uysMxj2P69dd3v/n//PxfLevTdz66fvHd6fF355f/UJYDJR8IW1vX5VTWU+eRK0ATev/p9PjSH2YJ65w2F83YAK4utx4RtCy1/eLroEbSivfROd7vH4KPYEqM67q2UnyMm82ICMt8QsIYgoCZQWkFEXpbIUWf84qE0bl5XszQOec9m5iY1ta0e2dFEbC12i+2vWSnquu6Bh9EGpg9POz7xNg5P4wjIJU1O3YEeJjnnkBCgpji/f2DSynmfMbZh9A16dADbCGd84chJlVxzjO72lopxQePSNYaIrRWpTVHwD6s69qaOO8cO0ACg5TOmWgwOByOQ0rDMOz3+7zmXhkDQDNFpA7T7GQ0VQRE9q437jrQZLMd/+pvf/Gdj7672YxEBGCn0ykGr1I22y2mlNel1YbMYhqcN1MVLXX1uFi+03A17p7Y/DrPc5qwFlE3DA7DeIEg2Bma0pDNDImNjcFY1TYJ3n9EX95WQf7O893f/Ob+iy/vX4y/uZ1v6OJpWr8tbUhhJAhlrSWfCUO9dXS19c+u/PPH6foyDUNczb7a68vj7Pgir8dlnf/6s/b1/smUAoJ671oIz7fPTC2XvOYaQ+zcQc+sBl2S6rxnNEJi5iqtp8YcoxB65wgxBv9wOI5+i4jsQKsty4JE4zDMy0JEvYnVVEspqobI67xUlzu3VBT7tXe32+VSlnkGMDANwY+bsedGpQmTG4fR1SpmVkpurRKjp9AVKipG3bh0luxpV1DHEHvjxESqdFIalzJ776WWnkTrRC1QPddK+OyW3G23PYo7DOk0Lz1iMY2joVdV7/wwDApaSkai5KOaNmmE0G+C93d333776l/9iz+T1k77OcYorVLyjlyrlTpiEmGcNq21M2kEpImwQZmPU9z4sFN6jG2tbbV20lrM2VItl/VikwQjs+vQAgRGJSBS0opwWMU7vDkIOvzdd7evXu/vX8TLHZTXR4EmHlmOr244Pd007dZlJMIQ3NNHw6Mdj8lN02AcLxzHwX15t/vp5/YfP8vH7KtOF9e7vJ5KyT74aUgcOkDSmPy6LGrq+Bw2r2v23kurPRBsqs7xfDoNw5BSAiRkntdsKsGH4L2Z9VLpMI5MJKKIMKQUQmitWW1qYmDecQxhXpftdtfz9gg4TZOo9JqH944dI5EHbCKIyEwhOBI8o3HNdLPZIGJrNeeWYorREWE3BAYfvYNW27ouznkiFLGH+z0RbtPF6XhsYiLFeY4hpDTkvHrvTQQxdPh8LmtrMg6DmmoTQAzBxxhbq7U1QyxrDt53E6opiHWUnnTfal7LZjP8+uOP33vx7PHV5JjDbmMG3jt+C4Y163EG1FYBcRzH1ppojcEfDxaB5fQqkMQ0qd8ibsuJsZ3IxbYeBoeE5EgQuu8JOj8HTc18TNjuxXv7/rvTutZ5tZuj/Md/uPvRR7txTMM0eFArp/kUquyaUpfDOh9SStHDbjLHgC46FwTDz7/Z/eYmj9OY0tBQfODj4b6rHR3z7f3dbrcDIDNj51ja6eEYvD+dTjEOKQXnQ1ccI+JmGlVVYgwhLMsKAGXJPsbWWkiuX4pzXoeUHNnxeIw+IFAV1VwNjB3X2oPF5lzQ03GZTyEmMwsxLutSj5WIGbHUNoWgagDWlfGt1XU1AHIA56OM8857XytTqWpmpoi+RxtLzgjKRCl49t4MSq3DNJqa975X8F3Hy7cKCMM4ElLVtUe0wYydG8epu6Gi972lykyENi9LGoZOIVrXdRiiaK+YCALE6EU0BB+8+5u//cWf/ukfoymT06bBB2AkA5UerKVa15SGDitW5Pl0aNIuLy+ZCLfvlPUNnx4AWgxJ3DZuH5vuVKqzJTgm5n4mIzLqPz0SMjPEogJMo4NNoG3EuoHPXg/fe+aeX8d1lURGpDXn6fqyCnY9BLMfxuAYogMfUerCTMdCf/Hb4VYuUryNwddaas0UQxwGE+nWnN1mJ03mZUYi7wkALi53zrELqbssTLXnFZmoT4R71s/HANZXBRiCBwBgqqU5xy9fvQ4xhBCatO12A8Sned5uNojofV3WfDot44jb7U5Np2kU1bKWlnNMyXlvKgZQS
zU4H0YBgJjmZXHknPe+lBJi8C6YqikgErw18Tpm7x0A5HXtmb4m67qWOKQYQi/6LMtsBj20o6Jk5vrS3gDMiFlrba3FEJZ1jT7ENBh0cmFmojSMxDT4oLWGcVRt4ziefWm1Og599Pnb336h6L733Q/JKnaTq8hZNZ+rqsUYCLsKU73zIjXFgBCkFZUKPFRztB599GW+09HCtGMXDHGKEVSByAB6U8LAOtKfTA0sBnz3yYZNW7PIaqY/+Z3t1QjbyRVxf/j9RyrtV1/l+/08phQ9E2lMKTgGEB+8kZCnr07j33wZ920yW9Wsj/WyGiG1Umpr2+0FEJWyEvNmu9Emy7r0ulzwHl1Y55kJCWEcRnJc15WR9oeDIrGfVExaDTGu6+p9YKZWBRFiSk+ePt7v95tplFbVBMQc0eFw6MX4Lum+f7jf7S6mafLe59OJmNh57bxvxH7c9iHldUUCZmcmHc/gau1oYD7NRzCLMc1znjZTKbnUgmB5UXYupGSqZV2baQjeew8G9w/3YKAqvV9Sa2XHpeR1kRCjmnYcaceGr+s6jUNrUlo1UR98J4B23S2o1dbAOqmjheAfbt7M8/zo6fO85svL7SeffvHdjz7UVsIQCRCJW2vRJ0Ng4lZb//Kt6xKCB4QU/Lq0169fP3361DnntPrdU5pFWgOD//jTjz/88MUHz6alQXFj9L4UMbMUPJiaChMbW2dNetQL50Daw7GgC4HaD7/jvKcO2guUHz0enz599HefHu8f7uOjS+dwCGxg3WS4X/mLw/Unb7wAt5oRMQ0DgsboQ/RlzWAUfJROFn7bNFeRYRid8/1NZFIV3sJoY8ylKiAxb7ZbRGqqjn3wvra2LisAmjliyuva6Re1lLKu7LyIIWjO2ftgqmLqnDP1wzCIyLoshJhzHlJSsM0wLcvM3M13rtXCRH0VTUhAVGp1PYXfRIaYSq2AMI6pF0YRkdHMrDYVETAzU2lSkcvx2GpDhBg9U4ohlCYd19jpec65VlunrlJKWDtYRKSJWe7HZ6IeOFZmpyjEjGZzh0F7jsMUhqlJ2263p3n+zae//d/8V//apK0Zvffb7QWVZipaSwjROz8vS2/BShNTNWkI+vTJ4zQMed/IFnJXMjwVO1LLu00aApZSRHQ+nnTaboZQBcCkE4+JAJUAGRCQuXOZL7aowAMTEYyJhgBjxGlkAR+G4Sc/fu83Xzx8/NnLi0F1pwLumO2hXXx5x37crHm/3V0WayHGNVc1ABHtq+IQwaCdkYmmrXgfsqpzjohbzn5IANZqy2u+vLpc1xy9p+hrzgAIRCWvENCnhCAxhO5J6dyFw+n05NGjMSUFKDmXUoP3PTC4luLMi6qJEYmZ1CrzAp0mE7xXkzSkVqu0hp61tb5QYu+C9z2u7szseDrFEChSCMHAOpuXiVqtTCwqzpF3fDqdamu11tpqcIEJo/cAQMSGGFPs7M+WS3cYhujBrLdzYggiwuxLzrmUYRjzmsdxQGbPTlWoY/RFOrlsnufgPQIZmpn86le/iTFtp8iOiYhdYPY8+OP+3lSaUPBuu92aqnNBZTFVF4KIxTT1n1PNoNxzfKTqqsGPfo+InKgGz9tLV5fZag1h6n+w+9gAGRgMGUwRgY2YRAGGiATqWGMkcuhiJA6GHhEuHr17fPn02+OdPCj7hG4EjhSr1KxqTBjTqKaEhASOQNUUSUoWhSXn4PvLwNl5ANhqKcMQl2UNwXvmFAOYRR+aVIe05rzd7qraOI7Hw4EJDSCmKK2CSq02jkNortXsva+tsfMlHwgNEfPazGBZly6eQmQwYOcMDAGPx5MPXsVyWUQaIcbQmfnSb7sGoCLOsRuH0fuWcyF2Z7qMQe2YeFPjbtiGw+GoZtvNJpdKRE2ktXaaZ0QYxymk6J1fl71njnEw05706FS8mGKtdVmW7WYbQgQsMQVpPahK67qkELuuph/GW2vbzcbApCkxtyY//enPf/xHfxicSyGScymNJo2dS2koGYmQ2BNoESGimIZW293d3dOnT4qIJ1RDgYBWAc2AaLg+aalL8ZbHMS3ZomMy6ZnrDp2FDgJREBVECsxopACO0HszLcieCAmFXEB2aswAVSwLuuEKW8u5BDZPBgzHuVxeP+6g3rLOzNhqBt8Ptra21ftAzgXvl3WurfoQxmEwg1Zryblj7bomVPvgR2U5LTEEkQqGy1p6E2td15yL9+yYDICAhnDmLqgpSJ3Gs0v1OM+1tW7MNNDD4TCMY855nEbHjiOX1kLyLnDJ2RGpSGvFh4DEtVT3tr/lnPdAhEDdY9jfaERUSkEEBCSCdV0VQAGqaL/mWV+/x+iJiAlUTSqBAZyznWDA/I/GzJJSSsPQRJoKAORl9TEQ4rqutTQ0dMEx0Vv9YrfaKBOZyqvXb+Zl/c57z0GN2O8urk3bfDoiALNPaaqtIhIitraEGNRM1TabbW3iffzyy6+51ZQikcc2iwDEnQdnBihSlNm5w+HOOZ62WwXpRkEAQFAAdN41xVXMMxuAZ2MnbM57ViSDDIZi3BpwZCDHpE1knKZhmkoRJgRw7MLhcOzh7J6vGsap5MyAiOa9J6I8r8G7FFPO+XS6v378uKwFEJxzBkBdCARASLms4xBFmqrWZWHnVVptMo4je+eYEQnJaV29h1rrME2n45GdMwXnHTCCQW0S41DWDAAIlFIq65rGgYlKXUmcGUgTFRnHUUp13iEhIOSca7Na6zRNIQRaTnNv5zjHOedlWRCsB4qJUFRzFR8jInZ9mnVOtAGourNGuX9PCnS6B5EjTkOKMYzj2JsfpVbvHBESIhiIylsysAxjct6pSuefA5oPHolVzoK0j3/9m9//4Q+ury+HadruLrz3auDDgAghsPM8jaP1D812awYqLaVIzCr68uXrX/3qE9WmCt6xtKwm6/E2Lw8h+GG69GFkgri59tO1IisykDd0ncRlhAV4rjiLq5ia2xJ7IC40IaJgaDSh3xEPMY2AtOZSakPAmtd1nktZj4f9cX8/DrGss7Scl5OBkfPdbgVv3fPOuxCDGYQQN9sdAHz+2ef9fe289971pVWvDfvgFRmRQkyltJzLdjsF79/mDhTRjsdDxzsZWKmtVKlN19Kmza6fu3a7y+CDi9HOkjzrDyQTISAAyDmrtBh8XjMQzMvC3BWcFr1z3jeRUrLrdyQRsa6E9VFVtImImGEueRhSD7kBYGdJay0hhloLUQresyM1EDXvQhcKnd0/AMH57oMGg1wyMzN3fyyUWkytE4pFpNXCeKavNWnMCM7lUlT1y69f/h/+9/9bAEB2BtYz89JaGjZMKKLI0OrqfTge9r2p7QL3U9d77zx78fTi5Ve/qK2C1BiTStOa10wpqUgNPiAnRgPopNzOViBAZBAw6o53BAiOQGXgquDBDUxrDCiCTSl4NGugoRojiYhWqecA17CRWtMQwHZIJE2cC875UlZVRRXHCNoIOIZQRWot67pcXl7strvgWUXKupBzwYdWsidGYiMYUrq7f2AOhBxCZPZEOQRvBuxImjjv+zh+Gqf5dJqmqTapdb27u0NE
Zp7G6XA4OOa15taMwBihtcqMzocYI5q1WjsxuLWaUtTWDCzFuD+ciFxIqTf4Yh8uL/OsasRnEFptTVoDsD4RBoT+PXEsxrSu67TZGiAQhZgAOhRBj8ejO+NdxDF3m+44Dr08BGaqBqDM7J3vsvAO3ua3wKIeABJTNb3Ybv/iP/7l02cvhpQCY/AeVPuctAOLTS06V2thQmIehuF0OrYiiNDNo0yY62qmzH5eDo+uh1yJeQNqD/tlwFPYbshdECqAooGSMZoBNvEC4FCQ2JQcIUEhwsBayXtq5KKqMQlaPmVKyaEBoPPRt7Kk4HsAN/rAMRrSNMG6FkHssUoEKLmo1OBIpbXWQtoyc+fa1lLjkNZcELA1iex6DUYNfPCttprz5W6X13WchtravCyPHj1u0k7H41zyOExDr6OYIlMHXXYij6leXF4wu+PxEALXCiF4aW1ZlxBjjGFd1pEJzbVa0zDcvnkzTW8d2WLDEKSpY8fOQWcBNpHa2vF4WPMKYH1GzkRMaGad26hq3vX6cwPsfflex+YQY6kdJFRrbcTY9QbsfB+t9vN/KYWI52U5mzgRcy739w+v37w5HA+AUErJee1H6SbnXFGt5Ve//vRf/if//PHV7nh/W/LiY3QhmFkIAQDBoNTaIR7sXK2FkDabDRgS8TBOoo04OkJEXyEt84O1dRziFN2UovcBTBkagaKBmXUJHqF5qkRgSCLUhMyQrDmsgECEnkEpqvWn9mAYek+IQJDY0NUqJZdc6jwvSLTm8nD3EEJIMZBZq8UMps1GVOdlzTl3thEAMJqqhuA7Brr/OgBIWm1qRpxrRYRlXqRVJqy1xRBB7e72TkWd90MagCCXuiwrIo3jcHl5teZVVR8/fjyOU68aO+cQ+XA8OmJAHDY7FxKSQ3ImtqwFmed5GYfR+aBqrVWm87c9Bo8ApWRptXt0tDRtraUUiajU8o8pUu89kvQfxASIsNZ6PJ2QKMRoAGDaMdXav0dNet27OxZ7WLG7OVQthoiItbX9ft9dLdvtFHxgpC4Ccz6oqjaVIjGGb1+9evLk6XvPH2urL1482+/3+4e7y+tHYOqcy2vpuqGahdmXtXgfkBiRNxdbBMlrBkOSEyE0aS6M+1U3CTTvc6Xr66t8KkiIIGbY80rdeQWIxObRVO3YjLr/HaqjZm5wCAjmSVhr1YDIU8Su32pSTXm720nTX//6V8z07PF1LtW5UARa6/RXNISUJtU2jlM/sBtCSgEAzSDFVGtB0ZL7ITo21XXNzntDOaswEIYwtK59RGLHIppLNrNpHJd1fXNzG0O0dSWmvKy73Xah0h8QptJqJXYqbUiJ2EnOzoUYo5pud9uuuA8xmIdlmde8huBVW17zdrddl0zkiBCIlnkhRKyd/+WDD16kwhkjDyLaRMAgpdiaMHtEYnbDMCLiw/3+/m5/Oi3rurZWc15rLZ5dX72f/SmAzoe8rp0fpaqd49dEXQgXFxfbzdY7bq1N08Z537clQxp2u11M6bdffPNPfvxHjilNWyS3u7jcPzzUvALYfDyUvBAzIvoQvQ8+BPaJ3jZ7lmUR0ai3j/1rM3XOSytuuFLenpZmIJofPGOn3Zppz6z1iyeCESiBMKr3hgAGQD4gcbWIukY4kawEUhpovbV2AkRVeZtRaN67H/zgB8ycxlFF1vX4+PHlsixEiGim/YxQECmmcdpeTOOOicEQkQHB+whmzrsYY0jeVLa77TAO0bHWmqJnRBGJcWDm0oqoUXeJIIyRQGW72SCad1zWbCan0wkAAHEYhjUXIkoxXuwufAiG+OLFO+x4yQsi5lLWNdfWai2Idn19dXl52WfR/bkgotIqM6/rUmohFRmHYUrpYndhCl08YGa1nLUVfQ6OCKLCTMycUvLONxEV9d750AVSHRkh/aw+jFPPDIBqSNGQRFVNaymq+ujR9cXFxTiNiHg8HnPOZV1cFz8x93zT6Xi6fTh896OPoDPngAzpnXffZ+IQQr/0ret6Oh2BkJxfltXMQvD9Qici9fTNJd3krpVDWNdKpABA4zWHcV1XQwdI3WTbwR2970yICAqmAEbYmlVppdXMugZbGAHQAwfPNroZYUEKJirNnqeHP3z60AQe9g+l1A8/+IDY7R/uW2vSZEhRVDtHoNasUokg57WWkvNamxJziCHG1Cc3zjGxM9XoPZiqSF9nmoELgZhO89yB6zWv0NYxBiJ3mAuy226naZpSTGA6jpuY0jiMhHhze9OaALGoHo6HHhY9nWYVTXEYx/Hq6qq9bWjsH/YdUkbMpjAMg6nGELW7yc3GMTl2ZKYGCCp5LcRERIygAF3SaaqtK3od19pyzjHGlCI754idd3AmUxg7JkRESCmp2LrOnZeI7Nm54/7hnOg060tQMCitEtFnn322rMv3v/+D7W4n2tZ5JuK/+uu/ef+990NgTyjSzBohkwugYmo+hCZVWmUXVGCtCzKLmJiI1DWXl1999vvP23GW24cVgJh9Gi/u3rzeTcPm0aU1MRwFCAhrUwyuO/oAVQ2wuxkMANDJao0305ZbRVMFBgRBdCCkB1ZDe2JK0Llabf7++Oa00m/tyWF//+jxo7KsqAUariswu4eHE5HtdhdqSuTm5WTW0RWI0GoVkdb3GzGE2koH6lap5ZBjTCHGEIKCGQIYNineR2kaQlgOd45dBVblaZqW5VRLVhHqnBcEZk/Ejx89ct4DYs/JtNpaq8MwbnebUmrfYT95/AgQvfMxpg4Vlyal1O12653rdANTy7lM40RNWim190eddwhgaqWUYRyc41Lysq7978FEtdUOC44x7bZbdlxLqbUyO1XtACiD3ptR570CVrEQU15XRBCRGONutyPC4JxIYyJRu3+4P+wPp9PRpIEaIKnK4XD88Y//QFsr6yw1M5L3vuVlXdZ5Wci5VkupxYeIhGoGqnmdT8f9uuSHu5sLvl+OD19//fL169eqDcCSs4vt1ntu654It7sLQnRMfQSK2PfxaEiAKEZADpkRvCzVoXgWQkArjI01Y3kp9VQbio0KJFLMlBikrn/06PMNPmx2l4fjEuMwbjaqSiBE/NXXL5uC8xEMcy4AmFKQVgGMiEWaqTL2LGdXzdg4DOMwMnGMAQxEGiETOiTebCZEq6W4EDbXz6tRT+4ty5JzMVDnudZaSmmt5byoNmbOpa7rCojeB2LqWfVccq/pmVlr0mrz3uV1ZWYgNDAmnuelNFGAGFKI8erqqkpzaBgCA1AIXtWVnEUlpVhrrfWcj1ZVqK0gsnM9RbssS7dHdf65Y64IzN4xt9rAgJgRQ2vNeddqabUE53tVoJRiYP1z7YMbp+knP/ljAEQEEWuqBvbq9avHT54+ffy4Lse8zqKC5LwogImYVw9oRCENqeS1lLK/v6nriX2MIaz5pOsNtcPL42ld85Lb9eZKRBxRSKPDXMuRyagKQvcXkZkSIAEpAqH9T95oGuN4/Tg1RSaPKM4qqgghiSKRuQ3wBkGbKjtgFGKHUH/09Pa/+5ssZnkdNoOPEUoVwPy9j969vLruXSaRisgIGFPq9FLH7Bwv65L
SCGDIrvt4nHP9lNakpZQe9vuYhhBTqdmxI6azuQYAiQFQwVyIrG6el8P+cHV9+fTp0zdvbpi0SQUzhPMP4ZzfbLYGysiOTdSIztl2U7vYbktZezh9SAmI1lxiCGCN0Tn2Cupc8Kba0bSiGrw3UDXsZPLuCOuxyDPUAomJfGQCq1ZjSAB2Os3MBKZNwHvfkVNMxEwqWqV0n5mq9Ss9OxcCizTRzvXlYUjruiKddTE3N/c/+oM/POzvTg830zghMbMXtZiG5Ni7bo+vp9NJtfW4SJx2Yvjq5m6d90P79mH/hrxfl/pw95AuZh+YyKsZsedhsnpajhnjCIatKmIjIuvG9x73NwUERQ/oDJsCKHA1BSNA7yyLEocL4WsgbyoITCoqixiVbO9dHH7/O49//kU+nObtdF0lG2KHKj/c37TWqsCj6+tlWRWwiWJtzrn9/rDZ+pTG0+nUJ64++N6xvL6+NgAPEQDGYUR2pVRiV6vmsk7juK7LOG2cc6Iwxniaj+wTIo7DKCKllHEaDw97pwqAfa0xBn88ZkRUsC6DTjEe9/s+bi6t9qOk985UWxOFxkRgikDeeVEVFSo5l1w7X5wZc16adh5qc8430bdmC9fBEDEm5x0T1dq8d86xmhEhM5dSXcfIApRaAK1jyK2TrZalNbm7u2Nm7926rn1L5NkR0rl1z9yavH712sfp6upiXRY1MmLnAhEpIDlGdmuuudZc6rjd+bRV9KXKsiyH43xz/+b+9pshmABk42/eHF/fLi9f3TrPAgTkFZyBQz8Jh66lXUtG7DhfIELsFqg+gEEH6ABAQc20iSg6BFGewE3qLhDJTAEM0JsVySdVGb1iPXx4dbRWp2EAcpvtJZguy+p9DHE8HE6EJoaA6NmP0xYAW2vDMPaFKLGPMTKTtEIIwzD01GiP7xJjB0Mv85zz4h03qUS8fzgs81LzWmuNPpiaD7HUcnN7W2p92B+aKSIG50+nk6m+evXKOY/MQ0oqss5LzWvsALw1q+j+cFjXdVkyADJTCjF412ozAGIXY6xVHJMDBDVFQOc8My3LTJHDMLamISZ21ERBz4KqvK6EAIhdGsUMhMDei+pmu+t1lp5hKzkzoRHWWsu6DuNYawnBI2Fr0u/zajakQc1KqWa6LEup9e7u/uLyihBjGohdCCM7dt450byWJidTC3EaNsM8z2qWS77b71vJuepnn3/1fCNv7uzy4uLm5v50mH/x6ZuPvz19//vvUwxiJGYArBQIkaCIaCkNiBBRpMvYkdDIOrTbzuo0KeDI+UQ2gzYjD/EStHpvqIuCp5h0/up0eFDwd6U9vhrf2eUPXjwv5vtofrvdffP1Vy7EqsJEeV1v37wZhmGVNabUub7s8Hg8SGsXl49UK1hn0mVmVrUuA3XOOeeXvF7sNiml1rTP2MZx8r6mlI7Ho7SKCIjWciOiF8+fMxA2neIwH2e32QV2p+OBvRcTJiq5tFpqLSKViN7WueK65nVZzHScxvNKQrW2aqCttmEcQnC05iLapEmKCQFzrtO0TWlg4q6aarURECAEH8dhDDEqQC5V1EIIfXjSj2kibVlmRgeA/V1ecr69eVNLCSm2VoHI+dBqA9OYkg9hWZZlXXJee7t2XuZxnB72h8vdJi/zOG6maSNqatCqtNqIedzsxs2FGpzmdc11nudlPT3s98e1vnz15uNPPvdsv/ny4ZtPf7nDu1zXr+/z64fTt9+8cqhivZSCaNrMCY1NzFQ6ShLA1BRADUDBAHEtYEDORZRCoASVdDUXUUuDWNR5AgRh5xjqev/tX/zt3f/j393+xd+tn35ZUqRnV9xabS0DABCnYQw+1HV1jqbNpqzHWtY1l5LXeT4t80mkjsN0efmohxKJ0HuXYpC2gikxE7ucS2stxTQOKZccvG/tHEy7uLwcxtE5HscBAEDVO04hjGGguflsmPVquozsNyHofB+8z/Oa11xrFTk3zTud03uej3utdZwmU7u5uVmWpZZyd3vbz/tE9PDwYKouDQOYEulacr+LGSAg9kZirU2VKRIgas/Hi4EBEQ7DdjkdiamUAmDddB9CFBMzy6WEEFqTYZjOmlxAF2KMqSxLt78g4pBSK6Vfy/s1+ttvvr68uJiiz+vy8HC/2WzGzSTS9nf3zDztdsu6IpKI1tZaq2vOay4+DrnKzcN+ycvjq4QAP/vlm+n2cLMqj9Of/uG7jiCfHiBdzaVcTmgGaqiE++PCRK7/xH2G2ANBSGZgaLm0GICYABTaSVwiU0SyloF6dMKxT/evv/73//43bw78yevynXcDpOuH2UU73Lw5jmOq9fb6+npzcbUcD6Jy2O/HJtN2B9LQeUO6uLyqeQ0hADkzEJHNZnc87gnVoTETOa+GYPbw8HB5dQWtSWveuVrzdruZ19V7T4i1FmbKeWV2uYljiD6hwCYlVnjY752Bm5IQXz/7oGirUuKYEOnw8MDEwxBVtaPHaymn0xJSBMN5nokwxBBi2kyb2lpeV8cMgI6RDMAImB2xGVg3djWpJRfvHAL0BywA5FJEpMMiAFDUEK3DEpl1XhbHDhCkaasFpJH3m4tdWVcE8N7nvLa8+hBkXVtrKsqOkR07XHNGoprz65ubH/3ojzYpphQNSQ3ymn1ww2aDhqd5XpaFXCTieZ4fHu4QdFnmwzwf9gdRHVKIMXz0Du2Gj3778jTt5J/8Lj662EzT9PrlN+++H1MKKs07YiJmQPQcGHoUCQxNkRgQwBDAhoAAvtZW1OlyBKlpN0U9IrNqC9ETFOSwnB5++YuP/+3PDv/kQ/eTDzbP3+Gy3t4/PBri9MUXX7/33osQQhpSq6opLcsphnix25YqVRGbqJnzvpZ1Xee1tOBjGhIieRfBWm3Vu6hqzrGI7Ha7u9ubzWYDyD56F3maRndG4VQwC97XWuMQE4K0GrxvraxrraBpGsc4tC42YUcoKQQTBYJxM8FZeePAQpPmfNxsSFqbSx7G6XicH8eByfqvIMV4OB4fxeT6mbxTvaRVZue9AzPHzpETVSRExJbPMnORFlJ0or38JqaRQ0/293e5mRloyyWE4F1Y1yXnGr2vp1Mvnoq0vsxjdsQ+pnE+7Uuby3pSlWHYjONmGKJJM1Fkh4Si0AyZAwH5SIiccwbyLsS8zA/7w1rKm5u729t7M8oVvVc0eefK9YngkuuVKRjefPPJe9/5Xhzo7MwAef7kAkA7PLrrh836NRQNsDZGQiMCcFAb8MDagIm0ctsDreSD1GHZ315f8NPnV//ml7f/6kdZ2no46IsPaTY/DoEInz17FkNq9UTMMQ7X10+k1ZyP0ftpuwMDadoHMM55xPMvonMEpu2lqPU0Wa8hPHr8eF7WUsswbp2jXBoSa2s9JCmtTdNE5B6WE3PnD2M14BC01LmsbozOeQO1Hu6rZSBzzj/My1//9O9idE8eX//u73x3XdYYo/MBiJ1PgLTmHGMsJTtm731CQASHYK2LoPEMIDsdT2lI3Q5Wa3POc3DsWaQB6DQOuZZx3JS1lJqZHLNrIjUXJmT2pooE0+VlBxpKrSF4le5ZBwDK61
pK8169j+MQfUwjArlw3D/c3Ly6fvSMncs5h5i8RzUwQDGozXLNzjlAPuzvRSTXJmprKQKYSyHiYRhvb27nXLTWu2MN3ANsDFDneR2ncTm8vvn64ydPHsfdYwBvCACNiO1tDamvewzA1MT6etWQMJI4J8beyRE4EPngZ6v702GiNO6ePJ2S+2d3Mjdvw24Nj58+fXx1Mb681SENwzA9POyHYToty3azOx7zm9v7zRSDD7XJm9dvrq+vRRs57wCcjyIKiB1B2btR0qr3Zyauj4mIJuYLF/7/PP1Xsybblp6HDTNduu9brmpX1bbHu250o0ECEgBRhCCAEVToQsEb6ZcpqD8gXoARDF0xQgwBBCmgG65hG+jTfbY725RZ5nOZOd2YQxdZR/cryqy1vsw5x3jf5zHWS8k5JwAmw9uxyTlHhKUmqcV7l2ttqL4PSqgAaVnSspgurCmGzqlI50LLl/143Yj/D3/nb799+87w1vxiIm4qxjprrTEmp9SahPA+3aGt5VpMStF3fS651iraGLlKRcSctu4EbIwmw8yNcopVBIFiTITQpFQAjx2gblFt0aYKGwhGtSk0ZseMqZYNrg6Im+xjnpeYjl0/j8PUD8O6ppRLSvX29lnO2RAqYMyCiAqYUsklIyMg1yqpSCkpxlRKnJeLSKulPR2Oqno4zdr05ma4f7zMqX1w0zGC1JJzsa4fx2Gen/BN7udTP93ubu6A6H0PGRtoVd1eZFs2gxS1KbYGtYlXYdNMK9b2pBIjiIChMHW9uf2MwHzy4um/5PBHv/+xG25iQQWIsYQujOPgnPPBq+Ia0zhNl8vCbHNe2JjrYbTGaGsirZQ2z4dhHBHtuq7bhrvWCqClFuf8tNu1prkU5zwR15KrtFoKkhgbzEZmaViaNq1brgsAgCCEbrnMxOS8V2xd3wk0Ed1UAo3c4+FETL4bXnzwXEGZKKdaalYFa2zwPudsLceUWmshBO+7TZVkSi3lcjKGpRapEmUrCimzkZoM0SaA0e3ltyzOOWutD6bk3HUDEqWcmmjfhbYpMomkNULKUrz3xJRTUqAimY1VgFKrtMZskAUADofD6Xy2zj0dT0gmhEBMJcV1y1ASl1pyTfN87od9M5qLKFtUsMEAm3a5pFwVcH919dtvvj/P6e3j6cevPryeAqCZBkop956JdFnTy2fXQ49lucT5rFKwXMbdles6RK9gSeumKSUCbWLYFqSmxhrxYIxCyYuIJlmlrLkaMMMPXkzd85dF7On1NxAf/vBVNsISxaKz4dn9Uwnd0A1j13XWuefPn+ec1iV+8tmnUtacknFuHHttKk1U2xqjs2QNWWsv86XrQqnFkrfOw+/8JKVkANxMtwAteIvNGu+d87BJmNhs6OfdbmK22+U/tayqzrql1Nbasq6IaK0D0GVdjOEY47jbHU7HoR+C8+uylpL6vuv6UVvb+l7W+u2AX0ux1nWhExEzjEOMcV1jzonZeGM2fJ1KU20pV+t0w9dba93V1ZYk2i5ozne1ZEV0zsYYRYXIyLbvz9l3PRnTVNm4LeiTU/LBNxGR1vXD7vrOWfv48BCGIcU0z/PHH38MqABgrItrFE3WuXWNG/533Ls1xpgyIAtoEhU1yHazoy9LjDEjmy++ffhbf/DZzdXQ3tPT0TlW1VoKsh3GDwpzXo4pp1ry8fG+73y/uzZ+GrqNSaSyvdRaVSzfvzuXtL7qDz08qSJwR9TeLGHs3c+eU3dzB+52+frzN19/fjclY7Wtv0UTGu/a+KrBu/3trbFByQqw7wISxliOh8M0dsM0+dBLzXFdkWyMC0KzbJd1HciOw5BSIsKtVHSel5Ry6LrQ9Ux0mWdiNsYSYz8MVdo2MmLmXIu1ZhMHsmPrcFmWxls4H30fsLAizHGNKY3jaJhPx0M/DFLqFm/fgGAiw7wszvciZV3X1pq1jIjEjEQiJaVkrTUbsHTbqG3CsY3OtxUsQtcN/UCkMaZtOZ9SAsCcc9cPtRREyimXXEtJ1lowyGxTLltWNca4oUAM0/bsTTnnUpm5yPrx3QcpRut9E0lxqaVeXV0DYFNdlgWR2Li4RmYnTa5vX6ZSL+cl5ppSOh6OMaXbuytEbg2dD8MwWOeY6fv7uSH5MByfDn1vEFxM2TACyOPjSa+m3ficEdbzkzSoynVepeTc3nlv+2CR2FlmxC1/d/z2rHmtL+BQ47uzqrNZ9Pd/HH58dxqe/wh3nz1+8Rfrw7f91YeAr2OJxgKlN2ZvauXdOBYdyTjjvAITaS3zOI6X06EWRuacVtAmVUSrgjYFAd5KhpsqZYPtNKne+xA6QsopK2qtFRWsc8fD0Ts/DJOoIjMoQGu1CTOXnImMAklr1uBWPG8IbAwBVd88URMJwTedShXWqk2XZR3HgYiubq7kXhBRmjaRmHPX7atUZn56Ot7d3hr6XXS4lPeyCxMCExu22+uzNWFrETGmxMxSBZEU0PgOATfLbvD+Ms+blmGTKm68I2MZkDaafaw1pygizvnD8Uhk1jVZB2/evCGiN99/l2IspXTDGLq+1Pf/aGIKPpxPh3E3GuMu52NTmJeUa318eKxSDENJyzKfHw+ndYlSc84Fmn5/f/nm7fH3f/RSmrS8aKunU7re+85DLrWm5QKhD3ufc17nEHwwxhK0tT4c1xhzcCYzWWqOYS5svb2+4den9R/96QxEf/+vpb/5i/2Hr/o2/rTtP3n46vXl4dGyD7s+neapZ2MQbY83P4+nw/GSeXqugFvryFrfFOfL7PsJUZiollZLlZJK02l3df/wmHJ++fIlIUqDznskulxmH8j7TkrZjHG4dTmMkSbTNIm0ZV2JzYbwIaKUMyLGGJvqMEytNeucV25VKyoyZSnY6DIvuaSue//LCYhshJC6EEqO51OM69qFYJiNMel8aG2HbEopu93OOpdSAhUzL2vOteR0ePt2+unPFXCzehsGYEPwfoikBE1hiYkIrbE5ZVCtta6qfdcBoiFTSk0pi6p1PrABVM0FtKWUiMBYKyKIXGpTJCS+XC6q8N3r17fXV4fD8a//zV8paJH3t9kq7Xw5N6A1rrYzp8sKQGw91HZ1dSOtquZWYoxr8Ha+XOK6bJqYec5/9pvvf/Lh1IVwWM7GmttrX6usa0aVVjUjzXP2CljBOfHOB2ev9/ixQpPSmhaRKjTnGsW4wNoudxP9/T/aXYX26UfD1bMPs39p/V1+8+evv/ge7Li3kerBGmzaSVWcPuJmT49P393jnU9smhnHklPNW1jcvn375tntdUn5fDpP49Dvrra/9Gq/N84bNgCt7wICsrVd18NW2Jaa1qXr+5rLhh5Iy1Ktsc6nGH3XNdW6xpJz33dVSj+OoJBz3hxq1ofUsup7GR4hrst8ni9sbm+uO5G6xNR1PWh7P6JpLedUpTAbH8LN9S0b3nxkOZdtfc7GmhA6NiYul1effKqAhGidR0Dr3Xy+bCw0IiqlEJlcsnM+SWratFbv7KatRMTT6ciG1xiHYTDGbPCBGJeUkqiCUor5cDwCgLOu6/rHp8eSS0wJFJZltc7u9ldS8+Uyb
…[remainder of base64-encoded PNG cell output omitted]…",
- "text/plain": [
- ""
- ]
- },
- "execution_count": 5,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "shape_id = \"01_wild_hydrant\"\n",
- "example_input_path = f\"./demo/demo_examples/{shape_id}.png\"\n",
- "example_dir = f\"./exp/{shape_id}\"\n",
- "os.makedirs(example_dir, exist_ok=True)\n",
- "input_raw = Image.open(example_input_path)\n",
- "# show the input image\n",
- "input_raw_copy = input_raw.copy()\n",
- "input_raw_copy.thumbnail((256, 256))\n",
- "input_raw_copy"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 6,
- "metadata": {},
- "outputs": [
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "Downloading data from 'https://github.com/danielgatis/rembg/releases/download/v0.0.0/u2net.onnx' to file '/root/.u2net/u2net.onnx'.\n",
- "100%|████████████████████████████████████████| 176M/176M [00:00<00:00, 134GB/s]\n"
- ]
- },
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "SAM Time: 1.887s\n"
- ]
- },
- {
- "data": {
- "image/png": "iVBORw0KGgoAAAANSUhEUgAAAQAAAAEACAIAAADTED8xAAB2SElEQVR4nOz9aaymWXLfif0j4pxnebe7Zd7Mysyqrr1X9iLuFCWKpGSLlDQjmRIwhgXByxg2DA0w9hcb9kfP2ALksQ3YHmD0xdbCkWxI4mgZjcSdFHeyt+rq7tqXrNzv+q7Pck5E+MNzu7mI3dXNrp4aZr6/QqEKVVlZ933e839OnDgR/yB3x5Ytjyr8fv8AW7a8n2wFsOWRZiuALY80WwFseaTZCmDLI81WAFseabYC2PJIsxXAlkearQC2PNJsBbDlkWYrgC2PNFsBbHmk2QpgyyPNVgBbHmm2AtjySLMVwJZHmq0AtjzSbAWw5ZFmK4AtjzRbAWx5pNkKYMsjzVYAWx5ptgLY8kizFcCWR5qtALY80mwFsOWRZiuALY80WwFseaTZCmDLI014v3+ARwZ3M/t3/zEzmxuz/Lf/E20BQFt79G8P7g4i/yPvsV/9XojovfuptvxBtgJ473FVkos3et/3QgBBU/e5X/6pKGAGETF5IFJ3B5dlvffEJ/avPOFwd3IAoBgjAMDMjEi2Mvg2sRXAe4y7EomqNcsjsvz5f/tPZtHKghk6HhWAwZWYAQEpQ9ydhfouk0Q3V+c+0zLJR7//L6lRtXNl+G3NMvM2Xn3v2QrgvefO659PiztVug9r+q51MzW4ucHdnQUMsBQguBmzSOAABnkZxdWyp1FZV1VpVG6qx0fTvYb2ZgfX3u+P9XCyFcB7hsEYfPTab+H8Zc/tcr0id4Ol1EGtrEdVXeeUAQi7O6rxRLNq7gEXljKGGGS93mhORVmFEEHYmdUo4/H9ZZw9Pnrqh2JRAQAc2EZE7w1bAbw3uKp6Pnv7czj70nqT3DSIa2pT32fNUQKHYrVp3LCzO2WiEEKIgQACUk5VWQqT5iQsQqZqxMIsDsByIV6MCqPp/bR/9cM/vD0PvIdsBfBe4A6idvGgef1f332wGI/iZFyt1+vNep66xhTOMp7Mcs7TyWTvYK/dtHBXzQCCBIkCB9wIJg4SNzPh4MyuKuQACCRM4WDnbLVbP/59VV3BebsNfOtsBfCt4u5EpKl/+Tf++RQP2PtXX3mt6+mDH32+KEZd32xWy7IeB9hqfno+T8f37+9f2nnquadGoymzmJsIEwBCIDBA5A5nCIfgRK7qpsIEAnkOu/Uqfs/k8OltIPSesE0svDd0fX/rlV+lrnnl5Td/9Rc+//0/8PFPfOrD5+cnb7760tXHHqvL4P1mcfet3/zNV05PVz/85/5kWY3MCWZMMLMQAtwBc3KYswiF0pmYyBHAGeRMLLHO8we4bMDFtvN+f+4/9mwF8N7gjtu3br7z6tuvvHT/3//L3/+DP/KD87OzzdnJ1SuXLx1eNlOqxk9/9FPPf+q7SUpQwaEAHDBhBsjNmQGAwCBAIjET4AZzI6Ig0R3QLLHStIHZdvW/J2wF8N7AjGWTP/6JJ6rZzr/8lS//1gtvfuTDT//ln/gLD04WN2+fXb8yi0VpLBZrUCR3AkBOTmARJiKHO7MwCwwgwM0BwPniZsxgmlKKEXH++W7vmbIebzeBb52tAN4b1KBcLzfnP/crr/ziF+48tV//xb/4IzHoT/2T//q//jcv/Knvffo//J//xM7uQUou4iCAmMAkRMRM4sgMI5DDmcgcw5WZMJm7w61PXNdeMqjJ9kwZS8BA21rGb5WtAL5l3M01xGKxSUW3/qs/+oFJifNEt+/d/bv/25/97Rdvrxv6zc+9+Tf6xIHJwcJEbO5MAoIaACXPDpA5E4yJJAqJA+ZOTFJVGZSwCf0CRTe78ixCALbZi/eArQC+JSxnDoHA7fzW9Sv7t155c7fyv/ADV6/deEzG4aU3Tu6f2biWG49fK4titWmZS2IQvAiBhXPKRRSCMTkzqzoYDmfKzNynBFew9+sz7891c16Pq7wKy2KzN9omgd4btgL4FnDnEDar8/bkjU//4j/dq8FPPnl28mDRN1etu1K1//FPPPvpF995+unrTz733PHJyahLEksQFzHO5/Nms3jsyj5IynrEDhFEgbsLo+vm49GOpjZ1J+qdd6tZicIpLU+P7Sm/XO4BaiayLaL+VtneA3xLfPYX/7/Nydvcz3/jd14A6RPX9mMpzXLeLI/3KpkEEtZyPMVoXykWEigImKt6/OnPvHp0fP7DP/zJm8dtWc8QiiLQQQXAZyNK69OiiONaCmqjr0hTu160Pe9cupGmH9Qbf+XS1ScAwBTbRoJvja0AvlkuIo++Xb3wS/+4P3ntnXfuPDg9PTtfTsfygccvSaBRwea6XCyb89PUbvZ3xvu7s/G4LAoJjIYiqBTVs01qvPJqh+PYmUIoAnSzaQ8n/XW8LWzBc59tsdxko3K8d+nKtcnsMFlc6+jUDp/5vr8qoQBs29b3rbAVwDeHu+WUXvn8vz1587fu33lnvVw1TRsi90lzWj5xba+sIjzHGGKMrrZYLZrFwlLHbgyuAzriGOO1ib9+f3WWR/VszylmVTfpm/bsbFlg8/GrfdN4qMazvWoyGl17bH//YK8INWTaNMlMpweXz/Ol6VN/UmaPA2bm256yPxrbM8A3BxGr5qM3fuvu3fupTeooqqqqy1qtabJbZhImgsOScgiX9i9j/yCn1KWu2XTrRhfrtjtPJ+e4f8bn643ELifrO1W1yFTEUOxObm3K2bR44qnp45d4py5ijG7qDpEowa235cnJqO5k9evt6evVk3+Gt3vAH5XtDvDN4e6/9q//3tm9Nzbrde4awEBEbqrKSJHW47FEiUEETEOmMjCFgChGLAZxp2ScEtzJHGZuzuTMgQNTUYSyoAKrvXEzoSQiLEUMMcYScRdhxw2uapZNGeWknE0xuXaeDncf/7hpZgkATPV3bwiGPuSLwjneXpz9AbY7wDeKqjLTL/3rn+TulGMxmsDrSnOfcnZNEoJ7pDDutRFJDhViBjucCG6q5hn4yo0vV4GMWE0IHsRABhA7iBPnvoyt930WYQkhFgQyJ5YIiVSWbE59x+4Z3je5kPu7dHb6juw//lHTHg4Oxe/+3H8gMjLDdr/4PWwF8I1C7kQSRZITE8VQqKgEkZRTFjNlpiKWQjtqa/KV5z6KMwewM7GBCcMR2gBzsFtmNxFnKDtDhAFmqjgFMQKRCLPACUQepl4cUDkmCp5b0uG6QJgltx6km+BLr7zYPP+x7wKwPHq7jmZOmvqzZZtUD2bjIFZd/hCYzYy3GvgKWwF8Q6gqM7/12hfb1SmbBSKCETGF4LEMOfapFxCxhxhFLpHvQDc5rdk6IfKhmBkgdndiZmaDOQvIYQATmJwJzMaUmUyCkLATUZjI5DEaXUU1cwbapZu5JiPismYpwUFVi4DR0adf+Zf/DaVVf/ylZ568Vj32cVx5qvJNm1LtO3DY8XKdJtPHPgxgmz4a2J4BviE0JYnxM7/2rzdHX0q9EzKB3Q1EDoZbyklTIgDwsggShAiaM6xlGEPhydAKMgFEznBmEwKBCRc1bUQeyGNAWYS6LKt6Vte
X4+Syl1OSwpmReqQNUpv7zkGhHEEKokBEWTVwWr/zOy//2r9arNOk7mIxPnj6u2988sfBRTY2o2Knbk7OO0z84Dv2rjyxbbTHdgf4Rhgsfrq2ye0ydUM5Q0GAw81NWODixODCLDtcAVcjQGLBMiFXJjgcmk17R3YYVAEtYEIUAgmhjBRjjEUsi7osR1U1jrEmCR4juVu7gWcyU8tw4liDmSg4GEROJCF2TTt+5gf2lvrOL/8bD1VhefHCL60WJ09//18vpvuW8/poEZhGfnr0yj8d13+tmF3fHgm2O8C7425E/PorL37hl/5hCDyqRkVVDY1bBhpK1tTgDlOTEOBKngTgIEQIzEUkcwsSmSW7uRmIxFnIglARKSCVBQkLgKocVVUZYgEnh4GIicnVNfngLcdCzMSBiN0BFhCD0Ldr8S5UxWtf+MxrL3yebF3avMJm+uR3Pf8j/2EsRqnZWNu4pUBNStpc/tP7V550Aj3CVaXbHeDdcTUKXBaFu4VQOg2SEBDB4EwXRf2AsTExiJliFAQBuRM0MIQLESKiigJLYObIAvIYOAizuwgxU2RiEWJRBdOFuZzmTEQhVmZKYGIBkbupJVBgIiLk1MENYE/67Ce/T2X01iuvLBf3hE9XNz9770u/8Pj3/LXAnIC8tk5DXrzzzp1/vf/n/1eWk4StALZ8DVSzhHB2cvTL//LvTSuBu4gM5RBB2IM4kZqbgZkiiZoLUxAO7AEAaWBheBQmYhAxUQgiEpgkFlGCMFHgQARmMOCAgxgGUjNlAocI4qF5BiKmChoaCSoz09RLEGEzbSzWBPemefK5Dy7Olzc3+Wi9eXySz778M/vPfd/48AlYPnr7JenvXZlsnrl2GcDQkPzI8uhK/xvB3UXC8YN7P/NP/vP9WYxFLMsqEAuJiLAIMwJRYCoiB2ERqaqiLEIUiFMIHIWDSFkUQUIQqaqyrOuyKIuyKKoqhMAszMxMBHZnkICIGCyBpQyhEimIAxPDAQ4gIgpmMBcKlYO8qBEL7Rber8Jo6nFkfVdGeeKpxy33583oaBPz+v7Zy78AJSHfnzQ1Fpt1F8v4fj/g95+tAP4Q3N1sMOWk4wd3f/vnfvJgIkUIo3pUlkVZlkVZxBiJiAFhDlGKGIooMYQoHIQikwhJiEVRF7FiZgrMIsISWCSEIDFG4YuEPIMuDCEMYA5EbObmIBZwcIc7HOwUHIEkSjHmcuwx0u4+wXV1F+25W09SSqyKqvSUDh974vD6BxaNnXWjTov1W7/Wnb0jHEazvfnZ+vjWiUj1fj/p959tCPQHGWxOBvOp9fL0td/6qd0KwKgoIos4WJidyAFxOCyECCJzg4OYGO5OQQITOQsLMYFBRBAJLOJuDly0AQNEAnI3cxpe8tCLOzMw6CI9igA4EZOUYHZTiFgsSTtbH/H6AbozUg9SgQQiq6aDe1VUz37oqRc+/XnVsLEpHR1P3/idax/9/s2yZaanPvWUbhtqtgL4A7hmYjbzzfzBZ37pH3t3Ro6yHk9nU7hndeYgIRDIyIerLRCE2N3Vzc0J5ORMDuKLznYnYxY4u7kBRHA3J3catARAXQlsAECkSoGZyYmc5KJ4x53YTXui6EEoxnZxivO3Kz1165iiu7oX7uCysmKyefvn+Hwz5kuTirRb0Sx2iY/f/sJjTz8TyY4frJ96al+zbitItwL4vRhJAPDpn/svF+98JuccQgRx02yWy/nBweFsZ+YGYiFQVuMggBORMBORmavmrOYwNwOcHJrNCK5gQg9nODHHogzoRQIURBRDZAqm/aApVwdHgAFzMxYhzzl1rr3pUnPjAIU6d6s6nxMp0cU1M0AE9b6d7l2tzmpd3nKparHGs1NBRGl1kjfzUixynzd9sbtd/1sBfAXVLBJefeGX0/kby1u/c36+3Nk7kCKMRuP93Uur9ebu3bvL1fLxGzdiEQ0c/aIpfXCvYgCB1MTcc+pyMrdsbu46XKRl0MX7nymkHETKsjQ3mIcQAhMzaVYiuCbuESQSiMhcG1hj/bxvFzAtAoSk6Xw8Hsc6qCIwOxMZEQypNcskJUbXfX2zoHS4V97vYWbq5Ck1y/ko2mOPTVKzvvfO7RvPvL9P/f1nKwAA0NRLLG6++DPHX/zHnlO/Wdcx56ZbL8NyXrvmG48/ee3646+9/uZLL730/PPPT6azbGAQDzE80VB1LIFg2QK38NQrXHNauWUzGF2URxMoE2WmrmMhySm5NUWQopyoWdu2go7yUvsVcwhRgiBwjpzEMgOSKecsMqrjmDQx3IffeQiV0prB3sypm7PmKmJnGjfLwGREcvPm2ZUPn1jdPbjzIDz/4YNP/XXAQI/0PrAVAACTWLz5wk/f+sw/6LqOHFGchWC9cOu6Xpz0rzTrJ558/pMf/9hbb7315S9+4Zlnnz+8es1UASYGg0kwuBeaw81R1gRYds/QnD33sDSY3Do8EwmxgZQAB7PnRH23UrOU0wiLiFa7Td/3GiKqOpbi5MQe2Nku8k5E6mlBXFCYgAYHLYd1Obt3a4bAg7qXBYpoZGZqTeN9u9BoLy2uPf+Df6ssq+HE/34///eTR10AQ23wa1/4+duf/v8szs7rumZmdwNYYhE8w52k77vj11/Nfds8+9xzVVV+6UtfikU4vHzVzAkAEzmYQO7OYlCKkQldT+ZKIPes6mr9EBGByEmMiIkIZEQpJwI5MVlL2BRVrOrLZma589xDndgBKDFJYIi7wTNCJAqDnZaZQwDV3G5S13ifSw5wDcjebVTArutNvytnXfVdP/43/9dlWQ4lHu/3N/A+86gLYHj9vfrb/78iNWUktpyzOQXlUUV1GVpNjZsGdLDF/dtvE/iDH/ogE1566UtVWe7vHbiDiOBGTkbG7hAiZkJwLwEA4kxwtT6bpWFWDAmCRMuqriCQgZjdtEAbo4CjuefU574PbBcmuczETDB3F8/sShJ9OGJTFGJ1mPZEFgMzl8gCIvO8Wsyn0zo5P/fUjMrDK9/1v5SidPft6scjLgDXTBJe+Lf/aBbX8/kGmkI1Kke7od7LGrqu8zwuY7C0NrfA2X1z5847IRTPPv+cu335iy9+13d/73Q8tWROMDJSdjhdrGhI4IjC3YmHs/Jp3/TqBIf2naYuhOhuwDBHj4I4A3CYJtPsqkIQFncDAHNiMIuaxxiYjcxA4aLPjJyIQM6m7jmGkEAcQtu0ZgqYhOKZD1w6Dc/vV/uae/m9XWOPMI/0O2AohG3O3tTNWdds2rZpKYTRdGd3Z1RLFSnn7nzZJS1ckVLLnknb27ffuX/v7lMf/sjlg0svf+lFNyUhJuaLvzAzsTOJSIjDJBiJRRHHsd4tqimLMFMIQYQBFyaGDz6IMCXAVS33Qbgoy1gUQcRBF/sMeVYHCcHg6p4BN3cfinqHX2eJcquph4ibrtcrkRAlXtktm8s/+OyP/m/cbbv6v8ojLQA1dfcY5OzouFuvWounm+6dm18+uvnpOqwnk3JnVhVRluvU5chSdG0yTbnbvPLyq/Pj449/z3dlTW++9XqMAnImF2JiIhALB2JyZ+EyFF
WIXISinlbjS2U1IWEAQszkBDB5ISDq4cmdhyZiEIPC8O7nIDRcn8ElxL5Pd+4dZ1UaBmu4EoiYiYjMxDqy1vulu7Q9NeuWXHenlT/z733w3/tPsI37fz+PYgg01PkAiEUJYFoX5WR3XY5kfIlJtN/cXc43zUtXrl4fja+AItOqTymZMAVwILKzs5M3Xn/zk9+19x1/4ju/+NnPXb362Hg8NVUiEKDmxMBwQWZqguACFAq2aoeJCNZ3azcHyAlwu7iUFTZ3cxGBqjMrM4Mc5kQgcnaGGUu5szsSgptTEHC8KBYiJ0+wTvsFrFGT5Sa/dWt9+QPf8dRf/89G1z4C+0qVxZav8GgJwEwBZw5DEdqbb739+d/++Z/6h3//aEPXrl/9yLP5xpXdyd7jZTXR5uj+2Zd3upuT2XXZO+j6tu2s67UCYiCx9vbbb1y9cuWxZ5+5fv3a7Xfe/uCHP0bkTo6sRExODDgTg1Pfwy0O1UEC4RHhwE1zasxBfnG1TFAARG5cKCnTReRPZJDhppeY2YnKqg4CgMARRIDjoqHSkDfWn3F/0mctdg5SP/0r/6df2LnyRFFUGMqctvU/v59HqCPsq1m/5XL9wm//2/n8/P/6t//23bvH523uTSXGUYXv/87HvuNDT33oqQ/sHFx1qtPRb477m7tXPuFhv9O6z2b9ouDcbNbn54uDq49/5/f/6ViUL3/2d649/tTuzkyzOtwMBriZuztZ7js1E2b37JZUs2rumvPl2f2UOyJydyYnMgLgxuQMAkPIyRPDikABieEhlhyKnHU8qkbVaEinOgWm6E5mhHTiy7e6zXkYV/HyB/m5/xjjJwC4KYak65bfzyMkAAA3v/TLFa3uvfPmvTe+NK58MqmDiLndOZo/OFnfPmpefP3BJi2vXt797o8/+9iNp2Z7V5av/StuHlx68vtG+88Sx3Z5pM1Z27Rd1yfPTz3/J5795Pfcee3l85PTD3/oo9kyEZupmakPKJmru0PJsmo2y3BV7ZvNcnV+lPOGL+6IHW7kTuSBaWi5DJIZJkSRjcmFQ4yFuzNhNpkOOw1JSRTc2RRobnWnr4h367Cz84P/N9l9btv5/vV5FB6NudrxvTdvfe6/Orn3xuHBRFP/2JV6f382GVWTcRDBd/DjYIfZ8YPl0fH8dL7c2xkfHORbD748ubJb5dOiejCZPHV6ehZso9oG6h2L4H58+8uH1x+/cuPGvdu35svz2WxnqOchd4abO4ENmWHuNtzWCpM5GOJlRdPd5TKZdQDBfAiiaGhIcGdiOLP4ELcLSxAmIAT+ynAkgpmTUSA4Q/uua2HtrWV95cf+H7L7FL7iFbfla/GQP53huoeEf+tf/Gd7kwJuR8fnMDW3o9PT0Wiyv793aW9Shixuxj7bmVw6mAIJmuHoFqdv3Xz76mEd69HebtUsjhfrNSwB6t1KoLrSm69+9qPf+6OHh5fv3765O/tE9gSQA25GF2E3AIMbACEayoFAiMyoa6fdzeIkpd6yckAQGX4Nk5O7mxKTsMvgpgKIiIBYQMxEAhCGedqEnHrqz5Z+cOXP/18ml5+CZ2xX/7vxkGfEhgDvrS/8XB113bRMEDJhDwLXfrM6Pzk5u3e8TFZwHLNM+lwt23rdTtpcJ40HV5+pJpfevpXPF0Xb9ZcOL4sQIcPMSaxvvF3Mj94+vnv70o3rm/V8tTpjIZgNHe6DDoawnhhMRgQa5t6RcxDmUJX1eDxhplgGOFQVDNUEt0CIAkDdlBzsOtS8SeAgwd1BARLBPJReB/ZFV1bf959ODp9zzaDt6n93HnIBDKe+Nz7zL4sQiUBQQJmUYASYpuXy7OTk7MHJWafuVIFLUAEZedizeEDx0mPXn8se5udHy/PzIGF/f1+IyFIoCneHdrQ5P7/zGhMmuztH9+/CDWRwZShIhVQowzN5R57JM5G5mwBwD0KBMarLUV1ZzkKIgaHJcm8pqfYMYwBw8gxTUnNV10Qw0myWBsMhd0Ok855Gf/I/mV37GPyisWHLu/IwC8A1E/Fv/9xPFpL7lGCda0fWsrcRvbAOaZammc8X68Vylc1ICuLoTE6iHtWroty/8tgHTk+Oju6/1XTdzv7lqh4RnFgghat63rSLe83Zg+m4Wq3mbbMiz/BEngkJnt0TLJEpPAEZlkmTae/amK4JGfAihshgckYW6KgIMbCQwo2I4CAfAjcTZnJVTRAZUhjE5CAI3d/MpofPXlyibfnGeJif1BB9U14FdJpb8h7aE5S8J11Ha4VyYHPrzs9PTs5X89XGISFGYSFzuLGEWI73Dw7ryezB/Vvz81MOo539yxKEASlGZrC+XS8e5LSsCum65WY9J8+ek3smNzN1S+zmpmZmuU9po9p6bjxv0G8sra1fB0rTcRnEyIyZRTywhkBOMFNyIyCGEGIABkssOBGRE+kgD8AzVb5d/d8kD/XDGmzUyMkzSN2yWp+ykQRhC94GS+IaYZZXp6dHm3WjqQOULgoLMKTei2Kyf3DYt/3J/TtN104PHivqKQESAkmAqq7Pz4/fKQuMK1otzkDulH1Y+mhJO7UW3ntuXXvyXlOTmoXntWvjfeOp0dzFQiZ1HUUcaqqm2d1yVsuJQc5ERMTRSEAxFDXBYQ5nwAeDXUt5W+bwzfIwPy93U81qZk6q2ndN37d93/bqmQLFWEQTVkZmsr5Znp8dL5ZzzdlN/auD2t2KotrZvSzl9OTB7cXZMUuxu3/AwgBCWWVTazebs/tdt9rfGbebs6bbuGruNp7Xnjt4dkumPbxzbz13bL1QQt54t0BeCZmrD2WbzBC/uBTOycw1xMBMuOhcISFWzX2/oYuueQeBiBDic9//77+/D/yPIw+tAFSzxOK1V19aHL/VZ4qhiGVVlpUE6rpN26Xe4IGLgkXAZoAuFmdn56epbS7OnQA53D2EUJTT6d5jbdedHd1q+m482x+NJ0UZRSLMLadmMZ+fn+S8sfZkfX5EltwaTRtYgmdYdu0996QtWfK8odxAO4aSJpgCZGqpT25KRBc1dQDB2UxImWwwaxkSoyTxwjPFelgacqzjg2vv7zP/48hDKgD3odqn2rz02F4xHtWjUT0aj2NZFkUsYgFg0zSrpk/mVVlGYSbk3JyfHa3W82Eg10WhMYGZRGg2nVGoTu6/vVmdcqzK0TjEIhRVUY8I1m6W2q8mk1AUtJ7fz2kZkIMbtPfUUW7JE7TTfoNuhX6pufWsGHrqTU0zuUUYuRGcxMyNySNxEGIW5gAIO8GdEYJUMDgURE4GzzBDTu/zY/9jyEObLCOiz/z03ynbtzfNqgjOHLMhRoLGnHOSfkjHNx3JONbjol+uyblZz09Pj6bTSVlOzJxImRywyF6XNJ5O0/md1ck7VVFJLMuycrBpu97Mtdl06zlf2nGS9WaR2ql/ZQyLWSJXTR15oty76ZDodEMQIjJwJCNYdgyRjhLDTIOIMJux8dBsEEg4SIDrV6o/4VAYQIKhIWDLN8lDKgCinPvTW18E2LUjX+/OplU5ajtWATEVBZXRTfXkfH3rwerwYCJk6lkzn52f7+4tL
u+XgLsrwYW9EIoiavzGrZPZzhu7+5fH01lu5u3ZYrM4Sin1Cav52Wq19xufufeR53YIWZOp9SLkBnJl7UwTweCZKWVzNwts7oB7ACuhVzDBTM2EKIiwDL3AxDz8jTkJhmpWhxGYmOFkULht1/8fgYdUAEAIRShG0L5LnLp03N8bj2fTnV0JYy5qUlutl03fEOWUN+fnq0qUyJxi03bz8/Od2SwIiSsIOafFYn52csyeP/IdH3/2g0/NDi5JHJ+enMznizRfdH2vVDWr9fly/tY7y2uXWJC7lAtx5EymBndTh7slaE+WxKBucDKwWwYKcyYowczhwDBnwMmiyLAviDM5AJi5Zw9V4W4YjsBm2zLnPxoPoQAGo4c3vvibAs2OpmnLYhaiHx3drwvbufHEyXnL1qhmUyO2Sc2mqdPMcA6SVVfrTd93oYxN32xWm7ZZu+crVx/78Mc+ESe7cF+fnx+/9cXNas3uKfXrJtV1ud4077ywPDltmq6a7O7EZVuIbuYPVLMjmDl7D+vIk3suyDOLOru7gxUwy+RmDnchIfPMxBcpnmEQBhkxuXuQSKEYbjlAILhd2Ipu+aZ5CAUwmJq89OmfPSikzzrd2XFzIoxnV+4dL7w6I1DOHQFBKCcDDORmrjlFihBZrddnp8c6LpumiyHuX7o6292XUG3W5ydvvHx8/879e3cOL10CsenKzPpOi0K7LpcVzdebz7+4/NBTL9++eZw259/9iasSatMebq6NkJl2qp5djOBM5pQN6pnI3YbKIQEogIfLCHMMCU9nAAQYsRDYoUQ8dAITDYNWt3zTPIQCGJjMdnWzgDsRO7m7xmLUdn27Od/b3W2NzBFEhAORuPXDra1qz1x0ff/mW28f7Iyfee5Dk9kBOZbnR/P7b53de+v47PyXfu3Vp5994sMf/ejp0XHf9b3KptOy1mB2dT+Mq2I+P0vt/cX6tDlfiu1al+GZLcEtu5FRdjKQwtzUMXSPUeoTs7iErs9FIS7sTk48DJAkOMGB7CYOgN1hcIAUEIAf1nzet5uHVgBC2YmGmmIRhsPNiqLIqbsY5+Ls7iQhsEDE3ETZcqagBuuz3b7/YDLdSZuzB7feWp/f034zXEaNJ2OpxpumdVBK3Ha5Sw5ycb40tf/RX3j6M59/6fAgfu6L+elr+1F8s1kJmTsG82gzSQ4zdSInqBqxG3HOWcogEkqmizlJICFnmHhiBALRkO0hIRanbOjpouAvwMUuKue2fBM8tALou6a6sOD3rlmzSAiROKoms0wggqtmgFLWxWI1nVQSQt/1nDsO4k6e2tuvfua8ZAZGVVXu7nTJQtPs7dU5p65XIY6BQKSGnMytt9R88rn9tNp57bWTaPbc45OUjZhStsEX1NWzK3lwc/VEUHNodmIOZWUUsroEoWEuN4MdwTMjC4XBb8Jd3RqzgsW+kvcRpggIl6PtEOxvlofwYYkIgD/xo3+j63uQi2CzXty7dzeEABY49+1GtUupTynl1G2aVd+1qoahAMiNkFl8NqkPdmZ7+/uH165P9w64qBWSwUURCd732XKnqet773pVNSZPPfreY7Q3bzc7Iy4L2my6nLKZ5+wpm4FMNeWkOQ3/uaWkgFKpwzse5Oru/pW0jjo5KCgEFFmCRECXsDWg5E4UEcYqAUwv/MJPMfNFCdSWb4yHcgcgADsH16qda2l5r+v73b39ycz61AuTJW/btiwjmRVCUNrf35/UxXrT8MVbFgSEEHZmO7s1BfZsru7qcBInZmED5ZSC66q1uydt17kaUq9kutl0p6vVptk8d2U3q3ZdjoGGDnm9mLzkWbPahaGiSkVSOwdygoPEidzUYFpEQgCInbnpw0QkFAyGax7q4DA6pHLfjKhfI+s03VrNzyc7u7bthPyGeQh3AACD7c/HfuhvzGZTBzuLSBj6dAf3NBGAAHIpCuYCHIVJs12M64KbZjd3x2DtMDTpqlFWymrM7q5tl/pkBk5qDrR9cujZ+XLR9EUVYlG0bVLzLmmTrOm9T94na7vc95r6nFKfJaCYmUSQYGgHBrkp7CtFbiBmEZFJLUVUt6QKCzWNrvnOh7D7YYu7abOiPlnTPnWgZ5//B83qmCXAFLbdCt6dh/M9Mbxci2pU7H9osvn0atPTV+YZMYu6hhDVte/VNcOMTbt12/TdbGcmwuzEjpQ1qzPc1HL2PlvXa99pzl7W7KpmKRQRxCmbucdYAHZ03r748vlygU8+edh01nQ2tICpmasTQVWzmkgI5ZjKPYQRAQYj8iCSU2JCVZbCQsLEQ202CxtxieqS04iqKWaHIPOzI2hXwmHqkGz++EFavfxPjuK1g4/9BWa+MH/cuqF8bR5aAahqUY3HVz+8vP+lKJpyIjgAEel6fevmvcm4LgK8789Pzt66+eCdO/Od3eLJUJZlxcxEYlk1ZXXOZpot9Tnn1HR91+Xd3chEZ6vu86+e3r23GkdrOnMCSf3lm/Nf//yinXc//r1XnrpRrptOyDGMghkmT4IkRi5GFPeA6KYkcuGhBfIoMRQcopNAQBIRS1STUO1yuZtR0HBAblYXzWWAU+USGEZMfWuT2E34Lf3yP7hH169+5EcBwAxEWxn8oTycAgAgElz18Wc+1q2O/dWf15XCspqTsJncfvvtnZ06JX/tjQd37i96RRkDBzRtUgvqEgkMTckcpOp9yqm3lNL5+bLrtSzEkf/hz7z+s7926wOX48efGDWJvng7PfZU+PnfuPPk1cmf+rNP7Exofr5JuR88C7MpAyIUYxmqKceRmpJmcoqBYxWz8SaRcQESAgkcDA01Jtek3skchgYHMgd7ZHEECmNyhal7zKROzkLqTkZCq6vTW+nWr2L2ZJxdBwAzbBNE/w4PrQAAkIi7PffJH/7pL/xSHaTtMxOR581q3rXpC7dPXn9ndTpvLu+NQ2B3EhEi6ZQYoYCTavKUdYj7ddP0i3Xz4HilRlUlx8fzF169bxyySZBYldXly5d+6wtvf+cz9Q//iX2gW602qsMIGAOc2YsYiqIO1Rg8bDIAk3patXrzbj7bFDeuHcL0+q7FURQupBxLnEBGXVaGiQxTaBgg88hRPCc1Z2Jn41AMfZfZVVWbpusfnD12+ZzOP4frP9DapergcZiCGduyod/DwywAXBig23f80H/whZ/9zwtCNjua9w9O2uOVLbs4GVXTSQ2g6/rpOO7MagkBTmaU1JRzzta3Zu590lXT3X2wunN/8eT1vSrGz3zp3nK5GdUyqqQeldNxSO3Z7bfPfux7Ls/ni7ZTIlfTrCbko5JjUVb1SGLFoeYQQTGBzfls0f/Ki2c0uvbcE/WkcnGvxxMPEWHkoQaJulrmMhRtn90RY2DmlDNrGmZUMmOzOV0s10W143AhiUVZVGMDjluZznar/CZvXtHqz8j4CgDAhgzA+/rN/HeFR8UasVnPmeCOLiXh8Nlf+Ecv/NzfbzJS1zZ9ajZpOiv2d8fj6Ww8ncZYRKS9IlvfdF2vRstNd++0feHl+1XEpz52fTYq/8G/fPG1O0uW8Mxh8annLzvjdL559ur40g6a
3lIPJi0jykiTWsajqqzKEAuSIlN12o9Wm+7qfiiqwigen+vhbjGbkqNINi1Gs/GoZhIWKmJhEJYC7uYWQggyWKUb4FVZIK3ffvOLq027t//Y3sFhWU+lqJx6ak6tPzdbCwNehXrsYTK3KzvP/yWW7XCA3+XREMAfFv7+4j/9f97+7D9uErq+U1WROBqNytGkrCsCau+jrVO76ns9X+fXby1fffuk6fTjH378xpXxF75869e/+GA6Lusi7M/i4cF4sdb9kV/ZCaqq7gLbnYb9WdwZlbEQFolFDLGAFLfOaJPj/owv7cRRXRF7YHYqlUuEEckYHMsiVlVFwywNjjbYnjCJMBHMMhxVIev50Z13XhtN9w+vPjnbveSetTmh7h7ae5RXpi37mj07RTgjTuTwAxo/JNd/rJedop6+L1/Ff9d4NAQA/G63lANEQ8nAb/6r/+LBl/758XkzVEbEsi5H0xiYtUW7bNeL07PNnZPNC6+fvXNvdbg3+uBTh/W4+rUv3H1wtNyfxTLGyShMKxkVPq2ierbsB7M4G8toFHZGvDsuQhFYhIiKsoihMBJwWdblRYO7CAk7RY5TSKVeqqOq6lgURZQQC4CHuxrm4ACRM3EQIfTLs/uL1Wq2e3h45TpBPa9o9Sa3t9gapuxQKmcs2dZ3yRgQeNEnl2oUvvfPJ/6rcXwZro/4jFQ8SgL4fbi75j7E8gu//JPvfOafbJpWjevx2Cnkvr391jur+dKBddM7hdN5kxQfuDablLxcbV65027avFNRVXBd8CiQUC4jQohdprrgq5dDGbgsuC6LWARmFuaiLEOMg0e5S4RECWJcUByxlA52jxxLCRVz4FCEKASEWKecgwyz6EGgGILlzenJ7aoc7126VpQj1jVv3vLV26RNDImQXErE61p9gKsozRft6DPoV8SFh5lZpKoMh3/SP/q/Iylh2UkeZdv0R1QAuNBADjH+zi/9V6dv/Vp79PK94yXMm6Z7+9bZzijcOKzH46pPvkmklivuc9+1Td5suhiYibOmwCSEpB4DqpKiiAgXkeoiSOSyjGWU4SpXQpAQiUTBYJFYemQOY0d0MEsEFaGoYlkRF05BJBJBJAweEe5unuuq7tYn92+/sX/l8Z39S4WEIh/J8svUnwkaWEMiWjyO6imT/UwBIrGcBNy2+79OPKKwCy5dXSa+4E/EZ/+Dejy5mN79qPLoCmDgq5Oif+Of/e23v/Dzm8Xm0l61M4mtXN2/8cEeo3tvvtif35xvNtKfu+a+TV2ngc2NQGCiEHiYXxQDlYFi5CrGGIWYijIOng4hCIcIEgdBonNJsVCYuZSxYimcS46FhCLEiqQyEiEhYQZijMOEgVjEZnl0dPedy1ef3NnbjZzr9I6svih5TugpL3PY98nHvHzCuHJiCiWxMIdQzciyubIAQ+ZIM4/lbFHG3Y/Uh8+J0MNaFPOuPORp0HeFiMwywB/8vp+o119cnM+f/sH/xXgyKi99uJ5dBtAsT3/6J/+P/Z0vKiQ3qSy4KtiyqhoLO4gZzC4IIRAThEiiFEVkIpZAIsMYC4eARBFAwQzWdyARFgMA4RBFSuJCjZhAwgDBXd1z27HwqKqX53eP792+cuOZ0WQm6KvNa9K8EX2FvFZwHn1yLU8WcVfd+r4f12MiQZgQi/ZJypLYnImGcj+3tFruTR3168u335k+/efe56/h/eNRFwCAYYDKeOfKM3/+PwV8sn9j+OeW+5RzPd0vdp+KD76cN972CrdpFckd5AQrQmB2kBBRzlZWEcxqEBEwhRCImMLg6UDZSN3gCjd1ilXJEh3BWRysTlACu7CxmcEE4g4iK6vRcn58786ta9efqepx4fNp+zr3D8SX1i9ScbgKH6TySlHEbHr3wca5MMLONIBcQiQhNzJlyp61h2Yge0rNfC3Fanp5Ynd+hh77USJ+BNsJtgK4oKjGRTXG0FJs5sQcigg2MyYi4RBijNE1O4gDQy2KlIFpGFNN3CeVEEZ1YQ5jjsJE7ESqxEzO3GfLOVMIMZSxqoiDUQAVjEAs2dTJhAKyOysxmMjI62LcrJa3br55/cYHYj2qeTNLr0l/j32dPLSj71xjP5Qzcm07XTZ+et5ISa2uFuv2YKeWabZUqA7txb0Afbfp2vVkNC6KaKrnt+e7B42+87O4/iMiwTSz8KMTEW0F8LsMRdTMDPndYaLDoFImZsa4Dn1vKecyDFZtLsLDOJfAsdwpSYa4h0QCMRuRc1C1rDA10zy0IFMsnRgkoAJEiovQiYgcMFdXEDHcYqz6vnvrzdcuX34sVtNxWM/6l0J3jBAbu7oubqDYK8iYqe+TeuXFbHZpslrMm2bd9eVq3e9Nm/2dcVnW5gURmycWKooyqZVFZKJSbD3vx7O77c1fPPFLh09/8isf/ZE4HG8F8Lt8rd1/CJpBcHdhDhFRJKXMDDCRE4gxjDV1NrcokSgo3F3MGcRqam6BxYko1E4BCODSmQMLSLIxEwPuZkLEBFVnEcBfffkLl/YORpP9OnQ76dXQHaM6bOmwj7uRRdXMPHWdUkXlfillOZ6mdn18781qsqc2W3f96Xx1uD/amY7KekeVclIRFo6anYWZXATN+Vk97bh5ffnS7aZ++vADHx4OIQ99DelWAO8OE4iJiQ0IQTw7EZVRhBwgCkKgpFaW0cyZWULos5EQifc5k7MDxKLMJAVCNASmgjgwR1BwMyczImGGmxMnUgdVhNdffjEKT/cOSm539XVb332Qd2J5fbpzRdVAAnTzkwdcX4qTy2YI5Dk1xpGq/c3yqOhWWuwen/Wrttsdr6aTze50yixJHWRCcDOIczGS2U6Xnes8LY+rfJSXlculOJo+9BrYCuAbwJ05gCTGSG4GF0ag4O45Z4DLsjDLLCEWFGN0IoerqaqqszBIAjiCRSQSCXMEC4swBzOYKpGKiDuIvddkznU9unXzTU39tSeeDaL7eBvLm+ctvTXXuju/QvVoPEmb5dtf+NXy0jN7lx/LmmIQd7SZR9N9Libz00mzvGPtgxirvsOa08HuCK7mUXOnqR1PdsrZrkymKErte1+v+tUJtI8Fo/mNxWmm6//9MD18uGOhrQC+EYglXJxkXQkY3ospK4GzoQ5Sx6iqRKIOIQJ5n3IyEgGHAAkikSVQiEYBHACGiRO7EyQSRXOQGRxmJhLXy3nq2sPDGyJyEE9Dc/9s0b5zFrUqumT3jk6m58df/JV/s3P9I1c+8Kmu76IwMbfJQRLEy8B7+wej6Wx+fjQ/upM358/deKauRplqNzhoOr003r+SIevFKq3u9O3GcxP13JBbLuS0mhzs6OpXNPwZqXeHh/C+fgXfLrYCeHeEiZiGMfPEBGdmzmaxKKoinq+ars/jujRjZ3F3UldVzUYsJGSgyAUkOgAqiQTExAFgd3cKTgzHkKC3nAO6vjl/4817z37wOSnL/WpTbd44O9/cPacs41hUYCbLn/6Fn5HZY0997481TcMMp9h0bu7uamYSAwlLlKp8vCinBRqurmx0DCAE3rl0WMRyvWqb1UK1d21JXAKhbdU6p4216Oc
PZo8t0bTp+l+O5QiwhzI1tBXAN4ILc5CYU2sOYlK3TZ/2JiEIJuOq7zMTcWBzJ6Y+55wTM1jEiN0vCvFICoCJZZjva25wvhjDwW6q79y8efzgwRe/8CWG/Nhf/HNFUU+KNOpeW56dHJ22G9+RcuRgEbn50hes2P3ED/3Eer0uglRl3XRmDoYDRsRwZxpiLp49frWIpabePZdFXU+mmmyxnmfLwiGEAAtsDfKa2JmYQG4ZaOc3X997zq25i/KZ9/kb+LbxEGr6PYeZGFQURVWVQWIMMQSZjism6tXLIpRlOFq2TVIA2dAm7Q0hBGYGGMTmnlXVoO5OZI5Nl1N24mFcqhMBpkWUplGTvevPfPDy4SUR7Ifjfvng6HR1Mu8UBBTguDk7Xs7PP/h9P+YAwcqqbHrr1VQ159znrCmlvvWcmYhYTHWzWqjmejQt61Hfbpq2cVCMBTOg2S2bJ2gPB+wiEyUSQwzrm2+tvvSP3P1hXSrbHeDdITcQ2dD1XpRNsyRHESWb1iW5a1GFf/SLdx6cbv7mX32aKKoDijsP1rs7k3Ic3IdKToIZBGYAUQhRQuEGNwc7zC11ly5feua5D/9o6tp2oWqX47nN3zo7W8zX5uU+UYAw4IujO9c/9F3VzuXUNzuzuuut7XSo6XB3cyPgYn5wygEYxl2OprsxSt92AKQsmIg0qzuTAUZmcHM3chssdwkOdjVU3D3E5aIPp6zfW9wxtBP4hasQI3CIko1+/rNH5+usik88O5vMJp9//TyEIecpdVUVRUEUWMIQ4A+G5qaqbiwCgrsRD5OAFW4hFCnl49OzZtNNR1J2dxZnZyfn61USKkdcTojjen4yOnhievXZvttMJmXb22rTZ9U+9V1OXZ/6lPqkST2bGdDlpODpbIfh3WZNTKEoB4tFcyNX9wxXh7obczBX8swXmR92SMH64IV/CsBM39fv4dvCVgDfAHThMTV4thVFjBKyYlTFL922/+Z3TlLKH7pR/tAn9154qz1dNOzWtmkyqZxIjSBEMDiyap8z4ARyh5kZhhe1QZM7Qlk3XUtAVRVjPmvnx/P5ps1B6h3iSDLq+wZhvP/Ex4ikLsXV1k0yzVl7s+wOB5mRmquzIXTZk/F4MvHct+0qFKNQ1HCQm5sCmbwn68kSaUeuYGYJZu7D4AFiDMmvvABAD2Ph8FYA744wK8yZSAIJg4iYs/mqyX/1h65eu1QuN6lX15TM+Z//yv0XXj8ezsFD9ws7CA64u7sZDS9+V8/ZVV3NLalqLAo39F1bFGE6Fl7fWq0W81XXWCWxikVt7uZhsv84wDEwAefLLqu6Zfdh1BKGak9msHDK6iS7O3vsuW1WVTGmEFwzXN0VrqQKz4QM9GQ9wwkGihxKd3YiXHilmshDuPQHtgJ4d4iInM3ZSJxYRCRwWcb5Jl3ZLX7gI7vMlLNdv1z/8CfGhwejz7y+MUubpnvw4NxdJYg7yBGForC5kZm7ETvBhTHsD1VV566FWl1VtR6186P5YmNxGkY7xtE4as7V9DCU46ypjL5puy5lzUkH3zk1NzM43N08pSwSdnZ3Rbu+WY+m+7GsXROgBIc7mTkyyOBG2jOUCUTkTOrMLEQyuOkRAH0Ig5+BrQDeHbUsQViigwFICCLMjP1pMHWnIMxuKAPt1fjUU1VRVP/iN0/XTUchzHb24DAwmN3M3Mx0OGI63D1bzm7GgQAYPBShCh1t7i1W7bpTxHFRjyXWORlCFeuZEhUBmjZdMiZ4tr7vzYbdxd3UQA5y0Hg6jUiW1rPZXhFL0x5QuLoltpbQEXr2BO8Jyu4EY/jQgZaHw/QwowmAbwXwqGJmgzcnwYmFQyRihwjJqKoc7g43ZyaAUqZI/vGnppuemyzf8bHnJZRlVV++tF9VpeJitLtaMreLkaiaVXth7vuuazchIHa3mvnZcpO4vuSxAhdSVA7jahdSCZyRF2szc3cbXLfczLKaGZwcMPB4vBMF2m7G490YS9NhlRvM2HvyBE/kmaxnzwxjcnFjN4KRuTuIxd1ABEbS/H5/D98utmnQd4GZCYFBw2nV4cbMDiUzJnbKvd09ayP74R6mVTS3a7v5r/3pq08888HkBQeMa/bVfe8pUHTNIBBHyz1JAQI8Ax6Yu26Tu7aaOJbHy9Ume0AcSTkJsdKkKnVRTC2nKqaUclIhVgBuDnh2JRZGIGFzr+pJVYr3m3o0LuraNZFlAgjmSKDkDnZ17R2ZXdkUaoPbkA8X3YOxrhs7YKHPD+0ZYCuAr4mqSpBXX/ztBzdftL7LlgsexvhmFiYnJnInEhzsj5t1r8NkbfWqqq4/9fx4OnXisiyau1/29gxxH4CTELG7wUCcLWczlyCAdW1TlLHwZrVatH2muKdSihTEklPm0b6EEMXM+qYXDFsTAJCpOblAADc1DiEGtrQeV9W4rmDZLZNnEDC84N1NE3kjlgAPcCYo4OZMpBebGpEpYEzu8GX70K6TbQj0dXCAus0ikKoBzmoEDiAB0cXKcw8s+9PqYKdQp5Syux/eeKqaXQaXk9FI5m/E/jg7pWFS/MU8d3U3y73mPqWWQX3fa85Vwbq6324aFDNUk2K8E+Io99abjKe7QVgo98kdZLDBdNT1YviSuWnOBI8iSKs6cDWqzM21I+8ICleiTNa7NsE34j0TYiyCCFmSi88EHtoAhgytGZEnxOf/zP8UAD2MQze2AngXREJWFQ7MAhE1V4M5uQhYQMTMg/mtEDmIyroY74G5Go2luX/z7Xdun7SadbNuTBWurglucCOi9Wo9P9+URZlTLyFKXuv6rOnMilkxvcoyIomn52dS7sSyJk3DaDK7uKglH2J2zZazurNwVZVV1IK6yXhEcNKGrCMk8p6s9dyabmCtak6IXk3iaEemu6GsRNNQ5cqEwCTs5E5gZs4py8PrprgVwLtAQiIBjMHfapg8oKpwhFiIDIZtVJQlGEktY0REpuB0/oUvvfof/b+++F/8s5uere9TUnVTN5jBhz/NGApoSh2QvZtvVkuTMowOQFWI1fz8/PhkubN3oKkldF0yw1emODnMyOAGmDkBCKVZW+qDvZ2xsLFuGC2QyMwtu7WuG2jbde26S2AKBEvrvD41VxCQG1iCI1s2NxYoUJQFPf7j7/eX8G1kK4B3IefetCEHAUxg5qIsJQQH9ynnYbKFI4hko7bPdeF93zC6bnP2d/7Z68cr+45ndwzQnCwnJ/aLuhrT1NdVada88dqXUp/KgiOa9Xq1WSwsp1BVVVW+9OKXi/GuCCOtNHXZaLhPgw8qchhIgsTAIubG/Z3IXYjBc0Pe4WL6fCbPnjtou1xt1ps2snLacGrgQuUe7zwhl5+j8b6DzBWgPmk2EBHHor768eEHfj+/hm8bWwF8bRwAGEbauynxcMNKIQSWSExZzYycOQNgGk/Hy9bb9XJ9fvyPf+pX33jr3jNPXPpL33/jv/edB302EGBuDv9qZ4l7n/q6npTVmJkjabdZpKQ55fndN7w5e+O1l2/ePnr8Ax/ou4asy8lczSxhaE6hizJrIiECF6NZ2Rb6gDkylKyFJr
cE62Gd5TV7d75ozs4XdaRus1gs5suOUnkVxc56tTqeb1wmiGW2rI5QFG5EcMTYbJbAw7r+t1mgrwMBQFUU4+nobL5kuGYrIrsrkQCI5Qg8jK6GOmIRb1zbb9fN0f37n/7sm9cO5Cd+9MkvfPnO6bKpi9oM6nRRDA0nuLozE5O4mwizte1mvWxzSuzStSdvvvrCW1dvPDEaj7pmEwpXF3UnGPzC7NrZfQh+yrEIvfHCr+fN/R/6s88xkmlz8RkYpD15Pl22D04W08qX50dZSUaXRtUl9PPl8t6yD1QemIdp2KU+5bQOKAUoRtTQ41ztAvawdgZvBfAumPXiDZD7PgVG06a6iAR3EhICYGSAwNxUJ3UxLmix6v7SjzxZj+vTed92SROKkfXrPgCFuSExE5wcRCxDlShRprzqu36+Nncva09tP6qKq898wM0IlpKawUzB0YXZjESIyOAKZgqLN3/16Obr3/un/8xktpPbDbkTjMg8ZYYuN/29oyXpct0mDiGMLu/sPlbRqlssVzoOox0QJy/OeRRCUfhN6MoohNFYL32irKewDH44l8rD+aneU8z7pSeWWATJcMmqRQgAIrF6VjVhciZ3J6YYpQgyiu2q619/e36y0M3GRveXu/tUmaoZg0wzEzEXOhRemgm75bbrtW3dCNm8adrLl/d3dnc9p0C5z+YAXMjNLDkxAz6UE4WaN8fV+vU//+M/eunxD+Z2zcjug8NQgqPL7a27x31qKTekqPeuT/ZvTMpek7W8F8uRk+deATZnLQ9QTorurTLdz+14lUelmYEe1lh5K4B3JSRjCjwZT5bnd2OMQdgvHDadWQxs7k6AuIA1J1WURZhUXocSWZPR7bkdbfL3HbqqKUEymKzrlkU163MiT26W29T1mtSd0PU5CI1mhzFETR17Z0pOgYYrCFMwqZmzZ6MCHvLx4TMf3736dLdZiCvI4Q7PZtk0v3PnaL6Y525TCkaTnWp6OKs09drxpViNcu4tKWgYzGek3qLoiw/uHt446fcPHv8ILPND+vrH9hD8rpiFZCH1zdnxLSJ1CDEPNdHE5LjoniIi5giiTdtKFJGQDfs71ZOPjQ7G9J3PjYjr2/fWQJ9yNk2W02oxt9xHttSttW/7vun71PUKB2BmJkXF7ORZLbsREYEuCqqHkaspmVFJecm63rnytPUdaQK+kmc1s5zO5/Ojk5NuvepWq+haTi6Px3WfrPE9UISbqiZV5shExMFZYlEyydmKl7nGQ3v6vWArgHfFJE48t+y9g4oiMIva0FBCwhxCEJEgUojAtChL5kBBQlmYY1THqipSyo9fDg+OVyfHZ5T7nHtiarv+7u07fbsyy0nbTdNkRVY/X3TLVefEFIObwxODnBie3dXNNGdzNfc+K5Npez6aHcYg0I3AyX2o+sy532xWp6fHuW2839ScQzWVavby20f31qPxdDdGEQl1Na5HU5ZCQsGhCLHM2Y7vvXx29MpOfvn87pcZ/FD2gg1sBfCu5BAo1ntGAe79ZpG6LmcNsRhqxyJLINaUVJMEGVUlAAPFULohBrl6abI3GeXUBabbd1dHJ2eunWuuy1JN264NHMgMCgI2TVpusjubM3E0zaadqsF1eKWbZTcfbLfcCWntaTOdjKL3MmwSUDfVnLq2WywW6/kyt621jZCjmh0f3bn1yud+57d+/Y233g5FoU6qADExO0Eksuvq6OXo8yJWu1US78H8uwOmHjoe2tjuvSIECLLIeKNtyu3ezk42LYUYuVWrqjJ3HQkPXv5BoplKCFBSU4lSEKkqvNif1aDUZTw4bTab/vBgnN0PL0/repL6xlWZKat12ZNSyuYwMMMze04QkLkZETsFEMzYXInZusXumHYq0BAkeTY3zdp23Wa13MznfdulphHrQ6yyeXf24Mn92HB/6/UXLbWHh4+PZzuGYZJ8yO08L96sec0SurY7OnHWz1X7T8dq/H5/D98utgJ4F6zP1i45TMfTva5bZUCY3NG2KQh7v0wZkcoQgqs6SGJBEDG13LLRpu1S6svI1w93Y1ieLNrxqOw6Ozpdq5nlLICacyzckbKlZH32pGoGdzNVh7m4G9wNDEOGE9jgbn2T89HO4WNRkA2uWgQ11b5PTbNu1+d9s4Tm1G2AxMVjYinmVmO9N4mZ8OD2K4vze9ee+PDuwaEavJ9L81bIawOn5IBEBhUVEfvQ1fkwXgVsBfAumGUj09TDUMQq9StQpqKEVNHX7XqhUteFbNpEJF1qRnVFBAdCUbOm5fm663OZaFrS4d6oCHLvtK0LG4+jZph7125AYhZg3icnosgkIOcyq0feEGUjg2cAQz87mE0BguV2FIpJVQBEMMBNkVLu2jZt1n2z6rs+92mzaSZ1PF/365O3Z2Ic6pTVSIsYPK9efvHXtLy6N5s+MVvuT0OoJ127TtbHcra3O5nPngvl2CzTQ5oIejg/1XsJVy7j/uSug2eXn/aMgN40tc2iKLTPns1HbuzebRYcCoq5S01RFH3CpksUisAh9S0zMeFgEqH5rXvrrDabxNT773zp7PGrk1kVXR1EVWQTsMj+k58c7+7Z6m6zWVsMFMqL1noDLBGh7VNZVLu7oxhFiN0TKKeU265vuyZ1Xe419X3XpaOzZIZi1qNrtYyFBCYCMksgjrOxvH3v1kG5e7h7EENxdP9ujDwZ7yDKvCszRoA9vEeA7SH43VAS83JShzqY6JLgphZiEYXaNrtaJCVwITwRmxZgz2ZqqYneDcaJl3bHV/d3QhAJQkSzcXj6sWlBWC67suBe5bWbi7ZtlaKIjCupCqYQUYzH00v13pNh53FH6LsOX+lVcc9uSRXCoWSvC3HtTVPKqeu7vuu0bzUnN9eMvvejc33x9fNacGWvKgJEiIiEIZ7gaponoXvicl3Ewkw3i/OyKIqqurQzOeovH9z4kKnxw9gJMLAVwLvALA7yYieWdV7f0+a8qMcUi3q6O9q9PN7Zj1H63DuyQft2k1LPzH3bsMj+ZDQtwlioZBMGEYfAMYTdcXj6Sj0OfOf+8pkbVdPbrQdzF+GiDEwxyqTk1Z0vHT+4Z6Gqdx+bXnpcymmfes3ZTd01qYIlBKujiWdY75py36bUa241dTmrOhTSZlT16IWb+ZW3TiJpiMI0JHXYAXbLKUWxuiSRECU889yzo/G0YDtaF4cf+lGYPsSrH9sQ6OtARDmlZnWu4ISqrEpfdf36bPfwcuBytdpwkGq6S/CUun6zycasfU6pPrgq3AvR4HoCdSISHhwWSJiJnAk3LpV1oNbt2euT1aZb9ymWtcjiwWm/aM4/HtmR281zs4Or43q2ezhuVyeb01vmORYjNSVoxQhkrtlNU9d0Xdv3nfZZM8zMHSwCknEdnrwye/vOZncSRpPJ/owBJ8AhZh5DrHemo6oQDiQMRWr7Moa9T/zPYr2Lh9EM6/eyFcAfjpkxy9G9229+/t9U3COQOlZNLiT064WZ5eSTnYmb9tmLogiziOlObpaWe8sZWUPNRiwE00wECQEgp4sohlRitKv7cd2aaz5ZyoPj1aWdqqpLEX/99
uqNe6vv+URz/dp6M7++f3hlsnulml0J1bQ5fittTsA7IsGNI7ND+9Sm1OW+09ybg+BObnAzVkOI/OyN8ZOPjdTx+ZdPPlWNDi9fUQhIAGRLO9NqXNWu2ZFzmya7xe322tViCreHM/Xze9gK4OsRBBELa043fVFV1WjnsjXH7flpUVTJKJIu12svx33OwiFGFtkFASnBKhIZ5pA7MQ1DAODEmZlNDcSBY5Mz3K7MZFyWd07T8Xk/GlUHM7QZX7559lsvLv7K1UOcvbpq7/ebJ6q9G6Ec+ep4Mz+RS7s5ZSEKwqnvUupUs7uTs7uqqabBdsVV7WBW7k6Kx6+Wo6r45IcuTUbRo91bokkWCkk5xViTE6yF5/qSWPzA9U/8DRANUwve7y/h28tWAF8PZqojnZ63CLWEMoRgvId+aWZERVZ3yzDdtHm3TjkRlaMAkBA4OnF2hg+xj7kTYAIRMSFRmKmXQbgkdx+5Xt7lu6f9IvnOTtUpnrm+c++0PTpZf+jGrNus0snb5yf392bFrD9JmcRSRBZktyonyylryoM7nGbNWQeLq0ktT12fBPbZOFal7EykGlUhhBhoOsbNU181ObXr6egwjsP6dCPii/7G3of/hzZ4gD0CM4O3Avi6OPrsudmg4J0nnlidn7oWcXKIfoXVcr30QNatzylMbHOmNK7HO8en83HpVZDsUGJhYRI1Jjc4OzOZE8BEIFdXuPfJFq12vU5H4dXbK8t6bb/eS7FP+ZXX7z9xUFSR+txHT3mBJaU2cZFWdVms1nE54tkoXNybEZupO0KQ7AowURCxy/t1IVwIjUeFccwmcBbGp56p3jyrT3R6qpfuNgflcz+xf+35wpmFzexRWP3YCuDro2arVVcV0jWnaX7f+xRiQcw83p2UI7M+bc7S5rSYclGPNr2dHd3tNiu0VF46ZHLyLMxkYEBBQZjBLMGRyYgIQZiIll2ab7pstDOJz1wbzZcdyC7tjdT4/snyt7945/s+esiSSynMedH5qCqsXxbV3oMH97UrP/bcE2puTlB3x1eCduIQYyyqUE8qCCNGDrEAlyRBEeoyWLH/9J/9m89NHv+9H5kBwB+defFbAXxdiLtMMCrrcn3yTqime/vTs/nCKJZlzVaBSieGm5tIiKT62P7YpSKIMIFBrk7EhOxwOMDOw79xY+q9DyKzaXG0UnadFmEc6XAWV6tOIdnk1mn3+p1+XM73d2JVFvv709m4ys6WTVM/qWNdjtTYnM0AODGTgQhlKWVZpNRPxhwjIjsLZ0gUyUazcWhk7B/5j2RyQ3OPoTON5asf+3185P8t86gI/Y+Gm4VqJjtXNx6BkPumWZ+j28zGBcFAHIpqtHu9ml1tacShclDfqcRIMgz/GizGjUgju5A7k4MJABOJSCjUiYTBVAQuSw5CTLY7LXZG8th+gMQf+Pjh4X5VhOBmr712v1035BqJ2vV6Nqp2Z5Oc3SBDjhXOYIlFWY9HZVXGGMtqHMqxxLLtrc8cBHu79UInxSf/9+XODWiWUIiE37P6Hy22O8DXg4iEycOOlZSaO0y22rQB2RZ3Oq2K2VXTjmASoktgz+MJW986mAgOJhpmyrCTM/kwIoaY2EhBTh6Ek/lmnV01FCxBHB6J3b0QHK/zY3vlEwf02OF4d1JmzUfznf2dcdun5UY3Cas2l20figAncg5QIo4MibEqBeZ1XYUQhdWUSOL+rOiMmtF3xA//D4rZVWjGQ33J9Y3wqH/+d8PdXbOXUc7unk9H1USo77RrFhqmbm2IlWrPhGw9EVXVyKvK1ZgY5OZOF2NVxNyIjJj9Yng2YKqkVRGKEEd1ZBgDG5JSHKYEf+ZaLAJ6QxTcP0sn5+2Hnrv6/Ad2c9os1/7qzcVR22+y1Z2GyAKChBBcJJRl4Sm5e1EUTpTB2ezw0qzRInz0f1xf+67ho21XP7YCeBfcAQ7szHF0+HSNZW5WkAgRWHr9xRcuP3b98LHDgNS790bkLhycNTmDiJnMzbVnKEiGcUjEYk5DhCQCuDHTOER265rUdTaaxhBYs+5XeOz5ETF1ve4WJUvRJ2iaz0a8tzO6dGnyxVdP3z66P5GrdTWFe86J2auyJLPkHmMRiiKrjwvf36m7x37cdz5UX34alp3kIZ57902xFcDXhwruCSm7jPeupPNW+2U5HVPGg/P+pz+7+PhafmS/Oj07652Lncvr1ohsdxL73o5Pzvf3x1VZDs4/ueuLGMEOBzkAEwru6s5qZGYELqJMR9arSyFl5dmdjaPQdFoVjGsHXlTctf2oHsFpPB5/95/Y33nz/PWbx2bp8qX9UVkSUVlEdxfLl/bqKoazZc/jw9O977z87I8DcM0kYbv2v8pWAF8XYvce1lJxSXNGfZhyXQvBtSzi80/sPPOBazHEUI3bTVPl5fH9s4ZGV/YeJ09XL48YIEtOzsTD8ZaYSIeR6+xQG66bhCIx3BB8NuU2cRk4ErKbxFAXHAMKcUYeV1yUJYiZo5khVB/82PO7B5dee/Pu6vwkjGVvZxJjFuEHnfWzj6+Snlblh3/wfzJ8Gnd7KB2evxW2j+PrQfAQomnLugKVCCUH6XS9TvHqQXnjcrnq29XpiqtZWY/L8fjZp6uT+aZfn4GliAUZG2ywRIwxsMPdnGmYKGNO8MBwuGeGELG6u1clB6I6BiJycyErxIoiwgOTM4FD4SQUAois6688+YH15LtzdclT043rLCIi46eme899P4ArAGAwgJkeWnefPzpbAbwLvQmZWPsA1dWi3sndiuMs7hWtr2M5Hc8K8b5p21iGJuXRaLJ3MNmc3JIQ4ygQg02dCMRu7uQ+TM0mMYBI3MhcmZyFaEjim1l2CMASBDkbmEIIIgFmIJKiBgJIHEGIQOTuo4MPXP3oD//BH92yATBwCNt099diK4Cvh1kGl5abEIvezF05SNJcxhJGmzatN7moRJIK9xKr05PToox7ly6nvm81F1QQscMYg7Uawdl9sEVkctr0iV2nkwpqABEzXJk8RgnRyHsQOwWDCzNczYk5OphISKIaJICIu2ajml2zfCXCcQJzYGxvet6FrQC+HqPJDuBOQakOQYRdzXK2oqhAFAterdbz+4tJyNV4HHd2RtVetzxeL+dZNY52nS5s/N2IAztAzIARsWV391FZDJkhgoCIkAkWA4XIwi7miBQYzJyNhSIJA+7EEO66HIoKJMPNl0gwwlc7d7fH3G+QrQD+cIjIzH76p/7fmpzAsZyq5tzOgwTVHrxb1qN+c76zuzubTmEdCPOzs7KogtCDo7N6NC0nJcFBcIpGBMBhBCJQch9uw4QpK7fqMAMLs7TOjDwhMIOYC0gRmEKAiKcONlwtUG+essVS4ICLP+xtK98+tgL4wxkmwdx+68uXZmJdEoIwmvWiKKfC0jTLUaw0JzUugohUgIdQaW7UbHZwIxZF7xSYyOEMZzZ3vpCBw9xA7pwVTWd9NhIZlQUTWazYGocRa/ZKqRBfCWI2gloZxiQjdwphWo1rOFAXZqOrH/pTMGV6RMsZvhW2AviaENHubJZtQUTj0ciRmPbUQbnP
m9Xd800sq+nOzmg0apuOSVgoFjsEJ3K1/NXlTnBxOJM7Eyk5kxDc3YI5h2ghkoQQWQjKRDHGIrQONJhU1BMcJBKrXmcUdyjWnvtABEvgCGYjCUUF04e9eevbwlYAXxv3LrV14Lbv+01blIWrhRBFIBVtij0HKyh1y9RrVddRQsoWggMQZsFgaE5+cQLmITAnUiIiIw/OziFGN4N7ESAOJp9EZVsq1xIngjkpAtsoJsmpbdZVFYTZkdwcFNgsA2EbAv1R2eYIviYOkBscVVEu5meLxelsb6+I4hS83J1MJk2nRpW75m5p7Rk8iRATCYFJaRg5SsQMYWd2Ys5eOKIQMYNIgogwiFlCYHYDRKgQZ44cajclp7qKgaGahJytM22ITNWdSiYBOKXtu/+PzlYAXxMChNncSEK9dwPEyOsQPFsyzWT58v7o5Hj+zpv3heGaL/pQ3MhtcIAwZIcNQRHBGMrkWUOXWN2J2AEzMpcgofAup46JCEquilBJX8Wu92KTR70FB8qqLoTbXh8siAYDOnXfLv5vgW0I9DVxNwKEmKmYL47rImpqm/Wqqi/t7O42i5MoxeHumKeBqVFNBGPKRD4M3HUQDIBdrFQCwQKZC4PIIXAy4+xwc2UlcSYPlJgMxAwP7ExMHMlcSE09WwAVIC7LyAwzMLCt7PlW2ArgD8HdCT4YoItI6tuiGmXCg5P7dXDp5tSxMHUZk0nF6qnPHsXcAg+Lf1jwDiLGMEwGREQOsBHZ8H9QRadOxBKEA5v3QVJgB4nHMhIZqLNiHIiQxHsjJhGXWIqUZYAZI4CZH2Ljwm8/2xDo32E4UBJ/+mf/blWQAW3bxgDhYPFA406oJ1272qzmzWY5m4yZgSHBT2TuZjAbBjiC+WLZ03Ajxhdj8QIoQB2e3ZhBIBYmKcoYmRgs4NKAAqmkxN5FQSDq+6xmBDJN8OwYmo2HjoX0Pj+0P7ZsBfAHMXdVzfNbOH/LVN1Qj8cET5pDNXWukterdQsKxHlxdruKFEWGuRUAAUrkGEIeALCLswExbDgGKEGZvBCtCoczwd0ZUrEEMGfUySu2Dduq5Ia8T92GoEldPSDPvbvr7oOobNNyP3/pt38OILP8Pj63P6ZsQ6Dfh5kyy5u//l9ep7fWiwfLTT+d1OTUNN26Swc3dnKXNfUudVmUozJaWlhvxCIimlMQsaH5nQwAOTGJE4h8mIYKy8P8OqbgTiAyMjYyyyTq3guBzNVYghG5uwAogsSYJAbXzvMJyZQ4wtw9G6Sc2OVhmoY+tIOMvn1sBfD7YJbP/fTfuWw3X5tvNk3HMhTwkGV1xNR3VaBV4vH0EqzLfdIuCzmVlYBT6oMqy4WXmhkNnS9EIAxXwiZDWzDc1JlcxJeNViyi3plWbGR9GcyJ3AMLGzOxMDKsH4cU+YwkEk1gpjZcs8n6fH44+kXMP8g7z9hDPdHx28E2BLrAcm6b1Wd/9u/V6zduH22Wm37TZiIDOZNMdi7xaPf07GRxftL1qYrCRIEJsUwohwQ+RHp1dblwvhLBxeuffGj9Igbx4JYSmIRNvJ2Nua65jig4W+5FJKFwKZjJiNWZKBAR+Qa6ZCQ4XN0NIHESFvaubW5/CS/+LVvdZg4P8UC7bwdbAQBATj2H8MJv/oI++NzRyfz07GyxXK43G1yYjSizF9o052dtslEpgs6zxbIajaexKJ0YYAZp6kHDHzzMUSViA5zYzMyGg3A0IhAVMVqX33zpnYqpjijEKzE3d6euVwCDiQrpmvIR2UbV4DYMDAZArgQV5ihk2XR+L//2/yHN32GWh3206XvJVgCDETQvz45l/upmtTqdrxbzs5Ojo8V87pp9mOWuujuJ1x5/cjTegWfNfduuJISd2R65MzwwC4vmJK7MTmRMRjB3dycnEokgN4cRMwcnMrBQVUpUdddkllmYSZm0kCToA2nwhvtjz6tsmhVdL+AJuDBTc3VXU3dPEmWxMcmn9vL/fXn/dTgeelvz94ptvAgiEIfN4oHO37x194HDN+tN6rpMwkQOBS4GYkehEDj3i8V8ThT6ZkUVx4L7ridBjKJFdFWi6O5DftOHE4A5EXzwiHMQMbNDvR6Pnv9gRcRqRn7hU8LeRNbADG0c2eBmCC4iQqE2VAQnQUq9I8YoDFPTURW6DY0Oz231RRw+bZZZ4vv9aP8Y8KgLwN2J+Oju2z/zk//n1dkDggKmal2XmrSZ7GVY7FWLEN3RqxaCqh5bEkZm0mb+wCWCycxFQICaCgUiHpxPABAwJEaHyzEACmf34SzQpp4oj8sYA9QyMQDLlih1zgXCuEiteDBHb7GqD0gKd4dlhxEHeCbSEAuYKai5vahv/EpafVecXoMbtk3A78Yj/4Dczexf/N2/1SxPj05X2VJvWLe56dPR8fLsfMnEOakBGK6oiAgSYyEh9ptVahtTcwcRzChnVXVVVdPBCtTdwETELMyDFw+BiJ2jSjRQLOJw8eYOc3diA8y9oxqxpv5cs5MUJLMc9pWiE8PJ1MghTOSGtHJTc+mUuqZD9yAff041bS+IvxEeeQEAzDwZlfWojJHO56vb3eVlQ6+9fZJdFmcrDhREzNwpEIk5G9gAZmEJ2Smpsbs7suYu9wZnIjdzBw1WDMOMDAiBiZmICexDFQ9FuoivDDAQNe26bxqnGIsK2kAqAxuFHA84lORG7u6J2QEyVUf2fq1ZN50yrIjQ49N68ctCDObtSeBdeaQFkHN2+M/+87+f1vfVsdx0HKonpuv16vhobqX4Gzdv930Kgc2HBI4QByMxREOUckb1jpO7GTPMtO8VAEkgJ5ARD+a4zCQgHsKir7qXEzFAgA9T5k1NVZkLqSYIhfdnmapcXY0heNiBg5GcoGYkYkZGlUjhedP3LTGEdVxaIdp3Cf3Jm7/1983Ut7vAu/FICwBuzFJj3TbNqqVPffLDbY87d05Oz2xW8a+9cOdXP/vO2zfvj0eFuyuJGpnDHeqUPHSokgchMmifFZDUZzMnYhLO2dwuQn8iEFyGDgE3hwJgUM65zykEiPbu4BBCDJBCdEO5B0fqzkwqKne9X0RhIRfvPHVm4DjiYN4dw7quaYO3XWqdPYQC7eoy3WWWbZ3ou/LoCsDMJIRXXv7y/Ttv3T1Nv/E7L53cvrkTda+m3RF9+qVb81YV/m9/84uqKkymyQBzgrmb+VDX4FAuwQXAKWXVFEJwN2Y2MzU1tyG8GWbjYZgNA1J1M89JWQrS7GkNhpAQga13bbzYFVLKGysvmVl2iUXF2ronpsxBikDd5qxdHJ2erF57e/n2ffn5Xz/7uV98u2nXsGJS5/m91+Fw394JfD0eaQEQ8Zdf+J2ze2+GUN47Xtjyli2PCj9ddF3mEsyI0TU9uH8Ug5DlC7t0Ag3lmwR1NpTZC3BIqSXyGMUdcIQYCXSxBB0EMiL7/7d3ZzGXZdddwNfa05nuufM31DdVdVW7B/fcNp6aYCeyO27HCW2CkmBbih2EeEAiAhQhhHghQrw4IARRHjCQSBZEJII4UnBiP9jgjrvdcWy5q7urq7uru8bvq2+84xn33mvxcMs
OQenBxk7L3PN7ug/3ad/zv2efs/dei4iJBarKoiMRRaFRBlUADJIcMiOStBMWhnQI9ZRVBEJXxdyKVAkh2AkhQRpEAcBUTn1ZXdlzv/flo2dfHLW7aeY6V65TXnjoyPzGN0AIbhaGX9fyBmAxPTi12ksi3UmUCNIvPTu9WbmS7bCva4CrY5fEyTvuOTcZz7LZOApkUbr9kxzAaylsXTnvgaEoLQh0BPMsD4MAmRF48QgKQEIiABLxouWiUIpBegRGGk2LWV4uWvZKrpiJgLGaEntSLSAnyILQ0mfOkvegpJdaA0pEFNKQy112eP7CQZ7VUSv+/BNXnvr2rmq1nEwKK+2o6LQ13LrhNF7Tsq8DGC2CQAXGvu+B7QtX219+8cDVJZPv9Lu3d+KHz/V3tleB/fH+XrsVxUE7y2ty1kuplJKLTlrMCIKZru2O7zy7HoShtbUAiQCMwEQCBQggRIG3qhsCUyuWoUYmqGsrkUTUYQEKaiDvTVcoI92MUaJAa6sw7hjpAJAZPTlpUtTB4SvPPPnVF596NnvojmSl1XnskbOFVbFxK6tD7JyZ3py1zjb//W9s2QOweJvJ5M+sBpu97rvv6u0d5wgQKtbCGyXryvf68WR8dOPqK6fP3rU5TBRPnAcJLAQjSm2kEMhepO0Bi8ARC6EW51QAGIEEIgMsWmUgEAoAIvYQ6IBZlA7J+aL2gaZ5PhOI7HyqLLoMVSKptN6jRiEFUCWlFTIRQh7vvbJ7bffG2Ezr8syqvnD9JKbinnMbqPz1a9fEcfnQg/dpbd7q0f0RsLxToAUGQahDYxzRybSYj0drbdk2rh0KrdW89GVZIFGatkejfHRwheupDlrGoEAPiIggBErkUOE9d211ksg7jygYF33YBTB4ZmZadG5frAsTCEey9lB6FkIRkgNhK1dkeeVA6UCxY2KQBuoJVmN0c64zpDkKJaSYnhzOjnfjWG6cak9Jf/7pyVrfBCFMZxP0eUtlinIZGYhbAND8xK9v2UfHOj/PiqyqssKhNKB0Zal04EF6QiWlc9Y6G8chCDmbjmajm1VlA0Ua+dbCLuNis4MECONQKE0gAASxAqFYKGYBIDwzMRMjATCgJ/SMiOCcVVpHSWyUHwy77W5PSonVFEQAaFAojS7CnKoJO4dCVnlR17S+nq70zCDVj75z+MJJ69//4VFZyzBJrx77q6M4TPsiEUvb9+57suxTIOsJEMuaxvPSOZoX5Xq/Ix2VzhutBDtrXZZVrTSMQ+19XWbj6c1M9Hthq88oEAUzMQIKYCJmBBTOeikREYhocR5+8V6IBAhm79kCK6EUgmNGgVIITXPknDl0jhTWCBZMRK7UKpBcSrDeV0UhWefWqqgVh2mEjHffNpvn5Y8/0BsXvevzYHZTx+2Vs9vrG2sBsCPvl/3v7U1Y3gAs1qjq2mlthr04m0+y0vXS1BFobZJQ1I7q2hqjqsq207jd7bpilBcF+Fp4V9c2jFKhoyBMmZGQgT0QKiVBMQKI7077gXBRJYKQedGpQjKzJw8opZDMfnFr8ORULCIuJQgCoSULECA0ecvkSmdoVqsoDNsxRx1Zhx2/G8dz6/mn3rt2/13DSrTjuFVVbjYrgKW4dSqAmvv861jeACxUZVVVVTsMBt3WNB8djkuBdNtmJzQSijoOFQpkcCeTuTEmTeNS+rIssrJiMc6nI63C/sbpIIoYtRD+O50hERG88wL9oiAQSwQGZBIIxE4JAyg9AYP0ziMTArfQBUGAWkJekJTGBAqds6IsBNkMRBAm3TCOgnYskiHrPvAhIN1zttvrFP0+ILoQS1uSc6i1B6ms52Y/9Bta9gAUtZ3OimGnL5CH7Xg2n99x21qgXF25QEtiz0RG6/G8SFtSt1OtfRgFVZ6R99azkHZ2cp3CKEz7ygQgABZlsEAwWZAgUCCzBCYGKRHZA4AU4IlrFs4RIkVaRirUGTCAq3O0llXs87wqMu+8RidkXFAUaR23Akw7HA78dGTn09nxga/cud68lWaOQnIVEwQ6ClUN8mxlsQnAG1r2ABydTC9d399a6wQ6lKJ45MEtR95aENLXlSXyrSh0zscG2rESMlC6p3UWSO+qSiv27Mq8oDyvslkUhTqKTZKyjBlMEDBzjYtNm85LLYl8VTtjFBF4kMQiTSOBVoKFclpbD8hVNaGqEt6zrwTWWgVj2zqcmc0+DTqGoxjivkB1uLc7v3KF63GZlemmKWcjqC6q9CyGqx6UVBKS3nQ3a51pJkBvYHkDIITwzn3oJz8y3nu2qLJWu2PJT2Y5kQ9DRYRKSWO091ZK8CWPp9MkDbVpARjLAIC2rmztWDIDZNP50clJGpmklYJOUAUcaR0oBGQiRiAHRKS0dt7X1gZGusp96+VdBG/AnulMsR6zLJFqAqGh6kZyPBcXxu1egvdulFsrIQYJtNZAdmB6YicnBwcHd20D9qKkVSMYa+fgbvqw42bXYfO9RvXK+eytHuMfAUsdAEduZ2env7rtxs8l/f7xyWg8yYf9FJiD0CBDVVWL2s2ADgDLoqwq224nKugIRCAPwOzJAXoQNahJ4ZimKOdCSiwCqRSgAkSWQghUUgoUQF4A1nPICj7/zVcDtHfsxDmCgFxrkgLCuJeTfvL58bQKH75f3NMfpZGHoA/puggH873d8vBVjcVwddVx3gmzqhbOyyDQ3hcwvail5zCE6cyY5K0e4x8ByxuABWZ+7uKr62a6NhwOV4beeiGkUeyctUSewFoOUzHoJQJhd3+kBSSxkWEcGi0FwWxM5JlIBVEQhuyrQCMDHmV07TjrJ7LbNuxBCDBaSWQPjLcWiTF08NPvW5How8ALduxaF67nz13Oe31V2OLMevD4Xx1upeNZUdbBlkm3RWvNZ/nRS98oirozWI0G3dH4cHpwfdBxtYMgRO+MjozqbzsTQzGuKwQAJm7mQK9jqQOAKJn5Zx7/hSe+8Nnp7KTf6/P6cH//QMnFuRV/cDTVSnbSALytnR+2w8AoWxRlUSljWkEvDH1upwo9ShQoAqWMEkTU0j7SuHtUlZVb6UVFCdYRGxIIoQYlsPJyXNE8d2kCXRm8eLn86jOjl2/MV/vRoxvyg/frs6eCJCms7ISrd8jhbcJ0Rrt7o93rQWvFJFYKaTRirFjptKeEVORJK8Ui8O27oJjZKuqdfQ8QCdksh72epQ6AlJKI/tr7P/C7//k3Ntp1KwyCMArDsK4LIVEKtTqMA2OyrNQaEEii99Zbtta5OkfVToCUdYLZSetNgIHWUqIU2IpNFKiVtne2CgLMhXC+zknOMy+BitpPbcurFjHNpsda1IHi+87Gj7w9euC2eKVrQgmll4EahO0tjLrAJrt2fnbj6qiMAw3tJPD1LOZCQY1CZIVspVHlPJd1sH0H60SMro/g7HDrbmBqmmO/vqUOAAAAsHPOUnQ8OgoVt9IVrVSR+0hpaXTg60Uh27qqEHg+q+PYtE0adVbz2l++tttLMJDC1aiZgYkwUDpUWigJHnFlAEgJs+sjkguEYOhzUfvagRKodU2+FmQY6MzACAyPjkb5bLbv4c
zZM+3hFur2dF7HUIpqrorLdZkVpQ619vlE0YSprCswQcq+tqUFRzLqcXuDD68I4sxS33tsNkO/kWUPAKJQCv/tb3z2l/7GuzaHW7acrqz0wkDm+SyJA+/dfJ5LCcDiZDSNIuNIVs53k+4k3swPymf/9PkPP7KdFyV5kjrBsKVulZK2yEwERiKQRSClpRCMwNogokSw7K33AkAwQVXXswLGmU4DNVjbPJqGVpQ6wMmsWq2yEEYo2+FgbRM013NfOK1joZzRvXZ3LYD9vAaBtdp+GNyUK4dRmOdSSNkcB3tDyx6Ahclk8tQzu++8e7B9KpYShiuDyRhsVZZVbT0VtY0DHE/LQX/QXx1eevXq1B30t9fWOuJ5y51ePB+PD8dV5mVcKVGYVmy0SQKtAZ0VAtAZSSyIGVCid3VZuTSQWgdsHQoJIDEkr1xRwUpPXrk6PZyVabzf78WlxUR1DbyCPmt37lHJyv6xtrrfbgdKbxgpSARebShl5OA2jJhuPkccFBTuHs3ufqtH9UfCsk8QF/2Au93uP/vnv/bVb+2FgXGunmezTqenjYpDubHaHnYjJaAVq/3D4+l4rE3raP9g/9kvXnzp2mMfuDOicvfmbJRZRDr/ws1Xr4zms1mWl1lhJzmczCm3OueEdN/pnpV9rwY66mJrxYXrMt3iaNN0TrU6K76uoZo9/9LN4zlW+fQQNu9/9FNh1LpyM6fWGc9a2kM+/tZaxw8GQxaRMN0wSCVkvq5EfxuTgPYvoQNSepyJv/KTvwR/1qOg8ZqWPQALUsq77rrja9/ef+bi9XJ+IgTUtkg7/ZXV1TAMw1BpDdqYmuGrT1/WAs9tJsD8vgd3bF4d7h2fWm3Vzr9ybZJTcG47TGPJrirms7quydfO1llWTmdlXrisdCCUMqG10lpynqpiOj28Obr+4uz46t7h7HefzA5z+fK149XVXkZJ2l0Zn2RFcBsM3oVqqNIdFPGg3w9bvcphzRHqoTp1n0xif+O8zWbTUlnrplMbd1ff6kH90dBMgW7JsllZ029/4eIvfvTcXbcntqrGVg1XVsIoKcuilca9Wj3zzeunhyZ0x5JaSRR9+asvtDU98mD3839ywCjTVnrvnV1HPMtqlNp6AFe2W7H3LBXZupahQha+tloDClRKeFveeOWiKybe++MZ/6/zWZi0O4l4tgz+y+f++6vfenJjY63f73VXtvzE0zQT4ZYQElGFUTC19niS9zZ3giSy176B5US2NhVE7aC8VJ4jRyDo1pGFxmtrAgBCCOfcffc/8Klf/PjnPvfbv//E3vog0siFjWwapWm8sXHq5HD/zHbw0l5/Mjl6x53tCpNjvc1KDXT5ledH/ZW19z24maaRJ5jOK6OENIoqKsp6PCvS2EjGONACJXnvmVhrAKidq2qX9lf2btiZj/ayfC/PH7s3zrPswz9+b0koGVoRrq51THVjks2k6ggmVzNLixKGg0E50Ix1dfXbnB9h0J/lMBxkNnr7wz/1d4HpVjmixutqAnDrMWAwGOzsnIk0Xnhl+h8+f/Fnfmy71QokUjkbqSBMugNk9+h72//p98tf/8PpSs8WXG718WRc335u54E715FdXnsHgRLKe68Ut0N1DD4rKEliJRWCZUAlVRQqYvKOlFRGq/W1FW3Ck6klNdOXsqcvnvz8B9dOrbXWVyNP+urV4621BN3sODO9dhTISEqBQrGQk9lEwlzUx+DGMhqUPiqOXoakNUs+2mNmJoHNj/vGmjECAJBSeu/PnD3X7g9Go5OnX8oA9z75WOtg70YQRafWTeUrrfALT1wYnxQ/8ZH74yg6PJr1U316basdB54IUEaaa3JBiEYpJVEqOey2S8tJZEIjgG1ppWdWSigpEZhcbUK0dZm0WoUv22V979n2C5f56t7R6mp4+Sp0YlNlWT5T06zDImqliTaJMJGtpuXsuq2ropq3Q6+UtqARcXt7cG3S337kQ+RtUxv9TWoCAACglKqq6q8//vhv/uZ//OMnnmjH4TdfmpjwlUcf6sPJvJWYQYK7u0dPP7O3d1yfHgbrO2eOpvVmUkwz2sv1akIoRZFXSegXLayVUloZYIqJga2iCbqsqhMZdOI4DLQE8nWNziNIDiOMnXGH8zt3+oGSlw/zlb1paNRkOguEunEsWMPmqjHp0Ntyenwtm+0q8Azy6CQzQ+CwA/VEI4/HMZ77WWAW2Gx/eLOQmwLCAADAzN77ly+9/PhHPzodjxmgqP073t577N3bZ1aTQTew+TyfZePMrq0Pu5tnhIwTZXOHtTfe+tqHcwun2jLRPtDO26oVaIHAwMRo8xPNuYyHMuhrrRnZe1dVdtHzyFqqand4NK7rChAPT2YHJzMmmSbhudvWVtd3TvUDqEeCqny2W9XORAkwjMfjVqiSNI0jDd77ci4f/sftnXfDokt3481pAvBnFs13L168+GPvfY9WishJrR96YPOeTXPHqlzvx8JVUSCArAlCjPtBEoVRpFTAVEnZAxERCINOCQLy4K0x2hPb2iG7IG4LpRAFowbwRAwMRM4R2dp7ouPRBJiV1mEUBVESqEgbIQRQcTIf7dr8QEtAaVgoBpEVtTKtVqQMZkQsWL18wz706c8mcYJSNgF485op0J/jnDt16tQ73/Pub3z961LIsqjOP7+f5yuHU7h/y64mUHOoBaKoAju2k5nNYxMEWqBqaR1EQmpPitgLYXuDlaKsbFmGtCuQSK0QaKkkoFCIznsQUjArhCARjCLprmgttdJZXntXcDWejg6K+YnwhbU+CGXtlQGn0Il4ZVqnCiuNM0dU+8D59KG/9U9aacoEzdX/PWnuAH8OESHifD7/xMd/4amv/bFA6ZyTEne2ehtr7Y0O37Ot1zrQiU0QxkooAUDARgqhlZexjnpR0vnKV57/vT945oMffPDjP/d+YK+pYBAkY6kCQCAGKZR3zhFlWemJk9gobYyC3Rs3r125cm7D1MUEqCLvEYVWCGSNUp4dszgpdObCMIKtbj6f1Um7H6Yr7Xf8Smuw1fRE+j40Afi/fTcDP/uxjz13/hkhhLMO0ZkgWlsbdGM+PeA7t+K3bQVpGmsVAKD3AAIZiAGFkLNp9vyFybXd8U9/5D07ZzYIhBS6ripiEEoBSGAHVBM5YOut9XVGwJ0kvvLKtb3dmw/ee0oLAYgoJCIBk6+titL28NSNo/p/PnVxbRCuDtTxyN64ST/393/11G13mqjPzc7n70sTgL/AonGq935l2JcCtZKCkAC6vU5dO6VAIG305X3n0q3V9Oxmu99vC2lQSG/Luq6ctWU2V1J5pzdP9aWQQgjPwgPgYvIDjtkDeedISkCslRCSRaSlVqIua8ElUF2T8aA9RiKIhRS7+/n5F67cuH6ycfv9UXv1l3/13zHzd+rzNlf/96kJwF+Mmb13n/nMZ5568mt/8vRTSCAlxnE8ywtgEBKtZSLfivTGMH73Pb133bu20ksiI4wGT3Tx0v5s5qJQ75zq9tshEQMqQiD2jgjY1g5Gk5KZV7oi0FIpKQnrwnrrF/W0WmkEqns0cSigqLMn//Tah
VfGdeF6q6f/1ed+ZzAYwKI1X+P/TROA13SrgfZs9slPfuKprz0Rai2lcJ68c61W1Ou05nlR136a10rI9Z6++3SyOmilEQ9SIxDnOa32o3NbaTuWRkoSsnbeeZ+X7nBUdltBHJBQBIAalXOAIA7H/stP7959+6m737a5fzJ/+dLuPJ/uHZcXL43KioM4/Rf/5tfvuve+1fV1V9fKNMWffwCat0CvSQgxn8/TNH3o4Xdcvnx5d3cXrZXMcRQpqUzQ6gfJfHwigR3j7mF++cak1wm6/Yg8a1Ab620t5u3z+52W2RrGcWSSSHXbqqh4NPVaWk+aWBDoqmbnJaKfzN3z17OvP/dMOzp/PMlryw6lq1lLsbO9+Wuf/a0777kXAIioufp/UJo7wBv47vPApz79aSnwj77wB0ZrAUTEoQmMEtZaIghCnReVANAaPBGBsM7Pc8cMAkU7DZPQtEO4/2w3DGOtmVw1z0tHkFecTzIBKARfPywmBTGI2nqthFEKmT/02GMfePTDH/2bPw8AzjkhxGLe3/iBaALwxhYLZIvPX/zSF1968UVj9H/7nf/63LefEchCyVCrsrS1985arXTajifTvLK1llIZpYQQCKvdKG2nre7QVbkg8r6qdGotrK20Tp85ffPbT0/Gk/1xfTDOETFJ4kDJd77vkUfe/xOf+Nt/B76Tw7d0GP7/1ATgzbLWMrP5ztzj4ODgxvVrly5d+pV/9MsP3L1dF34+natAoxRbK+kLL90ovBhNZ+B9qxUN2uFKP9k5ezu2Bl6E+1ev9Da3ksF2rKSbXuv0+3vf+Mrlly+bODFhnBV1lPb+6b/81+fuuDNOkqqqtFJNdZMfkiYA3xvnHDMjglK3tlvO53Oj1D/8B39vcnS4s7OptHzpuQunz5515C9ceNFVxfTo+PS5c7fdf381vll6Ibs77Io4bQU6ivrbdTbJ9i6kcRQruXJqY7p/49GPfeLM2+42QQAAzlqlm32dP0RNAL5PzLyYlrzWoasv/dH/eP78M1EU+Hm+cuZsa7Bezg+KIisKDnuDQLp6MnKmn3bXfH7E5B//2MfU/zHJWazHNSe6ftiaAPxgkPf0nZEUiMSs1Pf8hu1WM+3muv9L1ATgh8U5B4seLQTwes+vYvE1uWir1PjL1QSgsdSaN2uNpdYEoLHUmgA0lloTgMZSawLQWGpNABpLrQlAY6k1AWgstSYAjaXWBKCx1JoANJZaE4DGUmsC0FhqTQAaS60JQGOpNQFoLLUmAI2l1gSgsdSaADSWWhOAxlJrAtBYak0AGkutCUBjqTUBaCy1JgCNpdYEoLHUmgA0lloTgMZS+99HY1i+CvH+jgAAAABJRU5ErkJggg==",
- "text/plain": [
- ""
- ]
- },
- "execution_count": 6,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "# preprocess the input image\n",
- "input_256 = preprocess(predictor, input_raw)\n",
- "input_256"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 7,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Data shape for DDIM sampling is (4, 4, 32, 32), eta 1.0\n",
- "Running DDIM Sampling with 76 timesteps\n"
- ]
- },
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "DDIM Sampler: 100%|██████████| 76/76 [00:05<00:00, 13.14it/s]\n"
- ]
- },
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Data shape for DDIM sampling is (4, 4, 32, 32), eta 1.0\n",
- "Running DDIM Sampling with 49 timesteps\n"
- ]
- },
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "DDIM Sampler: 100%|██████████| 49/49 [00:03<00:00, 13.45it/s]\n",
- "\u001b[32m2023-09-10 15:29:28.140\u001b[0m | \u001b[1mINFO \u001b[0m | \u001b[36melevation_estimate.utils.elev_est_api\u001b[0m:\u001b[36mget_feature_matcher\u001b[0m:\u001b[36m25\u001b[0m - \u001b[1mLoading feature matcher...\u001b[0m\n",
- "\u001b[32m2023-09-10 15:29:28.959\u001b[0m | \u001b[1mINFO \u001b[0m | \u001b[36melevation_estimate.utils.elev_est_api\u001b[0m:\u001b[36mmask_out_bkgd\u001b[0m:\u001b[36m48\u001b[0m - \u001b[1mImage has no alpha channel, using thresholding to mask out background\u001b[0m\n",
- "\u001b[32m2023-09-10 15:29:28.962\u001b[0m | \u001b[1mINFO \u001b[0m | \u001b[36melevation_estimate.utils.elev_est_api\u001b[0m:\u001b[36mmask_out_bkgd\u001b[0m:\u001b[36m48\u001b[0m - \u001b[1mImage has no alpha channel, using thresholding to mask out background\u001b[0m\n",
- "\u001b[32m2023-09-10 15:29:28.965\u001b[0m | \u001b[1mINFO \u001b[0m | \u001b[36melevation_estimate.utils.elev_est_api\u001b[0m:\u001b[36mmask_out_bkgd\u001b[0m:\u001b[36m48\u001b[0m - \u001b[1mImage has no alpha channel, using thresholding to mask out background\u001b[0m\n",
- "\u001b[32m2023-09-10 15:29:28.968\u001b[0m | \u001b[1mINFO \u001b[0m | \u001b[36melevation_estimate.utils.elev_est_api\u001b[0m:\u001b[36mmask_out_bkgd\u001b[0m:\u001b[36m48\u001b[0m - \u001b[1mImage has no alpha channel, using thresholding to mask out background\u001b[0m\n",
- "\u001b[32m2023-09-10 15:29:29.384\u001b[0m | \u001b[33m\u001b[1mWARNING \u001b[0m | \u001b[36melevation_estimate.utils.elev_est_api\u001b[0m:\u001b[36melev_est_api\u001b[0m:\u001b[36m199\u001b[0m - \u001b[33m\u001b[1mK is not provided, using default K\u001b[0m\n"
- ]
- },
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Estimated polar angle: 62\n",
- "Data shape for DDIM sampling is (4, 4, 32, 32), eta 1.0\n",
- "Running DDIM Sampling with 76 timesteps\n"
- ]
- },
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "DDIM Sampler: 100%|██████████| 76/76 [00:05<00:00, 13.38it/s]\n"
- ]
- },
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Data shape for DDIM sampling is (4, 4, 32, 32), eta 1.0\n",
- "Running DDIM Sampling with 49 timesteps\n"
- ]
- },
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "DDIM Sampler: 100%|██████████| 49/49 [00:03<00:00, 13.32it/s]\n"
- ]
- },
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Data shape for DDIM sampling is (4, 4, 32, 32), eta 1.0\n",
- "Running DDIM Sampling with 49 timesteps\n"
- ]
- },
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "DDIM Sampler: 100%|██████████| 49/49 [00:03<00:00, 13.32it/s]\n"
- ]
- },
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Data shape for DDIM sampling is (4, 4, 32, 32), eta 1.0\n",
- "Running DDIM Sampling with 49 timesteps\n"
- ]
- },
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "DDIM Sampler: 100%|██████████| 49/49 [00:03<00:00, 13.31it/s]\n"
- ]
- },
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Data shape for DDIM sampling is (4, 4, 32, 32), eta 1.0\n",
- "Running DDIM Sampling with 49 timesteps\n"
- ]
- },
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "DDIM Sampler: 100%|██████████| 49/49 [00:03<00:00, 13.24it/s]\n"
- ]
- },
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Data shape for DDIM sampling is (4, 4, 32, 32), eta 1.0\n",
- "Running DDIM Sampling with 49 timesteps\n"
- ]
- },
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "DDIM Sampler: 100%|██████████| 49/49 [00:03<00:00, 13.26it/s]\n"
- ]
- },
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Data shape for DDIM sampling is (4, 4, 32, 32), eta 1.0\n",
- "Running DDIM Sampling with 49 timesteps\n"
- ]
- },
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "DDIM Sampler: 100%|██████████| 49/49 [00:03<00:00, 13.22it/s]\n"
- ]
- },
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Data shape for DDIM sampling is (4, 4, 32, 32), eta 1.0\n",
- "Running DDIM Sampling with 49 timesteps\n"
- ]
- },
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "DDIM Sampler: 100%|██████████| 49/49 [00:03<00:00, 13.27it/s]\n"
- ]
- }
- ],
- "source": [
- "# generate multi-view images in two stages with Zero123.\n",
- "# first stage: generate N=8 views cover 360 degree of the input shape.\n",
- "elev, stage1_imgs = stage1_run(model_zero123, device, example_dir, input_256, scale=3, ddim_steps=75)\n",
- "# second stage: 4 local views for each of the first-stage view, resulting in N*4=32 source view images.\n",
- "stage2_run(model_zero123, device, example_dir, elev, scale=3, stage2_steps=50)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 8,
- "metadata": {},
- "outputs": [
- {
- "data": {
- "image/png": "iVBORw0KGgoAAAANSUhEUgAABAAAAAIACAIAAACTr4nuAAEAAElEQVR4nOz9Z5AlS5YeiJ1z3D0irkpVWbpePf36te6eFjM9mEH3YAewwUAMCAILLlf9pHFJGEkYuWZr+5NLM/7gD5qR+4MEjbs0AGvYBTAQM40BBjOD1q9199O6tMis1FdGhLufwx8RcW9cmTezMrOy3qvP3suKG+Hh4eL4UX7cHUUcewClEIaAACICAIiYXQAiAshwMpA8KeBIBvMC+3kM/y3yl6F0WXn6qQSK7wrIhNLN9UnYp+QzHkvpqQwVc79Mh98aSd/PVubIR0Z+4KDv5i7CQfoO8QDdfcC850W5rcstNdQfgohDhITYf541UUbRwiJEE+i/uChlMPh+/7OHLz9ITn75+Op/ec4cirfygmJ+87AjMc+2XOF9MfKpfDwWLVfKYXahRsbOcPb7Vycb/FPzlHIWGfc4KmBWLynTSonmipuYF0JmNsSgT7OmK5d6xivZUwEpDQLEgrwHvACH6Z9FCBEAhLlP/1IwECwqVab5KQUeUEz/u9lgKVeizCcn1maU8w9jwnic2SojXGEkn4nUMv7omLjXISEgwy2cYRo/PozsgLzj8x4cJBhkltPUFL47afzuU6QhuT+QX4N6jdROptCAjOZYtBWgQMEF+toDDvF/IIJSAYrrbCRBlgn0R3oxNsqkNUF1mRs4+GT+9fE6DjLvj6xSvcfH3cMIgVk43JCY3jSTlKV9FKiSsCjJlymDd7JWORlDPAbLt4fzHP/ESBlm0DkMp58BGfoHAIb6fUg/LMugUgYIKFDQf6EajtA/CjOggFBBeYL78tVJytZAET+A7joPBCAj7hGyHpcI86kso4XDqT32yLDPAJgvj/mbIieRMnXOpoBZugkWFCQDvWQObtSnOwHAebpjhOIGSt7wAJWiZpgpSH0lTAQQvWdNWWkx++yMuh8RZGi8TvjYgYaP9P8UBsD8FZiu8wwL/8kYF1ATH43cKb9yGMwgzBm392f+I3zvyLnYBODEwdEX5JJ3aGkwjak40/Tc6VJvGv3zgP5Hvjih+uN3x/Q1KczvY2/GE8KhaiAwoReGGveAeebDXXDQ2gcp13w6SJ8C+wr0jMQnh/lrWmrykjQCgGLQIYIIAjrvtaI+/58r6+l+xoOSSGmI7dfC08Z7LkkG/TVz3B6wZEcFGfkxl947iXOMk+9xYErxTp74Z1sbGcpasEy4X5Jrg/el8M0wMzIzgCDSw2g/M82GydU5WP4DA2CcGiaXJ7sYFYv9u4PLx1ww7YNRfW7SGJq3Z8bUgsGfaR8XACibF9MTFnbq6Lf6RZxdqhkvTlSFcMjmYACaXZUjx4wyHzirwtdbZgVHUheBTLfL8szk5LDEOnh+0zGHbnKY/HHwZIgUZHaZxslmlPLLdyeXRsbTZEJ6WCuZUpWZHTlZEuZvDT5WcsIP+QZRQDCnfxYuHJxYznx+SGlAjY/lfp6HUE4mSj04CiKfJl5PvzwQKYYh7stZD5t/3wQFGHe8nQBGtJHDMwURgIzQRQChZJf2R6GAABCMqkpzlrH4SFa8kptSYFTwDbjOQG8bGPmHxpDBVmT/GELmZhIy/u+k6JR93x/HqCV2elpyIK72V7syss7/7etf/asy/aMID882I4wOvMHjKT0zMnE3o1SHg8isia2RW5M6dxb/OIVs/2F0oPFMRrX+Mck32r8jwOEMDup1moQ+HQ4VcIS2p+nu48WDMS0N5ithbh7JCNWfJDUcAe3NY34fJtd+7iNFfARjZd9PzsP7h8dA6c35xc5sPjhOheP29kgExYSiDxVlanGmDVWACeF/hc4/gVciCDMg9mkIJ2Z+hH1+2rjtHHhIhjzeliPm/wGzK+JPjinOo5C2g5ASOAW9dsgCzBQGw/y/jwM1q5T/jL5YDP4hATrS/Q9vxQlAbq/h8ciCI0LfJMotoaF4lCHGM4P79X0cB9H4xzOA2WbAMcSFHInDYl7dZoTOZtA/iy+eD8z9gxb0mN0n0yTdoV8/aA6PNSbFPA/3Vnk0zTIFxnMZtiSGmVzJKzK7ePvPEBwTZJhvD6r+BEeKvsfhcI07/zszR/qI0lXQ5+FMx8FLZRk2s2Rj9uWE7+xrjYxbDjilCoVaMyRLJxn/MmD50q/UafSLPMETzIt5aVdkoHrmeAgFeg6Fq3g+iK16GF1mxEN2CgftNKZ4VEUdtaimOQzn8SLuf+9RYppUmOGKmsNQQBafTQVMJKJp0uVR4LSR9uOD3I2zrzMdxjp51F06SDV2Vf5exuFKz09bx/WVIwEYLIApY7Ye9gQHxzQv2QwctPkPYupP09endfyIcJ+hnh9A0BwlZU00HiZp/OUwyoz+EfK1kaPazxP6f4KPIkb4P06KIEEYXh03f7bzpT3Shbqnf5TuZxwd6VcOMTWAI/9+ZFHyuAqILhF+v+q5RTyu7j/SxjmSj5/+gXIMGNLCS5r9BIy1zz5DaXo+OJ5shlsTjrxfZvb0iAUztE8MDl98/MjlmIBjVDHnW0eKiUxt5ObEjp9T3T+Y6DkwOxrR8sefjvyFMck7Vq18j4hp3sgn9P8EHyGMjrhiHOXLV0BwZL3DQej/QGmPdGCd2lEqIz9w6M4MZ8rhkHfnvo7O6Tj9GuJDlXDU7KQZdugpb4dD4aNYp30xZBbvOwMwdmOaxj4hpxmkNNuZePT9Mk+OudJzeqMmP2KYSEwzEh8I83H8IemO45dzvFyoyhMthBn5TJBKR2APzZlF8fHR5IeVk0/wBI8jhkZp5gUthNPE8fEED4dRawsm8KGjbXOEw3K1x4QAHqqEo/RP+UTXEX7iCU41ZuvoUxLPQxACw0GOkP+XPyt7LWX4tUdGbpz/e1BPzxM8PPY1Rw/KxOfuwLKnf/+X+hxzPwV/3/L2a3wUSnd5Tq2PaTUbN74nTchPkNVPcJSQMk88Ijp4godA7iwGgMwPNOKfPoV4vKlmbPr1OOoy6NSDOpswf30eT+nJY3xK9+GzlHz3w4lusSf4iOFwHTtNQ5iWsLDyJ7DTGXFHM54eI2hM6XlC/MeJEeY6u9+PZwbgANnPUPpxIC0mlmLKe5KF2jw0kfUzmBF/1C/IDGGIg4cTnj8ZC0eK8mJTecJsTgNKmk/eN8d1ftZR4XGkmrzAmUogQyzpOOoyyHN+iVCSRHO4eh4ljtRfiohU5PpYktYTHBYHovEiWZn4xu3RfBDJ4Kf0b8yeYcJH5dc40kNhn2BflHnM+MXDZ36Eb82X24xNj2GUWedK31HQ+lBMX7k40wsykqDQQyfbYE8EwbFg0MxY8MfH26N7EjhO71Ap7Od090JJyI5v8nvaUdYQBPNzF46pFkO5TotomMneTnJEzqDtUbWq+ItHWTwByQyAJwz/I42pbo1JgQBTspisw8uYTjFZr5vhqQTJFyA+AjwJ/j8BjM2pln0sVEqIR+GFwOF85skNx67nL4NMeLucTYnSsbgpo6kPDCx5K8u1HRmO+9pYWdjDyLbPT4bEceG0u5dPJfJRcyxKWSns53R3zcA
Fd2zK6fFpvSOaQD9S4Di+OFXr2Ld/C945bXb1ODCftOnvVj701tF8H2n/VE/wUcYodU1NB5P0m+m0OJGpDn9rQpDCEQ66x81P8tHEaNRnbgqM6OgjlHRoM2DcPTInFRzUZii/Nfa1cilwYN1mG/ML5GcGHJUsH3Ejy/D9kZtTMvg44lE6UnNXaIk4Pq69MDeOTUGXQ2xNdrI4maj0Y22EAXPtOy1kTCocBQaMFyc9mKMd+17200YVxxOik4cAPW7s53Er72lERlCEQ3JoRuqCAOcMjihdy5CyM+qXxLG/R4LTNn4/hhhXRgWLOPjjApYU4rJn/JgwnPPI1/rRcIXeD0PUfzStMHHibdTumgIZS/XxGTWPyB2fNbkM/uvfHLp4gpMCnvqFv9D3KZymgh6mLAIiOTecPHP60MDiQ3MnnYDT0MxjcuKY2FUeAjTuOjvd+PjIqePHfJblfuQxMYvJ+T5WlPYEh8eIU/0QPvajK8ix5SyTom2GZzWwSAiFAnhU2mc51Ggk+Gduu/o45pafYBwzWvdJwz86nEJf7wgGA3s0Wu9RYqBqj881TpPwmPv+B0mOJ+J+qJWmsUAp/1OUScaenwIcj+8/w9Ai4Cd4gmkoKTM4SccYTjU5ZvN0ktjpLNVHBxNd1Mf6OZwUDHRs35tUoYFYxP5PFATAY3bo5F87CDLz5MkwOBlgTgsAB7bUnuA4cPqbXIauH/G6tVHmUrjz++yurEsOTQHncT+ch79JYXsdZ3VGmfNEf9T44rRSqo86qNxhMkpsT/DIMCvw79R1zLi6NRhTpTEo/dSzMaN+c1b9AC10QuFvp67PHglOIJa1jxNi4KVw//5FiZEWM/e5zDuWj5edmCNhV7M9bIV18oQ6Twj9mKsn2w+cFgz4v8ioMDgdw+J4gmUOCYQhd3nO2srcbhJXKkxdGXp2zJVBHBPuQ/Jnanc/8kaehHnZ9EHdPyKcuSVOZa0/vpCSWj3SNTLj9ObTgv7SqnKAwYmS2ROSfoLjxviEMQIAYH+rPkQUEBQUBJhw4uJxlQeHGQg8GQunAU9Y0imElM9nGH12ijqsGM8yZdLxpJEppJmSjaXZzmlCfqIqc6zIW6zQloZ/Sm6Ll3r/5Es4EyPtuD8t9kOrDiJm8l2ARpW1J3ikmCG2H2G01kHi9Upxi8W04JFQ1zQXsoxdzN1IJ0H1Tw4c+KhjKC6uH6pb2mgcjm/cThgOUvzZ/+UnlHlSODW6xRMMMFX7n3H/0aCsoJ6KaTucpI4ITlUUEKQAwNSggaMsHpSsJRxS8Esqf6m8p6nDh8oyT+P0N3M5iKJG5Q+dpup/rFGeHpsYxfb4YMI5YIcb6eW3cNgS6P8dN5rnwOPWok9wajDRPp8iS45dXI9Nq+9vdgicEnfiEzzBxwz987D2YwxlfvLITswZxyCcvuzjgKGzfienh9EN0coJjhHDHx0EJ+P+jPIRQgqZUjKappHMeCDovqBTYEhOwNGW6SQjj08Ap5NQpyAf64cgzT7GPQUzOvQg1u+hS3QYyOnw2jzBMWC0V3OROCRYjnHUTsi6dGufNQBPZgCe4AlOHGMibAIPkdITKb2QCZJHP26ltJpWEKapqENeOpywYPBRQY5/HcJDo2zyzcOqEXPJMydfp9PZBEdbpsNJ4KOi0nJ0yolR/nEcGD7/cJk4/PGAHTEtzmfi6zjpet8PyVypjgIytATilDDAxwintsWw5EsaGyAIwzssH1MtxofbSCGmPXoyA/AET/BIUMTz96cBho4jGJrN6++sUw6ceRiP2lFhyKkvkIm4vne/LPiLZIiAR7cJ8sOi1IanpERl5BENI5EPcyAPapsv8SmdAfgooayPngCdySmQ62VdH4crPn+xxl8Z0fLLyvQwn8mxL2XjXKkeCn1+/RGbhjoSzNMaM6y+U45xo/SYatHPvN9W46NAJn39BOj/CZ7gCcYhpSnC0TEog8iaTJzjWBIoxvvgoPFjK+pcyDc4KJkxxz7xeXQ4lYWcXKi5vchz1unRzwCcWvlzHO1y3BN3IgJy9DNbhyv0MRHWiCaNY3+PuwDjyOZbyp07KORQ20l5au7UUv5RYK7K7T9FU2zNdxpmvGdjhp2Phf59rEbgyBoAnDk0+q88cv7/scP4jPBIUOPQhM5pJ/snOASKfi6H9QgM5EjxnwxTRCmadjCoCwJ5dMO4xEVmsOr5CfnkSf70DbL+lrSj6gOOKxUPJVZoPKMTgxxHnMrsL57s58Yxe4OmScXbp8gjggNP1WGBx4MRf2cfE6TnCZWo+NikYVlcDDYjwEfPsk8Ah1F3C8mXc79hSYKn3AaYQX4jN6eR7sN8ekSTPJJsn+BYMNFEG1LocsXv5OXjxw2PqoHHAgUB+kN4UokQSlGGxa6bMtGyH9cajx193V+m8Lwi1dRilR1nw0JAZBJnPTqcvmmKXL+fGMlQasMhC2G6aNy31YZCgE6yHUY8uB9VDDa9mgPDTZH18D7Ns6+T70jwyPtovAXHVajxRtjfwfxQhRorAA5cOIPbgynaXPeb6PV7nDHM9AvGPdoSQ+knyQnJD4gsh40NzKb9jOdHjnGOPRxgUxZyo+bRlFCA/SFjjHREzp7qJvtYYOS4oWmDXga0gPlx0SVvzuPPJE4lHpWzTEZJYpIWP/xb+opxn1BkwguPZpsgKX8YBzUaMQqmAktXpaPSsfz0eCp2mgbWuLM/x/QG2Pfgun1b7ZGGAB12/M1LV8OvwMyqHi0lDKTy8I7gUsz05bZtrjKVlIOBwYAfjfV5R9Kw0+zhkQQHpYrRHB/i1f6xg0VnZh1eVu0wt+hwwuuPMwYerGwxW77UK7uZOyj6Q7Z8aGShDMnAXBDMx0ZfbxoMoMcNAlAS01im27IaOG4Kjhu3E29OtIGnOY+e4KRQ7tWMBGC4H2QsZfFbMsLvP+4Plo+CIDiVeFTjY78OHfKeTN5AblzXlylWwUkACx9Y7gYqOD0WP2HaQBjxB8nQ5jwI48PkiIt9ajDQFCda/ePOnjz17NmVfUD7JzkejEfKHvT1DLMrPlFZnJ3hkWBQPOmvjhlS9DPtvpjXw/IBGcUg2r9LHwvpfuRDbDzD8Un1h81xfgzr+/3cBipw/+6QPZC/81FAwZ9RiqpnKJPxwB/UN4BzGSFSkmQjOnJ/EvjQpt2JY9woHScumZSgXLuRV/b1gE30/U8r1RMcJ8rSuDyVBVDi/zlpyyD9ZDk12BrmsaD9J5gfZYfkiNk+2xsAALPO1HxEhNJfrCAlfpYx/SGZMGy19Cs+zAJxEAc09spHGX1hl+uLsK/tU3KmFc080dMwA4/MAMhwIHqdKDhhuJXGjUqYMpim+WEefggNRWQhAErZnSn9EE8QEWHhkYkckeyV/fFxGRljnTI+Y3o4OfmwvtJMN5Uis8E0RMEIy+e8jPg1Sp/fd9A+ErY+70dRyup/UWfs2ztDwZzZOZE43BBSso/G/Vx5YBVOWv506jDRwTU7cVkXgEmmzux8RiTqjJ9PcJwYZ0JSjIXy2s5CWA/M2vIO7yCA2RBhYEQcTiDjtPEED4dHND5w5K
qsNE8vUl9eDAuO2QrM8RNMNus74P/5bRRAESy5fTIxAeX/oHw9qAiWdjJBKVvI+yrDjylGg8JmV6evcky8XVzPQ9yPeBvQIxl/4xPfMDywRjDEoY+iAKPZy9Apf33mj5Cz9v4oEM79n/2JAoBcPZq8R9hAbDzuBH8wzPD6l38eyZzS/Mi1+76PQ4rbmM33FPu3SemNsS+OK21HUrYDYQZ73Y/MhoSWlP/JG4By6SDZrBeC5P8hIzCiICCiZHHPCAPjOMurbwqLSDYuTvViYBy+mN1x42kmvjV/Vqe4YT5mKI2okpdgYCIXE8CIGVUX9J+zEQEUyl7PpUZmN49zlJOpxRMcMcouIBm5WfwYTCX12euIjl1+DUtJ9xUoR4m+W0eG2N/AbB24tsuqy6Aq+c/+VflxkR327eMsxyLfMfPho4DcFzYXJtiCB2oIfZDEjxiHUtHyFydaCDJp0uQhBkxpJGeKPeduy3xcigCBsIDKP4uULfPKCb/vNRVBACnWD5T6OM8Yh775sfTynYZ6I6CADLanQQDJ7pV42MAyGFOUS9cjcxEzqiZT6PnhUejegxnIfZKXL6SwWbNNKjKDNlsYgcDCQsI5s0ZCyAYHZsFvACAMSCBAJR0JpLQIBvtzlblG9Mh7fyKGC5YrepKTxYTE43/LnSvDbGrcJYAl+pmYw2yczjZ83DDWsaUbOXdHzN22wgNujiCC2VyhoCAIi/SFAOWT89nPIpIaUQZy4fjxhD6OFyIlodC/mY3gUnC3jPLkQssvs47yw8JgHF90dvRVAIAsaEdghMcJAA1Y9cBbVpwGPGSr9JXeQeJ87hcBM+ePZLPAOMTgBjrckJr0OKK/jBDgUOZMSQDM3waPkwEwj2I0/nP8lbK37IjMxsGIy4Zdvwsyfs85KSMJEOU6vrAAEiCIZxZhRtRIhLkjFAu3Ub+8iFkshEB/qCGeYmXoWDGPW/QkCjHq3y9+9wX/pP4p622HwLH5OnIxM7ADYGIRpWTMSP9VKfQTAUAUn5EvegQvPnVxG7pNn+zZzp4KVGodp6CUMqaidYRkgqUGBdWeD8NKjT2QptSLItSomAVFik8gyLR2ffQYlkJ9429G1O4oytUar18/3xH+JsP9caCGOZUN+Vig3BulmyVDLe8OwWJlOwOCCCMgeBRhQUWZFYACKB4ARLGgItAwEkGNmRCRfCzgiKX+pB8/EiiG9YheO7aLwIh3oA+RkwyXHJYChbOjKIkMbJBBcVEAGAUHtrAAIosgAYogIggi5jtK5Blk878CgIj93TayKeTcJBYYRLR/vMfBQRjBY2MAyAH5W3lUzB4I06Ts3AUry2MEECQSZieUxbEZAA3e+vbG2u1kZ2Przs1KoNp73SgKKaw2Vs8QhmL04sqlxpllIMNiBIWAADMDmjEjbBHMuX/ZFfixNQEGOHTlj8g/MsWWxPKjyV8ps+99y3F83dz33Be/B0wbhnl6kXxQ8GK2owhMRQDxTlIQtknTxbtpZ7u3u+naO6FPiZi0iolTl9oEFAOgins2TR0YnTjfWL2ytHJWOeyliY2Wrj7/6ViFSikljEJAJFKw+9O0R9Zo1zxEV427t8q5wjCdycz0+6JsyT3BYZGPgvJ0Vd+GRuAs0hPAs3hwPWRr4464WGKvxcfeEQOiUqh6jlNCcBgtLlQXllMdCSqPqJTRAAhMiDKQAn33bqYPDX3/CebFUXHVMS4544PFGJ6VHAvzb9zb3V9qW2YIg79lC0KG+MNRo+/3LAsyBBBgRAIRzpa0iIAgc6apIwMLsyMnyCxeQJEHEEUipClghMzKVYgAQMOzwsAD908xT5A3WNEkjzf1Y2EzPZT9VnijYb/mQBF+TJqscKaMElw+vT6jxUYM5ZLfFmcnm69Yg0HHgAiQegDy0mvG7fXW2s10b6O19SCJW6lLL5xb7qbJ5Stnd7dbCjBG1pXQdr3txq6XOO8uPvVc0Hhu+blPK7NERgN4J0oINAIKkHDmKcKSh3ZuzvN4Y6TfT0+F8809yrEeg/LNKvLEZzNob4S8j0hslXw4g28zDEyjcQ1zaLQVGjkAAAOy9+zbcetWe+22sm3yqVHUbe1Br628xUq9eu7M2p17oWIEhYTCAAze41azt9tNOx5IY6MCb//iZ9ev333p83/hP/m7/ycyNcFAGUIkFiGkvjfoKNrg6FH01OTInxkYYT7jND+e2zBbO0Xj4uOBwt/aV0aK2A0vyIDiWZGlpMNuJ928k+4+8Gmz2951mIpz7NPYu+5Op1JZaSfm3Zv3b127uXv9xq/+6ksrF65+8td+nbBeOXO+cvay1lXRCoIIAUkKNQGhHA9RSKJH6El4DCFH40rIrbHpTTvKQyce8lMe4Tj2ZvETcYgnwBTmcDIMgTM7hSGbnBVGIBAByx6JSADAE7BIT3otth0b78Q7293mrvGe0dskTcWZsOqcry4uKBdUFyoesLZ8QVdXxdStBGQMEWU6jwCTQBZcJPmG01IYB4XX6rTKhWnI3QU4qMbkvpwHJbKZpxkemxkAACgTM065OVFkjjTdNCEqw3fmLxWCAHgA9F6xF8T71177TuvGu5x2zp49XwuDpVWzuyuq0ogCHQamu7vn0g5GQaPeWFxa8EnabflQQu9VWOFecmf9w1Ylqi3Vq9Z2Wz1z7oXPWdNAQMJs6nfEbnnMyP304Gj4I+bu80lHX00dtf0R3f87Q8mb+PrDo1z9wX7NA8YxWpe+WyFb4VJwq2zokBNEnyKv795+u7Nxq6q1YVaKxIkJTNr1D9bXzjz1TL22RO5WLaQwCDxoEdx+sN3rJLvN7tpebBYW60GlGhlxyeXVhknu/+Sb//2F554///yv1xoroEhRACCnn8tnRFE06sGKOi31RMYFB5EO4xk+MRsOi74bNt+tE4GzP4xkGTT3XLyDyS74vaS5JbYraRd6bUy7NUhFs+dExAfWLyxEiQn/h3/7ymvvbtZ0+InVS0vLiz/57rd2k92d9Q57/+xLL126/OwLX/yqufAJZ3SACgTyIAmAgTc4Dwjdtz+f9PaxYXrTTlL2S5fjYqD4OWLh9z27fZRfhWEhcrw9XXxJmAUAUSFCz1tAZSDVvhfv7RpJnet0dx6k3U10lggRfNLsBAbROh1gPVBgaHtns729vRA8RRZ7HdvudXYxcI4AovNPv1g7fxFqKxDWPdWIlGSBP5zpQVlIRF9kHdTlciqAADJ7AcA0w2A8oyL9nJEhj5EBMNKxE7ySIxrV4M1hu3rfR/MLRQGQLM5fyAsBwMa1bz+4852VSOsgoXo1aEgQknR7qG2lury3x4GpGhNevHIhcZD0eONWKtJdaFSBIFisiQ5DCkm8pj1IdysStzcerO3eXHrha9HKMxaJCAGYhvr3YyHBj6OGR5TnQIvOPHGlZUuFb24MI46ekbE/MicwUs6jUtqGxkIpOxm+xJJRMMqjirUojpEEbOt2c/2XFO+sVtAJa20QTC921ZUqSmf77Z2lp6+mcVdpHyexVzqIgo21bSKiarQUqpXLZ5GUi3u+2dm6u/vsi+d/9Ssvkencv/7d1
95+d2H1pd/4xl/WAZFWxW4ph2iDkxgsw0zmAJ8rNy/OZFblZBMfzVnOg5Vvwscfqj0fZ86FxT9FsDIKC3mF1qWUbPi11yldt2mnok2gfLPXNtVAAXR6qVgAwXbHkYKV5XNho/r73/rl7//xDxbOvxTa9qeefc5cvpQs3Pn5hz1Fqhq4z5wLQe6/+sN/8fTnfvf8i19ihQoB+rFwWUmw+GdsRrhvt5fI6gkKHF1jzMWTSglGNXsojSfpd2h/zezU8vY5c1npnzEXcWRAEBECAEJGseCdhygwrfXrGx/8sHXrnWo1rC9V642asd0IEmHPTOK4YlAHxOL3droYVKO62bx3TYNAS4vXSHJ2eYmUbu/tit9p3rzVuheG9Stnrn6Wzr+cqgYpAgCdx1v07fDjd4QeQ5MOrLV5XIAzWPy4qJivsPqxZQc4cjGxcfZ1sspY4pHcZyBjrLnLRQgEdq//OLn1ynljezs9z7ptJQzQJhKlkHQTdg+IGheWL1puexcrH9SiUEId6YYiASTnwbJORbCiAYlRrEBlcSmIqnt3Xm9t3Vy4+jmsLjEQAKrHW3weGKe2sggDGhz4H/Jo3cle24k6PZRY+QztHw7r7p2IgdDKvfoAUKwhK42bkTizgbgqvI8Intu34o13q+BEVZNOMwXWgSADAbGNdze3Vs+dNUbZuBNVQsXae99Nup5cY2kRurK9vXXpfL2zs93auA1efvXzLz792adN6LrNDb/z4NJq/dbt177/J/Kbv/N7ClTRimPNsx+VjMyLHkf4nMzszdkYZWpjFyNlnW0eHAgHHF+ndjieLKTwxQODoPdodAy7b+3c+HEjXdeG6vWFtNMWI72kK2HDoKpWq5v3t3s+DeqV2nJjr9UOXe+D1355ZSXa3blPzv/KC7+1oBDj3g++872/8Xf+6osvXTh/6YxOEmm2f/69b34pPHvm6jOgskE54iAG6Nsig9+Pn0P0scSYyTUCKRy9Q6+UAoHKv/rOo3H9ZNzsHncWZO+ewPjMIvoBgUB0L033Hnzzn/7Dnfd/+umL8MxzF5gry9E5Sbtxt0WBqtQrItDbjZu7e4EOyGNF0faDe2ZPztRwZ2Nj+852NVquLS13W+1Op5f02rVacHZ1waVi9+5uvLURtbaqz33FqTMEKGUzKd97othB/dhqe4SQYeGb33qY7GDQIONuxGnQp5mVz2EIZbbTkFdjmiAca2fs35w4P7Bvo/RFMgI6FoXJz77zP1xpJNWzl4Aaa2sbVz/1nKqGWkW22+zE0ZmVehTVY97TBpTRoVKJTYRQArRWiDUZoxGMUkjIDpwjFvJBzQUm4KZ07jbfXz/39Bd54XmhzHLj+Ur6EcH8XfNoUOz5KsVWxuMm5TSDcxwTbVQ4eN1HhrdMKslw7v09m3OBJZBpFJmhC8V2DIBS7IMqrHC3ufWmoa4TIq2jxfNBSNb7tBf3mnsVl3gfL59brjTqTFhfWbW2CbFtPtja2Wqu3V9vx/6dN9771V/7vHDPQ3zu0kXTeKp2YWnrwYNuEj/91Grit89/5tnX3nmn1/o6Lp/RqEZCgHIlCIeqVvwaYg4P05jzYFjdnzwnPY1HTeusfb818eeBMN+7I/JqtqG6f0aPOXKiY8DsPB2jYrj/3c61Hy4QerQWGxi7XitG5ZYWl3o922r3qmZZVVfOXmhQxYj3vovi49/89KdXzErQWP3kJ56+sFJb27j1/NXlemh+9eWLrTj54I07T125UKksBWb3j/7wn/yn/8V/6cErUNlgHVoDN3aCZInSJkadP8ExYNxMn+a37z8omW8ja3z7F2W9f8QdMNHLIMNpHhrDHyk8J4SUeqds+9brP/5v/6//5+aNG7/6mYvmyrkO9qy40Nbi1LG1sdOJF0XKe144u9rp9LpJTxuKzlUlTlYaixdWVpvdrWYPO81mY0EbYlbU6/JWs5ewT+P0zKJOt38aRi58+tedWkKgbEWwZGy/2Djoo0DiZdfRnD6eEnEcSE061TMA83nORsu/r9mAxUAbMr6nZTclq/5GDygIyMz27rVXz69iNaz1Op1A+7R7K0qiC5c+0WVct3Tuykshonf21r37zQf3n/3UJ4R6yyvLgNp5r7UBME5QEJQAeBYGZjDGaNQihAyGVCidB2/9u9VP12HxsoDgoz7I+YTxkD7O40bJ54Illt1fFzx5NmBmhgCTOMCR6HkjTKb0EwvNdcRc6euyBZ9FASEAEXSuvRZCW0iiymLqMEkdUaCVCsJ6tVKXtBVVaotLq10hUaiV0lwPvU07fuPuNnu3u7Xn03h7e/v8lTMXn7myvPJ02Diz3l4T3IjjHtejjft3eKdz/0O498HNF796LhuAI4GOfUV7eMJiVmsdn7zoC/Sy8IZJrGZGGU4fax4jiqG/By7v6avgAZBpHn2lg9Gp7q3eze8tqASpbsH3etvsvRYrllUaKGuNi1FXa0shBGjT3u7alrHS7bafuXr2V77y1a29tFGFuLNz/sKiiei3vvbUnc2dCMNqfRkkYIDr79/6/mt3/urf/s9Xzq3mOyZi7scqNkPsu0AnMMsnq8UmYFhtOnQDzWI2OHgy6JUZtn4p/TTIMEsZyuDYleDczQUAgCgCYvln3/nj//p/919op1cqlSaq9dT11ncaS/Xm1o4HDANdbUQMziVxrVarRAGSrJxbSCV1FrSthaKNDk3VBB5rC6a3vbW1vbm8tHLm/LPtbred9NY21tJucvni5d6D1zFcMJe/whjly40L0/fUKwizMEQYfYzUZnblSrbk/K1wqmcAYB/75/CCp8hvRub7vom5GYAoghSEd2/cgW66sHDm7R/88uKV5dUzlaSzvXXnZlequrpyfvlqutsKG5U7O9cUmQvnL6QxBxCkiY80CYj3HgkRUIRZELRicKLAi7POOefI1FRAmNzf23pzcfGyAyxbb6e6F48Cj1ftSsx8xFkzhHnMzhH+frh2mEUemfybpB0U86uApT/9OcY+72XU3baFOI0qUZI6DEIlmpQySgFq0JGYRSKwYZhYh8jC4ntCaBq185CsLder4coKN/3nf+1rO63t1SsXJeZ2Zz3irl1bWw4X9naSu9e2pYZPPfXJRiPonzZQHD1QNFF547O+Y6io+kRt+wRUohmMZYaHbx48oiE/bpaW73+0MT6blB3xJdm+Wbvr7yi/g0jOcrx9P6wsaRN2Os2wWtUN1bl1O97rVBa40lg0cYXb8Yp4BsFaNUafRI67No7jdmszJff8C8/d/fDm5tpGffGcldgEZ9587TZStbln793bWF5dAYXFmRvFoQDSjxcXGGJBoyV/ggFw5N9jwIB3HiD9RExi0YU3ZuTto+/wUnYCOePNjrdw7v/23/5/78YmSqTZc+qXtyXpnV1UX/rqy57ShFOsV3WgtBYl3NzY3IrvLq0spAnphYpiZdudrkuJqdXdOvvcRWN0x69F9aTnXM9WSTjp7m2v37nw4jMS1JvxbtJ8b+nCZ0FVFWTHZfdV51GX0OOE+Z3903Coej/iGYB5ytwn8bEGeSh/6Lgv6+AZFboFIgucO6M2
N9xPXnmVfPjG9bXX37+xcmY1VO988pOf/fQXVsjF3u5+84/+xbf+/bf+g9/+S0mqtAq7iVUKPZJnFgQmEOcxOxLGiQL0nj0wMGgdMoL34CygTX1+5oUMNc9JbQc64n44+QF3uC/KWGkfOavYt/VG/FLjVZj/Q9Myl8k/MyEixXbSUDjcM7Zf6BzMAEigkJOkt9uIFlFTs9muVhc0Ke+dCiJhlbK1KddqgUKNLgXXYwBGChcqX/vtr3S6zd21vV/983+rFdob1157r3fthadeSjvp2t3rC2Tv39nstHuN5aVbG/FOeu8zwATUX7Ag5Y3T+mvCSgOirP2fAA7UNeOzAfu+WyaAaX16+kyICd+ZaI891vDOKrttNAI7m1pxqXLJ1oO9ILJRbWHv7nraa0mv203vSNwEjwpCJTrS2GHeTtxStdZLOjsb6/VawKJu3t1MUul002ABkl7S2tj62Q9f++DWVouWgbTSRsQDQB6EN1i0k5PGWKt+ZJr5aCHlq+NqoxHNuXyzb65NKtn4DIGMMY0RcTZIjJPzeQiUckLou4y8d3duvPft775iTBQ09PpWp6LNla5TAb3x7r3VpaWrz10MjOE0vfbOdRv7heVFUCgtDKOKWFuLap2O29rca7c61Tps/vwNRnv1hcutnn/tzes3774XNzu/81d/5+qnvtxu3d388NrFK8vtnbsrvgdKMN9vo9wmMlLSQ1fxuDHyndHQvYk0MdEqKKti01+dhke8C9A0EofhC5haq7Haz43y9MFBpxKK806zKwYRg7R9+9q7v3yzidF3//gXG712G4xgit3O6rdf/Y/+zq2//bd/5+YH7/ziR3/6l3/3G1/9rV+3aNvdLoWBFoWeiIwTJ6lXhApBnBBqTZoQPAMQJq5LCoVdRKR1oHIfbObqwfz8vBOn3YfxSR9JAQ6HPsWcWMmn2fPzjNVDqIkTPzRus41IFBx4EPP/8812+qsB8lSAwNnZjYQILEiJggSTpLu5Uan7FR11u81gacVaq2IPGIKWRhSB6wGztj1wTYGu9V5hgspi0GwsxTs7H3ScvlQL7NbNd9+/wRDUF9m7uLO+C2bh7bW727DQXr9z4bU3zz3/+TCkrNDYd/xLUca+9xPLVX80uuZ4v8+YDZjHtCsTwAyGiZP+TsvtUJibU5bG2XgxHhO1dERKDG5LdhqXICATJNDdFNdJN7fE4UJNpbzpyW3d2Q5TUtVFu75rJK7UIJRKklgT+K2dXW1cIlS/+GKaJnE3WV/rJivB6lNP39uMU9v47g/f8pWk69yDB+tpnNRXF778lV85d+miFyBUAvlpYFLq5/wqK9YEl9Bj0uQnBJxwdZxfGqd/KZhYhmmqCE6/HlBn8dqx1aUkIYr6KFL3b3+gMU1a7t5eXD+zsqPklRvrFxvh+Z30yjm319tbXDC7Gw/OX1h56ZMvv/nejXc+vL/Tglp95fJT5zvNO++9di2oL7777q3AtP7ef/m/uLCsmi594/3WP/qDaz99u7UYVG7t/Ozr33jhKqWRgq0bmytPLytvs813M2Y/dqjC8NTwqcSAXc+vsM/QIYYrO3+9T9E2oGPCUmSuihxS9S9/dB7HWzFIi9Gav8Mg4BLpucRzcvXp8w/avtltt3oIDXXpxav3b964vXn3T/70z37ntz937tLS//w/+b0z5y5H9YW0l0ZR5EUIiAWTNCWlEJX3TjwYMkTGi7AHFkHiKIwIxKYJhY3K6hUBoGzEF+Nw3DcwZ1MceozM2XRHi4f53Pi7J1n+aTbAvs045Nc51HdH/o58dPIMQE7i2WlDGflLcTc72BGBwHkrSQxup715z7Vb1UZg9x6ICcQ0ejFZH2PqkSIMAvJgwiiEAKHn7J64rpPUu97Ozg55ds32TrvXuPDsuUvn3nzjxzbmpQsXgDqbHZtgtNdx792Jd0m7lvv5j37yV/7mfywg2VGT+b4PMmQOyNB4yPeIPoGOHu+g+Q28Q/fs+M+SLjGa64izZB4j4eEwWCoFJzZBOR2HPfRp8juFPMjPxyAATpJ4Z5d6bu1u69795kbLbnXiqoWXnq9ffqbW2g0CxVt7nfBcPbXQbXeu391+5uVztfri7p63afvDt27fv/4gqO3tvvLmtRsP1rc629uuaT1pZSIS4dVQf2mx2ku7CS+GCggAit0/JXcE5bZwvh3KkH57HD18upWs2RiYS8WvE/vsvnb+lOn8CSKscHkDjG6BMO2Vg6PUUn1BMjA2eG+jqbQJQ273qNnj1m5XB+aNzublS/6FK26rHV1YCS5eXl146uof/skr1cYKLax89yc/v3PjrZUz1RdfeqF25vx779yOuVoh9c+/8+pf/u3PXjp//sIzja/8VvW1O9+noMKNs2rl3O5OK/TuTPXs+We+BHoBMus7i3+DIvwHSp6gQ+DU0vI07QEeloOfqAEwo4Rll6QUMmPIuj0ijDjJ5my0MRVCBJCzTXCBTEQG9INr9+7/8mdf/Qu//r/6X//Of/f/+/ZaJ75//c6DW3e1+IWlM9euPTh74XxUWzSViEDCgNgLMjE7UqSIPXgtRAiESIpFUiAFIAi+Gzcr1QXF3ic+wXPhwlWEYqv5bBkOZAsfi6Ng5qaDhyT4UzteZuBoyekQXx+/ntiMEz1Gh8A0Ja+cvxQiB2Ro7hj6O4L29X5BAERUAF6cQ7ezc+ON5uYNgG1Jdnp7u+fPrqKuse/ZeNOLDzG0EtpUAaJIqPUCWNuNm4BJCImLO8sVxV3e7HVc0t7ZufXMC19JQDY72z6ub9zfedDW2+t8a3P7nXvtmOOnLqys3bnZazejhToqAEBhASqKKIWqn1Wm2B9FcsfVsSug0wR22Syf5uEr40CcfESbn/RxhDGaL2khw3rQkWPAkB49tzjKs+MGw6PoVSJFUeBVd9dv3uy8/87WGzeaezZgTv/w368vLr2+er6xEHCzHeOl9oN2+/a97Tu3WmdXw4XF+lrTKo17zb1uN1FYcanVFFhyoJGMara61KGVpTO76zvbN2786f/0D7/0m7/x/IsvLyw1BBBBwQSeNs7nDl35x1nLn4GSGw9OsIZDw3L4w9h3583n+5QSZznOmpRaqi8mIDsDHjwwBZHtJuBRSFvvz6+s3rhzr75Qv7bT+eD+xk/fUWfPVJYr4dUL1194+sqnnn7x2//mWxxXfvevfO3S5dq58wvV+vLta9db7TjE2vdeubW93l2t4lf/3G/WqpWV1eDC6sVXfvCTlNf/yp974dbNtcXnnw4v/4qYFRRA4KwdJWuAgtVJmeucegwOAB7cmsSvZ+OwTPaEDIB9e2NYOzma07nnKcwBPtOfcC1+ExAzk4B3aW/rw62719i7a2++fv7ll//D3/vie3fif/Qvv6+A/9Kf++R//p/99dWl6OL5Sxsbu5GJ4rhVrVZdGguI0aFnZ4ziNCXUgdaehYiUUanzLF58t14PPNu4tSOJ1csXvdQRQRc+NUag3NcJxZzAULUm1vEIx8dxehCPHkduUh4ao9OWM38+5CemmRMD93/GPkpblxZ9moX65JwWAQRIPJNYu3dr7cb
kePNaKan1mqPWqHf+5fe+TV6/LVZ6698Eb74pZHVwMZgnDDyWwanWw2IjU7xWDQHbrINB2AH7U7QlVjyBoHiEFQQINqVbHTUMVPKiBESIoAGkVGVBGMdZSFwhhrHSXOxoZqaUzKMSkEUS5syH/jl//hQw8/9sQPfk6qtqwAu3jsaIx+zxq4R/R2x/HemQq7+zYBsrKGLLYM2xvlcEjNRdOMVMkokwSUgH4Qsp0w3CmLDRz2SAZGvMdIamlI62bJAg4GVztkqF8Or3V7OfUG/QJRhRGJQEU1gAIhiAqNMlNYEUCJAKEyEGDUXKZyChChiAdAMqQ6KqaroEIqrNV1jCMOgVmsG60AIEAC58zS0tyZs2c2VrcV+qrBg3CQdn8wGBhflKXPg2IQJ4oAapAISFS2t3auX9/qd/oPP36q0aoZMoM8WzgyX6s117bbS0fms5ApluIlSaIoiULQ4LkIZWyd9wGNAdIcQuRsWXKZ+9g5KYtkLlbEgdrTZx/Po4VkV3vSfcJ3ottPy+89iAuOraS75IX9gND47Lu90HvpAbh3cDLufVXFuwChoMNTH/5cWmtevXG+nxWNWtqaqYvfufzqM6999+nYGGPB1uNBLjvdvN6M45hqJiJVhFAWQ2SrXPhe16Dt59zu+e3tfD2NNdoxRrb75oULXSU5cRRbTVRSMFFWiMa2185KKvIsAzHWWKw347QexRHEtRKNsEVwDmMRmxekoFDa69c220WWumg2rh8/eeTEsSWqpyzadM5YUlRmZQmqSmitjdUZIk0MRKaAbNDburizsdpcOl47+URu40i7nTefcbKTOJeVxkSN5OiHdOHTPjrmMIGqOIsIkqlmYpSojxNP0iHAxsGJuyd0vzX7O6T9EOwHmyZKauXjGfUhAqiao/e757+6/fpvNFw+BBdsQ8Eak3ggO7MwyH1yDB4+dbTcXu3dWO1dubh06kzhybVmGcgCiPdabqrF8v/P3p8GS5Yl52GgL+ece2+sb3+5V2bWXtVdvaMbaDSWBkA2QUBGgAuGQ5vhACNRpChRM9QYxR+UTDSZaWwkM43NphFNHC4CyaFoshFJgDtI7Ox9qaru2quycs98+4vlbue4+/yIFy9fLt1dVZ21NeCWGS/iRsS9N87x48eXz90b0uwY9NfYY9r3KFdSuduWl7AeFSunOesmAWtqx8SQpSaKJkBDmxGaMSoflDgBNQICQgUTMZXUNm01rqumquJkFNsomWNQKauKi87WzjRJfu706oWXb/SYlo6vXd+ODz3xoZ/9hT/WTuQDT37g8oVrkT07+i//q//qhZdfX3+g2L+899XXrn/x6ecePz9cXpXx/s6Fm+215vgjn/vf/uInhiey+PQ//eUb7d6NF77S7cSPDx/43S898+zLV0O+/lf+i7/04ENPzIra3Ta+B+xyoM3hEZwt3OnaeKsrZg7gx3ke2Rth0XvFAb4Te78bPssDuk/a/x3u/1sDf1iOFgDMErbToshu7I+y0LNg4BN3ikq06LkGOaoiFyJ+r0zG6fi59d7qKSS4sd82lQ0XVhvOnv7W9c3tePrRc0WPBCgvAsroY6eyJ06d292Lr93cu3C92d2ZBOw44lRbquX61rhp4yOrOTLFBCZkgI1IMjJmAYsCjUEdzSGmGXBCFWfZxLN4gYFnTiZKhg7M4lKfupwG/axbhE7e63Zy7z2gFp2u885UesP+F3/7V1fOrC4de5To1uQf8ui7Lm+/X+ntHti7cB/zRWRgiFEtpGrjmd/81b/3N9YCfOZHP6X9xcGpFTOrqqmnA1vXYUJWyETJY+wTBcFhubFZ103WGWYDHZ5YPqbUvXi5o5LKKouxScJEyASIZIYATCCKpjOJrjCDFTHe6gtgSLOGL7NMM1JEP8tBVJTZ4jQFI/SBU9IUkyGQcyoKBKpGgACoYE2bykaMQtNEDk6UxGBSxbpODJTamBrmbgYGhEQqzhs6X4tL1k7LyeWLlx9+8Bx75wlJU7+XObeosaprImucuaZuCNBEmmbUxiLm3UYaRgYBZw6xoy21DakIdRHMTfa0z9TPF3NPDGAHNRXnXQAAD1w587GwO5beHe7Vt6p53DID3uq6fjdzAO6+1+998dzKcUSYgScF/crDH11+5GMGKGAIxlCee+STsYabL3+BIJX70/3tvYhZlTp13XRDJ2TOM7DHph05jYMeXby4+eJLNy9fnwKG128210ZNSnGnBPPZ8WF8YDkzpes7KZS1OijLaSOtK/JupzPsDnvdDnW6FLzz7IgByDnnyDMF8nkEmNT1zSvbl7e2irx7/vzJMyfWQ6c7LkvwIXPOMYGmqCKCigCERqoY2aIncTqtdy/sbm253uLxJz7lw8I0Rd9cTxsv6uarKe60zJikavbbK7+NJ75aPPgzsPAhwQUinhXHOCgHdMSvCfdyy72zcu0dpXcm0PF20xEJMHfzHU6lAZjprFqIldXVb4wufAPValpMjIDOkZ9VGAR1ruhY0NYyt1Qs+oW4/9J093Jx/GwDXfYDMfWMlqRtdmSChEEhx3xIdjKYg5FLo5txdyeCy5dO5HnGkFuKhgYcwFpAI2BDZ2J40APVUC1Zi4imBCooJiqmKg2YMpqWVTkpExAZA2incVmiVO2XXaInHllJwL5b7MT2x3/253/6T/4HIKlpywc/8YOtkffhoz/+2Y3N7d297Wef+eqLX/+tz1/afvra5onj7rHHP/XgZz/92MIHrLe4DuANfvR//Zedr05fedWHmjvhybXxx3rLS0snK+sgBTeX6fPBvuW8OXxxWHnjLt37DU3dYb32mcI/tyhuVYC/5zbxHQz1I+/f6mv0/Uh3Q4AOd8bDdxxhzvnC5p6VFdVlTHXbO6XdhQ5njQtQVnUrdd3U3dAZlW3R7S+trwnqZHe7newtL60Hn2/vuzYsHXsg9DvewUQttAmVg89Zk6wOml7hTg/Dzg5Wk3RzO23tp0ldr6zkq1ZUjUaA1riNEAWCYJWkVU1qYKRmjABABGYGhKgmjEyEIuJ9AFDHzjGi08V+54Gl/MRSf2HYCd6LOufzJpExlyklj0XmIOPt3RvPfPnzn/7sKep3CGYNgW3e9vWN1xz/fbqvdIQ331gIZl6z794fnqWZKgAqWtW23ZBe/8I//l/+5v9jNJb83AmA1iZX4/a+86Hj2JTBGIWAAYGAgABNgTxlvXwhdp/5/FcWFpZOn1idLixEwY8+9dDk69dfubrXOkMFZkJGVWQgQkNCZmez0iaGaioKCoiIDtHxAYZtprMAGgMiGiEd5sYCEBk6JoFZB8cwmlRtikwuARqIqhExmG1s7b12dePUieNZp4h1k0QZUMR2x9Vo2gxj5kosOrkLHhHEFB04jyvLi0WnM31gurFxs66bAOC8r5smptoxA0pbt8EBI5RtvTcaKzGopVbEoE3iCUAUAcFTW0NZQ+Yo93lbRVe4GJu820VFpHnk9VZFiIM/hxN3p0v1Dhvue1iLbyl+cIveaQPg7RY6c9NiJuMQDRgMDMnAQOlgJ8yxc+LHf+7P/vav0MXnPt/NUra2cmWv
2t6rCySn5WDQSaJ50Uisy1F1eVw+/a2N116f7o7MuxRbCMm8Q9A07HRCyVtb1e64RY2DLode7PTC2tJSyHudbje4wIyGpGLRDD2G4Il8AkumHkQUAtPWletnjq8+eu6hleWFBG0z3gtFV6upOj+1CORSYnQ5MiMDsplWGKeFjfZvXNrd3hye+xAvPVZmFNrrsP3C1qtfD+WeRB2sHq8wmG+6eR6rkRs9PX1lPz+xHVZ/JPp1zwwgs+7zAPNqmId1IecjOq8mZodogbdjEt9VHMJRrfn9SkdigIdPZm2CzBAMSSxxKusbXx9f/gr6lGipjJ6dOVRiJBdULLYRzNRIQzf6Best5p1e2r2wffnK8kMdoU6EwnPmg2hdSb2VUsvFshTHIF/zNEAMaJyanWZ/T1KDS+t5XtQRkCzPBxKdpFqxQRctGSQwJyjRVCwRHCi7s/YVCoKAzoXM+UQc6zTd2a/3Jq0axdRulMIufvDs8XHDV27WKBAiuYZBFNiFrKcggXxK0l3onxsOz+iDH/3kp9D+PCTxweqqcqEXRTgwA2Sz8j7dXM0Pzn4I2anh+iIIgCJ0EOgWdxyRtYc28yzrCI5yz1F1/Vb77e80dXOvxdGY7h3L7Z7hhXue97bw8vzAHdr/95ExcLcz7TDiPpsjm8FprFgplRQVIIkklUQQi4I8KYNonUic497e9ObDH3i4CMXe7kac7K8t9Y35xvXyt7+yMc16H31iZViATZLLXK/XVSxUU71Xj+u4t19vXG+aaVuXMcXC9/K15SKWzWR7gq0DYlEbT9pWUckJuQSW0KZNrcgGZLNS/sgGxoYGoqLOZwrmyQWCXtDTx1bWV/unVofdvsPgJpN6VMbp7t7upB2P62nVtk0r1bTX7RV591vfuLA8PP3hH/3svFv1rfHB37cB3gG6Y4m+eVjt3JN+4BWYeZNNjwTsEc1QQU1jB/amF76x0tn/8Ecf/Mf/+oXf/OalvXL0R/7wJ4eEKZZEfcVM2wPnAqKhCak2ZeVCEoOw0Fk/d+KFZ1/pDnOENGR8+NTSxraOp62UrZGIwKynHhMSGs4K0zoS0jbNzBAxIzNQMaCAAIRAAATIBIRETKCGYESECISAqBnxDD/f63SabrE7nmztNYZBFRBRFT07ifHa9T0KIaZWUgRESaLIlaSLN7eGK7kjmo6qkEUwLTouQQtk3rk8K6BHqyKpaWYtmBAxK3LvCNCluo6aIFVcFAm1FQEBhCxFJvWOQFIClKjN9d2NSZwu9BccAyJkFFNZQmcA5BBxVlBvHrGdO93exOp6K2vxvqzf9wQE6L6eczYHh1spHl7rwO9hBoDE3g+P/ejP/vnfdX7vla/EvS3JsknUuqyvTaeii85TfWM6rtuNG+OtzXJrLyWExUWfE/Q6OFjIu3nY3K1GVSpRr0+aTLKVhXzlzMqwXzgOPnPkPTAoKTIjATt2IQNUcGQEaoqgCOjAUqo/8NT5Xnfo2LZ2bmT9Ttbp+eBMraxLq5XRZ50emiB6UgdVFaDObfOVp5/pLq+eefgjsbdWpl2sLpU3nk/XX4PxqLOw3ls7D8vnghts3nipGV3oBUwxtaOXd3dunjy/E87+bM0nPDCTHA1M3V5yBA71mDtcEN/L3L2tG8/v2V1tpu/boQwymPUjUTQDcCrUbm6//HTaf5k0TjQT59AFNRWGWYCAiDDPYNaWJXESSuiFTvTXF/PxjY3Xri4/mIUiVxGgEDKMbV1Nd0DUIMsG6xRWiNDAcBrSdLudjPeaKyvrZ4rOsNVWWRA9kMcUQFtmAQ+aBFKjWhN5VQUkE0FSQkASZDU1l0F32BkYbe/HcYWUd/ZGZegVCz230IPhYn+wvDJNVBzrra+tAgMAABKAiSkxIrABekadBQSZTCEr+gBA7G4ZfggKQPNGqg7B8EA4Hsn8tSOPR3Xww4Kghy7nWTHoudV8q9T+t0UG4V2R4e863XfN/vziR+ieZztqPHy7z7w/6c7xm//GWQTFh6UzLh+Sq6fVFAtWQARzToEFEFUBTJJqb3noB/2kMUXtdBcT5K9d0y9/4/r1qXtgfbnd39e0WkdcdJklUBMiGwxCwX2NaoshdaPFeGVnkpWUIiVi6vbr2EaDpEA+kGICaMUaLijPjAABAABJREFUkVqFOcz45aBs+kEBEfXMQI6dI2d971cGnXOnV9aXh1nuWqsv3BxvjKY7O7uTcqqiogytBsJO5ldWB7kLiwu946cWXvj8Pzn/yEP9Y6eQjrLzoYPn96y8fEfoIGPooP7e4eE3HoI5MBz0oHQ3IpoCECQDIDMRh8JtZNnfeO1bo4vP9nFUeH3qsQevbJS/+uvfeOaVif+X8rnPPrk47FNrPl9CBI1ioGpqIgolgk32ppQxojxw/syL37r89RcufuCJJzluFlN8+Kwb1ZMrX34dvWdHqOYcHVQPJbCZcULgmYSMDEXMEDRpwuiZkYgImZCJHAERzFpkM6NnZkZCCOwgNaroMXW7fn2w9rrbvbpfN4kMlcgUEB3tjsb7z00ILHNBDcU0Go0bubQxLnpbT5w/EVwq8iJ43NvdyfOMUoIsjqsWgKpYDnsFu6DGnSJrY8zYKSimGFNVx0kn+OC9iWbOe+LYauhibKOqIem0rF67dH0ybTtnfJY7IgNsOet2Vo/rQZIvHPRcgoMd4Y2n2Lz1wOxhuPnOzeRNnOHdNADeNkfyQeereTudW041AwOe1cJB53sy5B/6Y7/06//zvr16/anVbG+/mpbNPg6KU2ec77Tbe26611wad5w+crpfTid5N8uLkAXy7DzywiC8dvnmYj/3Plta7S/0O50iZ0cGlkAdCSIJGTM7F7zzRB4YHLPhrGSUipaGYNCeXD8VMS/raWdQuJBFlboaByrAkpl6Mkpl5nMvllJNONq5+dz21qUHHv8ELj6yn5JNX7Pt1/zulS4nXuzxg0+U0NvLlvOlR8QN1paP6+SsTS5Ks+tN1ijS9r+xUNixn4p+TR0zAIGBCQIp2NzjeTR8dRhoOuKffEt0qy/Z20W/Z7c0u/XL5/1gDBOasozGVy5Ot6+kchcsg24nEWJMnhEdGlhSBdVkhprAVBQRKcuCR2xT2LNBL+/mrW2/+MrxhyN3VmtlznqEnuNeqve1gRzQL5zj4Zp5rnYcqwC2qdwfbV1ePrOQh37UiJwYQZlACVVMwEBmvcJYGGJEYjIBQzPBlDCItK2YeGeLCy5av1LbGVXetaePL586s7i8tLw1Ak0OGruxEYeLfdOEFAxnK0xx1lZy1qN+5g/guZ5t8935iKvgqNCmQ2l++IGD/hlz5fngLx4Ef+eq/vwjNt8FDkpBH1H750baDAX1xtyx3w41+kaCckfffT8q/XPwFd7uh7h7zO78WTb/BzPrrr+ed1er+rLLvRVIhasbFWGgkDQ1jeU+j22zvLYUCarJCClIyq+P8Lde3H3uZvrExx4fONm/vr+xudAf9MuyVozmYpYVSaNQ7Hct86HcbgMzodv3ahr2R1FjzApNSKkxiqYGCmQGYooGigJIxIDEoiCqQAAOiZkRMyfrS8W
pE8un11fM6Y3xaOvK6OrG/u64bWIqLOUOhgX3M15eX1jqdwe9LO84RYqoFWxXTffrX/qnP/KH/qRkPQY/Y2oznXUVmzPotw0l/T59j3RrqR7KjDdZrtHUEG817lRATK2nJtW73Iy12mj3NyGW9dWLO689t3D2mJirIn7sQ4/c3Nr56tdeffaVvcHC9U98qOjmYw4NZ5lKAo2Y1CyJtWpJHWXUacqKivCZz/74f/N/+3+G3vpqPwNfDXr08AMLz1/qvL4d1cxUHBE71jT7FTN/KjgEb6QAiVRAAZ2aKZiYMQAzMqFzNGukDSSM7IiDJ3QAakVesKNY18GDh+qRU/0HTix969Wru41FAFGdeXQwwUK/yHO3tz9mJAVqE90Y1Ul3DfPTq0XwAbUdLPR6gyDaxlh7F8ppu7i46C2SI0ASaRGlleg9BTaVtk3T2HBKbbcTMsco4hxpUypoUk0JJ1Xz+tUqR+wQtu2+9RxnxYMf+kHMVw8Anzivpgh4pFDcG2eSN10J6Gj7+TeyBdz59bkJ+vYaAO+STDkwm+cTgXe5tg0QRZV9p+HTn/65/+S13yjGL/zuSY/AJ0796C8c/9CPC3e1SlDe/Fd/969d/vq/4QAxdQWSIBJy4KybZWDpwbDWppSHvFNkWR7IOQVkxweANyRDSzArlYUOMKmqAXkPxgAYKcUm5iHf3N9MkPV7iyJO6sjkiDHGCRMnS75wBtLWDdKUXbu58dLm9Suf+MxnpFibpkaml2z3+W55owOOXU+KhXGTt76oRsI06a8tQxxA8bB2T+XWJI3QVpxG0uzBxm/ny2el92CEJQdIgLMxQ5qXOJyp6gcRrftQr8NuQ6y9Tdwxr6F+h7bwHW/r/b71HRRdQDQRIJ5VfCNIFEflzs16vMHgQrHg8s5kOmqaptPtsMl0UpkpO0KHEhtpWgfMzqsYMNaNZMwMUKsiZ+vrjwKn66+9tvYwaSck7ZHrFgOQumyaabl7xXeXIvbFLXI/QlsxijVxb3urrJ89+9jHiAtAgWQMhMwibTJVRHSo0YLLGMUZJUzCddNU6jJHbsBZOa2JaFROhuLOnOz2+2bAC0vDjJ3UCYWkbUTc+tr6w488CAAAOlOrDzT++aZrR4C0t1R6OCxWAXf4bI6K8cNOjjY/3W2ntVvNwGaTMXu8zfWPt/aH2etZ5GC2yt5Igu9REMHRT75Z5sVv8/y9Q/fQ6w8qO9sbuP3btNmjTldEAu52lk5i/k1qWgicRAWxbS0LRERtK46x32PKvarEps0wa6H7teevffmF/f1JArWnP/+1zzx1emd/hBl3esvlZBzYVFLwobMY6rLBiGoIhAtLA+RUVyri9qdVwATcSYw1KSuZgJgSUvAYVTPvo0gUQQPvmD2nZB5tIYdTy90zJ1fM0cUrl65ubO2VWk7akHUK5wtPXXJrw+LYcu/0cjdBAgqTqr66U2+N6lFKAuTg5tqXrrJb/PRP/3wCcURH7dXbBvl+zd/7jN526X8AoJ2Fomx+yTd8WURUSAZcS2SVHEfNha+Uk5upnRSdArS1uvTeHzs+bEZLL1+4dPrUGVcUmUw/+uj6pVev7lbxN7/2en959YMPDVw1Ua3quiUzNEOTg8a7LmIN3ud7u9vDhewjT3zg6a9++ed+5qer7pgl7e/okw+e2pxeHdXJe3ZMosoExNzGlpmUARUdMRCaaAJV4KZJZDqzXwjZOXZMmXdmgJgYMTB5Z855YswDKlCe9RwmaSOAZtA+cXZl1Njr2/u745bRGYmKxNh6VgMok6ijgjP2fmfcXL5+E5r85uUb58+d6rfNA8Vwd3tnfWk9D04SNeV+McgNsW0jK5oqkSapAiMDeco4ZHvjMsuzkBdJTZI0sfGBmENdp68/d6lp7ezagIEboe2t+PEf/uSnfu7PaN49DN0eYqTttmLRb4g/AG7vx/7d6FaHGJiz0xv/LhzVwOydiAC8W2bAkZ3yYHc9CMoDGhgRgmpOeZuf/9Dn/tMXe/9otPHK6rHHho/+4eTXHEIMkTtLD/zg57Z3Xi2wqstWRZIIARIzAtV1C+wyHzrB+8wjUUxJFNgwyz05QkRkohmqk8AYFNFmzVwOSubSoD9ARzGJR4eWUJAQwRpGp2Aao8+dZxJJYHVOzTe+9DvFsP8Dn/lZoT40U7rxvO286HTKGVu20NJS2RRKvtopQ+hatbe1MR2sDtvMN37ROR9Is6IocYo4KmQTqm3mHQ5PKK+rZkAABrMc/xln24Gac1cV27c2Jbc4Fo54q+8ne8z1rd9T25mZGSGoqhEbgoCyTpvNy834uneu21to2JMStk3Xd31dtW3TtK2CF4li1qYaJAViMFRl9izSkiMxIdPMO0S/lWzl+JPVa/HiCxfOfTCzkIkN2bqAqmm3jc3+jZd9b7VYPR/8WtVUAJjHBJJG2zdef+Erxx7/OGCGxhlFkFKkUmhD5rUpfdZxgi1WAA7ZYyKHXrFpYmWgHcoMxLjPLvgQlpcdhW53uNREdcGXW2U7qin0usOVbm8RkQ9LHR4q+wdwhyO1OudFdQ7QELcFAg7H9Ghpn0Put3kJx8Od/PACh12BDkICR3IEjtgGdtvfWwfeoPy/29nzvTD6e9PyvXMW5uEXM7vdsL/n7R+VU3PfK4IZMnj1xeL589nvwmqCncraulNNMlp1sUwF5yShirK4WmQ5TZvSKTIVVzbbp18e745osbdUbW8Pe/L4UycuXnweVo/lnVyxl9I0xkZi3fH5Qn+w107zlUJircBglAcilL1JczIbXN2unDEhIICKACAhm2I35K2KZ04WHRt79Sorg84ww1Mr/SyX7d3dK+O4uVNJAnQu9+xMC2lOrnSPrWYLfe+db5rp1kSvbe6OyiYaJSLhkNrU67pyVP69/89fb1r60c/9nOSMZsTzPEw7sIN/D9Pb/OPnnDgPyt4a7AM2/a6LEI2AI4hj2L34ref+1d9/ZKnpDoCyUEqfXOAUjYImW107VY3T1s5o+XhwYEsLnYcfOPa1b15tgL70+eeO9x5e6CM3ZFREVdCEoIhkQtqqQD22uuh290Zbn/7kk1/43d+6fPnmQsauk3cXF47V1enFzW9eGgv6PCtYzSQBYhYyVUUzYkADUvOOETmELOZStU3dJgIiAuccM4CKJ/SeEMyhesKMNfhAkMyhcy6EYBHG+xOB2PF2crH70LGFV67uvXx5q0mYeY+GZVnOxECMGqsYoHJOH35g4fig88qFnX/5u89ytnj6ZNHz+PiDnaWh87kDTWAKqp5IVZhARQzbaUxVGTPs727H/d1Y5F5Qq7q2yrI8cMhjA5s7462d/cUsO76eN2FpeO6pP/qzv3Dq4Y+mzsDhrPkA3qqmAvAWIgDwZrkQj7qz30w2z+3RJwR8Gw2A98LucuA0Oih1f3Bw7u0zRCTTjKml1fM/9ksmrSiic47AYgoEKflj555aPH6q3X3dJSNwTZtQTFGRoOgWhqhE4FiJAAjIHKP3xAyAQOyYnHNMyAhoJgRkqoYWAQmc88HQIxoxEhCiiAGSZ8dREhiEzDH5WCefQ4Dyy1
/414uDlYcf+WhsKcluu/+6bL8Mzbbvrzau2/LQYJA0xFqK3iJiJ8Yq6/iqHiH3icA56lBIiWtaBOiidkIS2WkUnsdsx/VPtn7VELIZNGEGY8ZZV20ysPu3TxzYzHiX1vWu0Pt8/zMDJASbIbi0ZWrS7ut711/LyIo8F9dpjBA8ABtiKNiHoPv7puDENOp0fz/rIJs1o1Ge5+RZYgtEyRQNTdVHc4ES+HFLS+c/unnx2euvXnrgid5+pVm+hqE7XHJVW45GN8tyBK6TdZe5t1Y3VaI9P4Q80u7WJl14/tTDH64imAoRmfNOLcUKM9YkiaBRJWsFwJDMO1T1aAhqlPpD9jEhIznfhYxdFzhXTK0pMpuImfQHi93uQFVoVtkWAOCWFj4bqLnTHg6Y+UDHh0NojyHMK/ncBTU58JwcnHeelWc4xwLZ0Y39doiOHXlyYH8c3MdRk+G+V8R/v9LtiJRDN4HNbYC7RcbRbxx9xCOTYWiGqmtPfKxYHRrujKOOJnRzM54832Vsm7pilzfN1Ah9jlArc9ir7MvfvHHhWpVnw0cfOEH1xU9/8lHDxhW2tLiAZpnPQJOkKNKWqXJKzrRNJTFp0iLz2igZFFkhNXTzbL9OjowYTJWYDYG9Y6IkUQECpI6DYZGv9zuLuSsCj+vp1Z3x1bFMlAPRsJu1TQwY14b5+jA7tpSrpZ3daq+2/bFMGzCTboaDnmMVYgYK5DE2e32x53/rH37yqQf7D32kbjEjFDXHs07JoAeS/nAoZ+vmfS4X3yi9I3oK3m78zz0OAN/14gcKDJrtbF39L//Kfx6vvvjzn33qAx88QbESwlD0cyatTUXY/KmzZ7/8ta+14JaHRR3bxx8/+/JrWxuVPPfKjQfPDj/w2DFoqn4vF21EkwkgE4GZqIICgEOuNWXdwRNPfui3vv78T3/mg1U1ciGsDPWh473Lm/uVEKgkSX6G31cgIlEFMEKcVfvxjgqPReBe3m2iNFHMkgr4kAdHOZPzyIQMQAyZg+CgCD6qOCe9bkcTgMbxRFUTmPQpPnFiuLxYbO6Um3uTcatEgRL4hI6A4/T86f4nnjp5bCGdO7H21IMnPv/8tX/8a5deuhwcpV//0qs/+9nHP/XxRy0SajSDGEEEZt1ZTaJKIg5m4ZVXX7fWBsMw3d9XBCDvzE8rq8r4yssbHc7OrHVOnzr75M/84tkf+lnOFiRkboYSnQv0uQcI4M1GAN4QG9zNTUf4441/9y7X0f03AO4Ww3DXy3eM5qvn6E5wa+XN9nlSDYqCrFR4B87ABIgY2TgmHC72+v3JBDgnbRWYQC3hLB9GO0UwQiIyRjMAQSBEZkQGIueYZ9o/kSGoCjtSEwI2M2AEIBEzVSJiRBBA54iDGasqO0QMqdGQsZP2y1/+AnD8sR/+1HhfOd5I+zfi6HJuu92FTiUgMKjaolL1HvrDwSRhgki5gE57RS+zhttdr10A8Fmnh8MqFjXJFEkCZmFUpOdx7+W8/0AMD5lbuOXzP6h/fjSx/f5M43tH3bljr3svGK5vkOY7yaywPieJQa7vvPpF2b/eLTqCWXJB0GEIZGiAyIAAyNxf6NZTbDRZ0n6H9/e3pqOdcjoqOr3BwnpvYRGQ1NAAmUlNJEVyrjXYr2Fx7bHRxnM3X3nt2JkPltOx73XaCOSsU7SxradbF8Ckt/RACNx4nO5ewl4M2t688EwWcOHsU41knrxrkEwFkJxPlpJLjjwJoqqYELMzjICgCVwAVYxC7IuE0TJRbKK142pv0oymamSpiTGaGnh2M416DseHW+r3Ual5u5P5LmX/8GPz5ppzNR1viXuYo/znWLnD7Jb5p/Fwo78FBDoMDtgMQQt2aHK8C0z33uTz2+/qsMP9YSWUb/PB2w7emuxZwObA6qKMzj619rFPX/jNfxpynZbp2sboI7Su0JAaCFRRtINRph5To+GlV3aeuTQy5lN5OjPcO7s2WFzKx60uLa5mnJuqJ0zEhoKgqW3qps0Cp6htknIay6lWE9vba6WlWCpBlnlyraoaIiACMwKZonU6QWJ5fKlY8m0/pyKXiLBTtxd2y72aWqDA0nHQx3J1rbPQcQsdThi3yzia2P4kGuLCsFjp1MeX8pVBlhXeMauRZ/TBgw8Ysm6vuvL1Xz3OjVt8OF9YAmY9GM/DcC8cYf/fI9o/3IuL7v8OcCtKePB6Dvj7bpeao8uxrtu/9v/+W//iiy/6ehz9pYp6j51b7HUJM0hAiAqSPDEyx6QvvvLqRz7wCKr1++746eGlF3eN+F//9jODweD8er+ajo1VRVQUABgMGYk4mdXjPZ+71ISHH/nAr3zpX3zgqf1lr561k/Hxpc7aQvfSdmUmjsmzExUgBJ2hh5EN0SF5YwIXNDATcUrQJlQ1I2JIXZ85guDZMbFDZgxsnqDj0ICYLcOkJot9zPLuzs60NACRppkGiOsDv7q8fGmnvnl9x0QyhOPL+MTZtafOnVgbeI9NHE/y4D/+8PLVy3u/8/RmcfJUU+7f2N5XRJ+HqE0SiK2ZUlJVaSU2hGxRb2xtX768lVokcTGqGSmSQhxPRqOtaQHh5LlCp+VDn/rJJ/7AzydYYHQAqHYrm3E2R4ci552wKOcdKb/HS91/A+A9JTgO5RrON9vblQAEMCMEAzZgBDPQuT8EDQwZQtd1+sokAMSUOxQUNATVlFp2jAhECESmpghIiGgcGBER0QzUkikCE9O8szsxIRIigKokYoeEosbIs6ZfakaMxG5cVsNOjzB945tfELRPfepHyqaO05LTWCYbUo4M/WTam8RcBz3rrrDrJ8Hr29tNjKNJWUubFYOFwfix08vdoW9iTEIMLXHbzbvO1RY4Fj2ynksKcQeq57020H0ccJjUAxhCglm1rjkK4r2juL9N9P75ebeCjgCYBAJMdr71m3HngidotUpcsHnoBGLiUBhiAhQFAALKsgIxRkhlmZq2Gk3HN/f3d8pJPq3KdYSF5VVnGJsW0IAoKTpDAGwiZCEfLJ3Zuva1/MZr/bWHykShs6g1O99Gkkm915bO+ssUguuv2XSKed03gWZy8flvhP4gX3sgRSViZgfgEqQECEoOmTyBiEFCQPTESAIJTEmQUUg1884iY8KmaVXF+yBaglgIbn19zQcvKRIddOxCxINiSLNXBnPF+1BVv+WomYtyu8NZgfMIwi10Jx4W+5mt79vwmHgUXY0H+/xtL+exx7mz1eYvf5/uSbcq6x3M2a0Veof2dMcYHozwAQsgAIARt72Vhz/3v7vx2oWhXhi12zd3xtdvjtbXfc8zwqRpVMwUEom1NT732ujyJgwXeg8f16cezLO8Jaex1bUTJ5smJpTAiICOXASMMXVCDjGKWt3IeFKb+igKCKBgiirRu0xVAUFNTZFIESwPGCguD/ITXc9t9IyTstls+eq0aY0dQQZxMYfTy/lCB3oepa0lwailnQrqmDpdd3w5P7PaWR2sBYrdbmhSy4Te+YwRkBNRcpjlmvavXvr6r4WlV9dOnu+snMkHx4A9wGEBaIPfi4x4tw5+v71B86jKnHtv1dj+r
jQLMMbUXHj55b/91/7GVLUqm9989sri4uL62iJTS5lXMkILrG09saxYWVt98eULl6/ePH5sNdbludOrr2zE3ap94ZXXv/bMhZOfeQJSjRlKAoBkZgrEhuTIzBwxGaZmsrzSKwp+5tkLf/AT53pu2pD2u/kD6wsbo7aNbZ7lxAeeFoWDdgJE5Ag8oWdkBkIJjoP3XUAAjIpmMffmAIM358A7co4dqidlk+DZM7KXvMPosipS5rGOMQPynY5NRy6nULh+f3HZaR7koVPDU8cHq8Ouxlqkdp40BHVcjXd+4kce263bC5vj1ESIsZ2MgzdN0kYzxZTa2KbYNj54NNqfjC9c3trea4sMOONadLo/DT0e7+yN9su1hQXUqdPOox//kcd/4k9EXHKzSnNmMyfTrUoSd4Qs3366L2v1PhsAt4G7jxy54zPvsJQ52pruwFi7Vctjjgae5wgqzXdmBSQQAZ8PU8S6BU/kGBNZEompjRqdGiAGJgIkJvRA5AEJ0BuCqKqomQIge4du1gaPCInZIRECIjGwNzMgR+QBnSggqmOS2BLVzHjl9VfG+xs//Mkfyoja8RY15XR/S6qRI6dZfx/WZOF4sXTCqA+Wt+VUOU22b0xHMZFsXr92cVSO1ronzz6QLyz6LHDRdd22Ge8GqxmRnZPUJq2yAD5o417P1juYnXFuhQEN/EFoy/SwruH7yEf+Ruh9u+kdmLNmYKgGtvfa1210PXMsGmPbYAZNfTNwQkigS0pAPqDLJYlERWSXpQ7GthpBlIJDb22t3x/e2Ny9eemCQx4Ml4hZEdoY2XmJ5hiRqIpaZMP+2snrN19Sp931D1hMRSjaVEabOmp0cnNEIVs4bpBnvRWs9yHWvWzRJvWFL/3uhz9TgC8iatKYU4gtoHUJzDShZ9NIzGqaCIENOKAqGKhIUgMUSRJbrevoXKZ1E2NqYkss07Kum9TJGewAT3/ovZ8PEgAcVOw/qL5zIADmXoGjBwHmB4+K9rnL5/Yd/GjI1458/55pvXYbVuiwd9jv03eiI06HO3O5b39y1/cOZ84AwJwpYUHnP/apn/tzX/qH/93+ZKqtvv70xsoPP4wF5CGHBnUMnkJqmq1J+tbVPc0WBxl84MHh0hIIdYg859zrdzdvXsYobJBlzGoQIXNZU9WooOZSHTNfpIZQmyJkdRW9d9aIiKVoEgmRiZwjXez5ATbHBrzWpXZSZqurVybNfinbjTqX9X27GOTESvf40BWYXJHtT2LVXXr+9XFZptw3T57MHnvogX4GHsU0+swDYk6BXIZmyaBNHCUl1KQ6Km/AqMq3dicbrw2Wjy2uPbr6yEc16wHw0cjWuxWuf4/QbeG4+6T9Hz49OPDGLQBAIHCMz33rG5m3nd2xY6pBvnXp9ScuhA89tB5yg6zXJhUnZqpVDC4Xyb767MWfPXlGq91jK4vHl6bV1a3V9eNf/+aFJ86tn1wmVCD2aKagYkoJU0pIRs4LW0PqYfKxx05+8cvPjR87m4GxRd/J1lYG/as7W6OICKZGRABgaDRPd5q5CYnUe86Ii4xmsTsiUvQIzjM4Bp+xY3aegkOPjk08Wyf3DlUdZgx57tpWc3NXt5sbexODLMcQQlagy7ytP7TU5/bkWr/bZ3ateW9gPgvkEVXWVofqspPL2XOvbpaNFdiHdqqI1aRqE5hYitI2rc84im2PqitXti5v1sKd9WP9vf2qHpecYJHRUU5SqcjCwvpjP/gHf+YX/xyuP5pmK8UOPD0HKWLvWgT3PtB9NgDuDq3fk9XfBeFyNM55+x5/CH5UNDMgAABDUEQEVEYK1AMh78jEJJkhRmmVjIABkIiAgJjQkSEj0OyLpiYqompgxA7NVJMphSxjpgP2odnXSUCRDBnNFMgQTZOACcV48fqV0d7NT37oo11inZbNZAfqye7mjV7IaheEOrpyJjvxCGQL1tDe5j6Dc9lgsGbRb4z39lTjqK6v71Lq7AxFOsMiaAoA5ArSXES4gba1aD4yhV7ml04AsDTbaVqx77msR+gQZ1UTj9QF/X169+mwOCKoITfbaeNFbptGKnaURAJr8KDjjRRF2sZ115A6CIEZCcjECWehGC4uaVOWTZF3uj54N5m2e7uT8f6WD9ztLjZiQIzEoqpJ0QyZJVq3c47DZHPjWn94MrJGn5H3JORAYmp3r72yCFAMzmHRjVw0wkDF4uLxi1def+bzX3zqR36kAhVNCujBIyKRGjACE4GhoEkUA3TEnkjMBJHYhdjGWKeqSWnWxEnBEXvvG1FGyDJnJnNn72xk8FYFybnfHW/ryoVHHuFIBOAo3alo3tJB7VBTuk2VPxJPuOMMcFg04ogwetfW0/tBybvN4zDfeW/bbw/LYd/z18x7cqIqIiIptNBd/OQf/kR3afQ//rd7F5/b2Nq9uTntnuwzZZ47VaVNTozFtet7W3sArj0+8KeOd0CrPHRc3llcWmub3TZWDGYE1lAs69RET5z7bjWtQuizB1VNGkOeW63sQaZqgFVdm0lKAuC86TDD0z0/dLDWZ4pNZ7X/2nRyaTdRnXqhWV/Mzh7vLQRa6AFYRMg2p3Z9BzbKuqzbB04UP/zhswvUClYmsSgKQq+iguozP562YGRATWNiNq7qckvL2FZ6Q+1VdL4zGK6ufvMTP7z/2I/+lHHnMLR7kOH+ftVnvie6xWbzyr33mY74Bg4kDRzRk+683CGqREX0C7/95e2dfVAlotjI65e2vvXqwon1JaJpkUPIMrVoanXUXm/4Qz/24//D3/o7Fy9vnOpyk+TYUrG5zVysvPDSK7/zzKXPffrhjmudKuOs47rM1oWiFIDEGAzFmg8/cfaf/YsvXrgyevC454DU8MpisTrIbmxPNFOHbGCIbAZqBmqgpmaCJAiARB7ZkWdERQIgh8RUhMBszrF3zI6CA0fKSASUBx/YFM2hZQ56wXWdPHDy/HOXNr718tVJ26Ysr8ZQ5GH5xOKZ1V7uNcscB0JkEWN2HsFMVIDRiuCb1pyDkydXHXNd1XUdy7IFw7ZN7BCE6rK5emP00qs7EXsh4ybFuk2xpVMrw/OPPnr2Qx87ce6Dq+ceOX72PHSGikzoHYDO88cOHt/nKtFbMQC+w7ZxVN3/dtr/e4Bub8sxjw3QrGPkQWoeAaDarH9MF9lBatuUmEFTQgAiSGoKxuyIMw6OiBmcAQmLgZqCWFKbGQQ2c/YzOUA2YEBgJuaAEObC1wQqMGRkQ4ya2Ghvd3RjY+vxB88XYaCKEmOsq/Lmjio33X4sTtDC+c7yeR/WSDu1TDuLw9RU2gBDGgwXU1WXfn9pLe/3B77TcZ1+1l8sih77LHR7BJlGJOcL4r4TSGNOUylHbCWjJO4lWoeFBzHzXhWRvu9c/2+NDhzLd9u67zjN2BYUQCWOL39dq+sOKcvy/el+p8hjOQ55cj60tbJBQheIEAi9S6bIEInaRFl3sb9yKqsGiwvFCy9+g51fW1phgv3tm459RJ9mPcIQAUyBI1CgfFL5hdWnNl7/jXLnUmfxZIxdl/fJHAp4Qpbx3tWXAbtZp5/3htX+
VgTfovSXTr74+vPZ+vLph845B22qAntWNUsIhSqYAoKAiSMGYhDRVEVVRVRAiZpilKQSTROImqm1tSQnw8W+pMRu3pZ9Bhw5KoGOGv/fNujz3adxjvCZxwcMboUY8OhubkdjA4fdl+YWwtuiYLxZerduYDZed+NNjir6c8Lb37rNkTP/xB1W3J00z1+yWV1mBohZt3jsU3/gz/+Vz////q+Xn/m1a1sbwyF0ForqYuPYSdkmya5O2in6hSJ99ANLg25iqTqhk0SgaSWVnQ47R45C07RJMCGCWlu3PvQm+9O6baQln3WQsWnbVlM0UIA6JmBnZMyw2rNzxzqLZCuDQkyaPHt5r9ws42BAZ0/TufXlLsVOwG4vE9FRm726Ub+60VQJGXZ/5jMPnxpi15cqSQDIMahkRWgbJaTpNElyVSu1yV5Zb+5Oru3DZhnKaQNglpC8W1y1Tl5euHbzl1aPn3zy40aMR0brPSDc3k26v4vzQEIcju8tf8JtlcZu+wIe4XcFZlZQQVOVepp6g34r+uKF7UfPT4fdgJNp4TI0IkKQVjWy2t7+5MVXLj70yQevX7qxGMJKN7y8V1PW/+qLlz78xOlTQ1VsmQgZ1QAMjc0QFbmpRAgJ4mLef+L8I89euHH67GNSjTMXehmfXh68enWsSZQZkDwzgSUBQQNEQ0ZwaGgQDBHJe48MSDNfJ2rugRlChp7MsXmPhIAImStUBQg8o/ekaEqWdTnPyodPh+PrZ/e2dvanVTWNx9aXev0uBMkGHYfEhAjKQZt6Qt6lNgpgW003R2Px2Am2sNRV1aqRaZN2RqW1xIEhQZrK5ubOpav70woAatchia4o1JH9wE/+6C/8h3+5XXlUMJ91LHZwEFO+Dcw1r/7wtnc2ejvpbckBuKfv/9thgd4dRNCtW7h914aDfL6Do2ioiYLDEOJk6siZtgiQZVlMiYMLrvA+U0AycIjBeUIqxRhZSUFntdlNRV2wPO8kQGaHSOTIkfPIoklQUpKi6JqKUQJEEaumlYBe37h84tTqYDBIjZpVdbk/3d3TGMel1rFee/x0WDgbrQMVANaARoAhyxNrtV8Z5EV3uKTREfb6y3lnYbC6lvUHZgw+i8BmWch95li1kno77V+GZrOTdWwfQ87GnWI4MV5u8LgZ3PK7fWeH2/c/3WN3eDfGYb6eDNGMIJU3LmZqjUVrrFcMYlMFT9KUYMreaTtl103lfugxCmQ+JBXnmLhjbdtZWGHnN3ZuLi4fh6axlGptHfuyrX03J0UF8OhEQUQQqVFzIVTQP3b6iQuvfOXhJwsuuK45DyEMhtsbVzOnWzvX8/4xHxgdZd0uQ9zZqiLiqRMPfP7Xf6sX8pXTJ0qt2LdoYJrUDCCItqCN884rqWqUGqz1DlVsljbgnYqZGUbTLM9chnnXMRUIYKowLwM61yPnmey3rfq3Pl1HJNutMqNzH7PdPjGHdEtnfa/5VY/YLO8oHfjO7nX8jie3vfVd4If3fmseqrnl3EEzUoC8Hx78gR/5xb/6K/+vumovvb6xfXwwMN+9sT11i1xk2bWNset2V5bqc2cXvNtlwk6nmNRQtpO2LR2zz7wzJ0kcEVhopzUAjNv9DucdXzSqdWtVq9MmVXUKPmv2S3aZIVOKPZceP7M0YOt7F4K/uTu9XI52WukH+/gTx88vB0r7OYBj2q/i5j48f33/1Su1eHj43PAHn/pQBuOFwhwju3w6nbJ35PLptCXultO2rHm3am7upqv7zY29alTXlYaITeZgmOX9olP0OoxIkLid/Mov//U/9ReO906ccOTuNfKHJU7eY7z7NtAt4/w+/VSDI2M3RyMeqTmBAIeeg8O+IfNr21zMECL4opM5dqamZEBQtfLa1dHXn792crm7Wvi2Fu+Q2Mgopqn34dTx49949tUfevxMao01sjbrvd5OKDd2py+8cmPhsYWMpdvptk1kQnKIhsgcY4tIzL6eaqewDz5x8u/88y9c3zl3rFtoSsZ0bHkQ+GqjJgIuEBKSERkCECIxskfPpqiYucw7hygIhgTBk2d2DpjIIREBO2SCWZkUYnTkHAIREBk5ZiImaurGgy1kuHB86MOpppWmbRK1phDrKJipCkCFpAaY5YEBiVxjlBJ2B8WAG41ajqu6STv7042t/W4+YJDY1k3Dm7tNC52V44uTURl8igLnzz7yx//0n338hz4LwxMZBgIwmbVFggMJP8+9PZJYdLcB9x1Zwuw+Mtgbvei3v7/7ZgDMroG3v/x2KCD8Ns/fEboVTD48dAhtvwUInpX+mFegMEl54Lqs0ciBm8GuAbSO0RCZg3cZAhpyndo2ikMAEQUBIuec93mW9wi8d2hgzMwYADBqQwAo0AtFVZZ5N4BgSsm5nLW9cu3VfjcsdQeaRLkpxzttOd3eGiHYzZEur5/mfLFpYvA1Bl+3UXGWgawM4ti1SuSL4dKx3GcGncHKse5wSRQpeHKOyDNkGUestpvqolXbObShawRlm0Sji7FMaSdkBa2sAjkwxVsy6V2ZuN+nQzpYUggzV4SatIQlYTQ1RdEEHlUlgcM2tkwJqYntPjPEFh0tAiCTz0KWYowoPi9E01KxPtrcSOA4M51OE0LbJg9lJ++Lxll3aHZkltAoIaBi1x/rLpy+fOXi6Qf7bUSH6lzoDZZ2b17r5by/9WrR4153aQrgsmBollqHcGa48ju/+qs/9Uf+5NLptTi9aSgqQhbVHCgTsLaSYtRYWzvV1NaNtK20LaSoBkZAKWnbxComUWAkc35xecl5N+v+eyu0fr+jkPfk/KMmwbza9+Fb7+lV8i56rr5r4s09Nq3v4pi94607TnCrKTMRmqkHNSJafuAX/qP/+p//939luvetsplS67evTI5lQyPTSrKGHlpZWi760pYUuAKCDECarAge2SC1sQrOJ+8hxhCyGGNg1rbJ0DepRIW20ZSg3+mNxqnoFKiWYrsU2oePZQ/0wQFknf448U7dNnVzZtU/eiI/UcT1gYNUpOl0Kqny/W9u7D9/dToo9DMfPPboyeFKXxU8Q3SOY5RBd2latlUjVZnMt6Mqjhv/6s3mpavTzdKNRtrJ86Xl4vhSfvrEwiDvhpD3F/pE3ERggirWX/38P/uJP/6LME+OuccQ/96oCXoPj+RbtnuOfvGwBtB3iD7eElV2m5gxMzVk8FkgJhByjLFtCXS31Is3965t7Obr/Uh7eeaz4NQwpdZ59/jD5/7u11948dJuv5vVqeovdLeuTs+uD7dvbj/z/OXHTnZWh1TH5IiIiQhIDAGNIWpUAS4caHl6veuCf+nizdWHBoBRAJeXstNr3StbdZPUeTRRMCCgAwwQgIkwk8bGlMnIMzKCY/KOPWPmyDN7f1ACyDtwhEjGjIxAhAg6izUnUUTnAxKBiDgXJGoAQWsFmF1u6naaZlqP1xY6/V6XlNAUGKWJdS2TqZLQg+fPtNN2ezQpla9e3VejgHF3syy6+daNfb+w+gu/8Md+5Kd+uqpSUfCJEyeGyyeEOpyHA1Fus7YvikgGgEiHub+3A//fJIu8SyrvPel
7NQDu6cW/5wZ5x15sR5685QH5nt0SR3XZuQ0wiwMgoAECKiAxOU/VJIoZGCZJBCYAIm1SEdVux6sasSbTNrUG3LTJJBGDc8whBF8QeWQCM5xlGcy8UQRJEiGm1ORFwMQSgcHXpV6/ehmayblzjwTHRQbVZNI0dTmpMHR2RnW2em7h+GNRXJLW1LVp32XeAKMqmpmmqLNl0iEDwGx5bX2wvJQUyTvnMkcBES02qdqFZkObnaAVoZp6BQJ0bQOUBZJadi5YuALDs4DIh3CH3+MxgHeZbl9MBgCk6FynH9trqEaIbawcYYJIxsQupdZxQ9ZgKrVmBcedYKZEhMzgvABgnqmq7/Xb/YSILiuSSFPVbTvJOPfegSGggsGs6hsYRMCR5sPjD7/w9Gurq/uhg2Ym6JW8z0KqyjjZ3bt+0R8n52B3b+x9oDwfb1e5HzrZ+7V/9M//+C/9r5px4zuWe4p1k1LNrktAqEKqVTNNsYqxlYSqByhTNRBVZm8YG9GqTQJY1TWTU1VmPhyWufPfDjfgt5UOQw1HJskOA/l3QIDeSyvnSLfid/dGbie8/fEOB9O3p3vAKW65Yg8nadYAQsBhRxbO/8T/5r/4tV/+q7vVt1IGAtm05jrWoIGSLS9kTTVOEiHvRtFAwAFC11uE1MQ2xlm550gtIQbwbRIAqNs2ijaNEvlAVMeKQqZ1TBKD6RMnuo+dHcZykoV8P+qFG5O9/en60H3q0WODbLw44I7HSdlmveErl6fPXN7+1iv7i0P/uc+cffiYI6tA60AOPYgZBZ8ME4ak6pzbr+ubu/HFS7svb0xu7LVM/syxpTNrCw8/eGqhB0UPC9/xWQeYkUjINXVcoGzzykvXX37p+IOPG+JcsBOAAtABRuU+dYJ/j9NdFuT39pNv58RbSUJHhMItzMEtx7DN72WevcQEAE1MVRMPLHaD7qAv0cYNXN3YX+rniwW2ddsxYCZNSbU+sbre7Q++dXHnBz70AMR60O8VftzvZr1ef3sy3i3T8kKPmGcAfkkiIKQYIHfMmpKhtKKdXnZsbfnilZs/+Oiy6lQFspAtDPJLGxPi3AzYOQPBBIYgJgaqFoECEZIZo4IREzEjO0dojj0zEJEjYkIiIwKctSXmGVKIZqoXEYIpEiu0zrHjTBXaKgJAkWWTKt7c2vrqcxdSgj/04x9e9kVqG0M1tRDC8cXV7f1vJMtd4LJp2/F0u0zqOqI4rVvBvE5u9dipP/V/+Euf+pk/wXmv1UTABAwIflY/2sDACFFBEQ4ryRsezBQcHvluXom76L1jSRvAgTJ6/054B70Rd833Mh7fw3ePxiqO/DncxA/iPKjmVNHUvHPeOTCNkBIKEBGQpahSizRN24ooSGKQjIUxIaHjEHwRXIHsATFJJEIiRCI4qKwDAKgGImIJ1ZjDsEzuy9/86rXrr33giUdEtNsLdTsuq3FdT8Bbg5mF5fUHHtPQbVVVVE1EU1s10jaWkqQYY5SkAIjgPHf7C4vDxSEBOscheO8cITEpyhSabY17AdU5zy5HcdIqmQYyTCKN4v7ldPV3qK11tv8edJH/nof/6JB/50NvJ9ldT94PdEs1usW05LmzQiEoSkqREVWVKYASGpnOTMISYIo60TgWqRVMDIAYiRTAkAw45IXzhSixz4i508l7eSfVJYjMeg2oISioIgOCWp1Q3PDRh5947bmvdlEzoJRiigYWTBRiObp5eePyqwTmnVMwNcgK36Z2MFzbvrn1L3/ln4S8E1Wa2IoCIUhbodQSy7YepaaMTdU0TVNHaRVkprURIBMymjNgMzRAVe10CyK6NSQHBTgBEN+xlMa5cn9rTzh0sd8B+n/PbAK36L1zS3bk8ZDe8O3dY4wP47iz6cAZMBMQEBwhsIcTj372l/5K/8EP7bfTpaX86vUNJT/oFfXezX7fW6BoOolxXJeYm7oGMnMd7zsevAm0CVry2rRtGyUmqJp00NvLJEnTSgWQyqYu27ZwdrzXfvh8f7WPZqpZfrmcbrfbH//Y4Kd+ePXhY3Zq2S31Mi9E2cLr4+yLF/QbL4/OHM/+6E+eeXQNFruystgrgs8cF+gyzNsGRuMo4OsWb+zKN6/Kr31z+huvTV/eqtZXep9+YvUnP3Lysx8/d3YtO3Nicbk/7Pf7nU426He8Y8+6stRfXAwrA/rav/2Vtt5WIAUUQwFQIJ2P4oEqau8rMfm90QHa+62uCoN71PY6DA7a/OXcPD3wrJndShqa6yWoqiJST+u2beY1RYiJEHh/3G6P25tbo/1pKQpJNLZp1s99sZ8//uiDV7b26pj6vS4j5rkvMlpe7EiymztVlSipMDMiioKZA/OxsbaOGqVqZWqA7E6tLO7vjHb3SqIABGK6vDT0s6ptkgwAcQbgQSaaaZTMxMSzsWMGYmJ2TOAYmQEZZu5/YkSafR69I0Zkmkl2ZELH4BwgGRqAWow2Tvra5u7GeNqCGrmqtTrBufOrK4sLkNREYtOoCSDtjKaNYFk3Ta374zgdpeCKzOV56Jlz1/bHpz/66b/6N//Wx//Qv8N5hwEK9AHYzSy0w7rPAABGM9FBCLOdBG6T7Le11X1j9B4Rs4cr+a1EAL6ds/+7/rY7nMbvjbE49H4d7NOzBTiz9RDNTEUVlEyBkc0bCGbBmZKICDFp1FgmUhDPhkkENCGT8zmTZ/JIrIgqCgxkRnaQQKKgqgqIjJT5YAnMJGX09eeef+mlF/+dn/yR4PudPkmK0+lkOp1I3cRotebrDzzmOqtNYlRkhykJkSHprILR7I7NgNAhWVEUg4WBKBCjDwHQEwBYK+0UZGTNyFvriGaAJwVTQjIAFUVVUScVbz9viy/Y+oeNDADnVVS+K31XL+c93v3OUfz7TkdZ8X26uc1athE511trNhDB2DEqGgAqIYKKAiKIWkziagQGzNp6QsFx6CAROubkJEYCB5xlRTdFRUsZ+4w9G03HI42EFowc0EFHDRVDMzFsBNYWzgB+a7R7fbB62sQRAjIwgUlVVft1ZcFlEhMamAohukDNuF3qL3/pi189/cDasQcWjFqQZMhtlLppUZKpmSWNSVtFYGQwQ3CsBmpJQVzwTh1gQqROv3v81DFAgMPujHN6sw6at0bzrl9zdQmPvPM2cPF9PeN7xyV1QG8prP6dTjZPyJ73cpi9YWDIZkoGFjI89uSHf/4/Hyz/T1ef/efF4mB3NM07vL7Y0xproNDtirJIAwrgEQgMFR2GPFgjmsDIOHBdalRz3rFA5pF91qRIDJ1evlO1RQd7PTq5Mjy+7PdG07zfuT4e3diZLA/xzFpaKKZ5gE6nEKVpqxtj+t0XRi/fjGceGPzER1fOH0O2uDDoizITp6gq2FTaNijCdZOu77Zff2nv2evxxRvjrO8eO7/0yUdOPHZ6IQTX61K3P4xiwVMSDcGnJGyGiE1Vh27ujCxWL3/xt3tnzq4sLhhmvuh6PzDiw4pXdgtR917jl/tM94X9Zu6ZozbAYZHho0iD23educ5/xNBCBABUkU7uOrkTQ0ACpRhjMryxUV6+GhY6XEs8/8AJxt
wR5J7UsNtxH/7gub/zj/719v5o+eRykZFHYuL+IHeBrlzffvLBdQBMKTkmACQkSUKASRTEgBAJElbnz6198ekXbmxXDxzv58HG43bQyfuFn44iEaqIdwgASAQGYkBMSMAMSIaosxgzIzii4BgJCI1miQ2oB7BrRiCa7RoIRojI6IjNRDRZQmDareqvfOu169e2P/tDjwbyi0v9XreX94q1pV7u2LQmNgRiR3VVl60rq9TUsDse7zMOM+e7nZ3NUSP05GOP/tn/5H//5I9/rhgUgXMAtoNsXj3ipbmjP8zM9z+r+n9rVuZT9r5cDofb4ps2AO4ZYb3nW9/uwvdEDb17dCSz+wgdWHamCKrSIqEjBGcmBECWkBiVmRx7Y5WUoWpsLaBBYiLPmRgSuQPAmCogARz0bDVVA5rZm2hEjGBg1oa887VXnv7dr/z6Dz/1wRNnzlX1lLSsJrsxVdKKAk+rOhsOB2unJ+LMkAiBQM3AhIFt1kHADACIGAyDd51ul32gEMgHNYcIYElT2dabFMcOK0YAYwNTldlQmAqIAAGjayHLXCyvf6mz+pQR4cxGfkOeke/wkTfKAu8NDnnP0iHGRBEtdBcrdGDmyFlS4FlNTFFBRARSS8miV2gRpihdaZmCM/PMhOBcciZiHDrdnkapyqlKBEui2lRjT6gilOVkBJ4MIKZEjD73ZVPXLj915gOvvPr1j6ws11Xs5f3WBMHYLJZNbTsL09Wqqi01kmq01nsEa9FS0OJX/9Fv/Olf+hOE0bABNGtrkOSci0mImIEco7Ezc2DAyJLQNKaosziaQ1DGZDDodgAN1A4KNB/ur287D+Ftf47sHva2ibv7dar3jBC+B93He7sFH7qVxQdwENAkA2NVYNcMHz3/h//y+qMf/9o//2UaXVvrNY+fHmxd20tPHCuItZzxreWd3Bs6hpbEkIAA2ZwzLowZIqtF0CaGjBlcFlrHxcbWaHGx46o6x/TQ6QUM2kIy1yn3dj70wODBM/5UV7JCfE4esU2iWLx8Y3Lp+uSRE/jTP7x0sp/6A3ahUBAF7zKu27hfWi3SiuzXcm2EX79Yfv6V8eYEOqg/dm79Q4+sLQ7DylJX0RVZ5hh8CG1qu0VQaTOXI2OUNiJpXccmNnH03O/801D4YQ6j0dT7sHTy/KOf/Ine2UeVh4wEgGR2j53y/U538dn9Ybw7XEqHzn478q7BvMMdzvUQmMGPj3zVADCprC8tWdOEziCZRk0yrYLLOlm+P663xlOf8e5+lbOnjFJSdOZY15aG7LNrm9uPnFgGrRYGYXMyXuxniLixN53WaaGLAlESOu+SKFFoYkseDchUJCbKbHVlGFy4cG3r5PqAANigE3jYza/v7RmwSArOHfY7RABmNgRAJALnkGeF/xkcGpMiGTMTAREgIxMwISEwESPQLCw3S2VEICJCL2SEjKT1tMoQOiFknh21C1189PQKgYpNAYQpiKGJJZHd/YljGBQhlYkWO5UoiYPu4Cd+5Gf+/F/8S8n1qFscII9A5yC3wzitHXmYTwQeuHWOhAAO33h/05s2AOZh7nvsanccuaez/zvYD+8G2d0OsPnEHxwnEJDGIXEWWhEtUyNtbJvgPQKgkpBkeaEGyK7o9pIqmKa2MQU1EU9g6IAQ6cCVoiamCGoISMikhqZGiKhmmzevrQ46n/jgU2wWtR2VewFTPSljC0kA/KBYPl2bF0Pybta62CABsKrOnDSzclyqyuhCnvlOwOA4FACMgKYxpgraEcQxpIk7AFeLaEQAAlJLqC2AIAApRkB1IPVlaUfkFmZb5hFw81uZwXmpg/fo4nm3efI70JFlZ2ZHgLkE4PMhQAbEYmoUATNDUpBZexZVBRNoE3PQ2Brs+g5b6hgCu2BIzjmFlJSdz7tda+s6apK2TrEmjZBqlchERt7QyBHarB82CdA0cXfx9N7zX9m8tuE6w8ZaRCEVZ0aSqvF4++a2C6GejAM1ILXGhA6bWC/2F169svnr//Krf+gPfAAM2KUUWwI0FWZEwAioJjCzOnmGAMJkJjP4EgI7JABT1aSzhpQA81g7HpR/hLd3Qg88eocQ/yPNB265Fu5XsZ33piHxdtD9u7cjFtksyXUWCJgVYDzs6pyUmBtX5I9+7lPHP/L67/yy4m+k9sKkbPd29rOBVU0Va+QFzqhghwDiQmDfOmYModqfCEjoOrAmmngELw7Eii7tbE+XVnqtOO+qk6vDToY7k7KhMN5vVgb54+dXFrr1INSh38/zbDwuJxU8c2Hr68+OVgfdn/7MsfOrpUcrul1ADxamzaTIwrStSlED3pnUr99sv/TS9jcutdtTWBi4H3v01I99+Ex/kOfdIgTns8KxT604pk6RJxFyTkUtWkowKZvN/dG1zY29nf2tjVHm3JnVTtKmyDx985kvf+E3P/ZTf+QjP/HHrDNwkCOCHmyL72EZ+WbpkDnmW/59NDuPXuHe8e5DWY6zmiN2YJYi6Fy2I4Jj94Of+mi/4yeaJM7qdVrShNo2GhrNxHxKVJYNg/PgAknSthvo1PLq6xevV0+cJZTgnMbECllGk6rdn7THlzNPZmpmqqhgCdhUZ7AXIAQVdaS9bri+sV8nyyAFb8Gw18lwlg9mNuuGS4xgOOtvhwbM4Bi9I0foiJhx1jQM0JhwBjhwwMzEaI6AEZnnGSiggCAqSODZMbGBrS4sfeJDT27e3Mw4iKWoRgIOZZade2B8IBkIMLVJvacMeLCwkIjz4XDt7Af+s//4P15Yf7Dt5N4Hne+eM2k9S8o8tM9udY8/ZHQ7OmffD+x/aI6+xSTgQ56+Q+O/+zN3f/G9BLy+7R4PWogeZOPMmAPRLIkkY5GZJi8IGFzmyakmH7IENq3AubA/tpe//Eq313/ikVODjm+t8azJyAiRzLERzfBsioaIioCACrOVYZCFTon2A5/4gT/wQz86HPjt7cuWGu+s2q81mSGU0ag7DIO1aJB5x+yatj2ARigQ02xTMzNTEFGf+aLbzYLPskyUZuqgpQSxsjQlbQgimIkIIBAUYGoqZkAGBC7qlKBFzKNGhxOL18AW5sEw+B4WwdH5f08so7eKNn7n6WhEeb45AKAhGKHrEw1F9gDNQA56MiIigEgkRSIGU3AKPhGUqdnK8hwIATwYOvYGklJUNSLOfIDoohmZZt0Q21JUnUN0OVDHkMxAUiIzQpsKgu8fO/7ISy+89ORTHxSrTRkkMjoDTiYvvfrq8RMnSFpGacooTaxTDHm+vzvuhuFXv/r0U4+tr6wzVhUg1KLWRGJyzFGSKLKzWXjZDBMqeczQxyamKKigQlmn1xsM1WzWnMaOmJf4tssZhDsk4RGYL9xPLn+PhU7f62RHtvIDwkNj4NY7iKiIRIQEhgqJXLNw+vRP/4WTT37q6f/lr1987llMVk+rNqoCX3rhytLSo6lQRtRGPXizVFd1VZfSREfoiDFTjJAHomjcyQm4EcMy5T1aXsxubI4q3x9Py5VedurYMfaCDvLegDFoDdUEb1b0u89ttkR/8MP9U93Gg/iQsxUxEZlRa3u725acJ
T+u3Ddela++Pnr1Riqlu0y7n/vQA596cn1xIfQHPQVyjlSSIeV5QZyZNhZjE2uBbDyOl29sP/vK1ddu7O7ul9IqEuWd/OmrN9hRr+g4Bkjb33rpv3/95Rt/9N/9j6DD4Py3iZe/j2lW4fHulhTfOx0KoDvW7ZF6P3P/Ms4zTxFxFmSfocsRTczQVOXk8WPnTh2/uDGtOI6mlZEBWFG4G9uT06ePS+JyWncCeGeIkjAVeY4Wz59Z//yXLlVV7GR5p0v5xJd75XCYXxrXN3cmD5/KRCNxSJKQKEES1cB+hnNwBA1Y4PjQAwv/9mvbu9N6KVdACVnW63d8wFYS5kFV8yzM4MNAyEwEEJwPzhExAcI85xEJ0BCBEMwROcaZMUCIRKSWHM6qnjokQEQ1MVUzEDDP8YFjw7PHF9q6ygpf19NOUXBsRBIaI6OoGZgP5NQTCiZdXOj0VrrdhdU/9Ef/xKd/8g93B8shKxBRRGYpQIfOGTtS/B2PVHLAuwUuHrEU3wZ6x4T7ISt+r1WA7nafv8GvvGf2sUMb76BR6CFI1MBATVP0nju93rSeqEKUaGA+sIk4T0kgsA+eEMPFF65cvK4iu08/+/of+InHHjg1ZJ/MABSV1AAkifcZGAGSKQLPgHYgACKSrKwj9PMOQ9zavL63v1kQE3GKHFurUxTy+WDZXI7EYgoq7NBmy0pt3o2bktSkxEQuuKzIyWdmPPMvSBKUBrUmqVFbMBFTMCQkUzQT00atRZCZEa8pkg+KIWOTWM621JmX5NBv8uZ3g0P5996Y/yMMCfMn75U7uzcdsb7nskgM1AWXD3WMZjNT0wjBgAwEEQC0SZV31MZE3lFsCZ2Uk9DvttIgOTBEc8FnljCROO/UOVBA0FbLza0bTdOeOPGgK9DImcPYNiFnBUmpBsoboeUTDz7zjS88OJkuLA7LpkFLaDEw1PWkbXB7c3exm0VIIFQ30rYxpQgAnh3E7t/+//6LP/tnfr7Lk7reA9+NERAEqUXnut2ekBPVGC0ptdKCIyZPYujUCJynfDiY1E33IAJgRzbfW4/3lW67xJ3RsNn/eeLkkUf43phrdt7Drer36TvTUcPsaM2+I7MEM0uAzGzWKADRWK1D0FDPPfCZH/wPnlz9zX/29X/793t50ZZbCDAuU9Oay5yBNE012hthUkbKQkB2KTamUreVh8xRcCFMyqZwrqnG3eBBXdvU2OlcvTbpBVxbwGFeY+6KfodYAKAsp5b1Lry6tztqP/MDDz12xvW7KkaDpdW9rWmn3xvtT6IwmNUNXLlWP/f69F89u3OjMgHr++kf+eyTT5ztri9n3X6/Fut0C0RUUTNDtNQ2bYpNkkkVX7t89dlvXbp4be/6Xh3JtaIzj5RMKnbYtAlgAgS50Y2t0ZVr/6Nz4Y//+/+huh6iey/47d4fNFf98faDR+TFgQw/aE8BhzUMZh4MNNMZrJFQQ7BP/MBjL/29f90ZLlZ122hU1VZjzn57vz6xlPci7o+VKLDLoAWmZIiPPvrA1555entvtHL+ZN20/SI3wWx3Imo3dsZ12+/1fBKJUdA5dszIBJQkGZOYISBqOrbWT1F3x3Gh40FbIusPukysMnORAgdCRRNEpMCOULqdkGXzrGAiA0AmI3XEzEhMgGSghqZoCorkGAGREG5l3M5eGmhwTmOjKATMjG1MzoW6iThr/DLz5eusvIoSYmzrfte7fr68dvzf+3P/x8c+8gOu0w8cAEDVEAluy864ZdEeqe5/24zd+nsPyMj9pHdeqt/PKkBviu7Quu4XvdkTHrH2DrWFOTfM8m1iDdqaJiYkEzLMvXeEQNhIVMRJU+2P98HKhb4rJ7vZ2uIU8dlvvrI3aiajCgGLkBfee9XMUdM0mgCVZnVMQFWjgCSk5Fm7GTufQmbalBxTTm5alpPYAvqmEtNQ9JYQmBC9z9TMDNBspsGriomARAeYYu0cLCz2iJA4UyQBVW0RKtAS0hSkJgAyEjVDMBOAxqAha8kEwRSSJfbUM0NQNOyC69scGHdXePPN0Xss6/DOu3lv3d1tZHe/moVh0QAJXVEAKcw0fhMAAzNUQDWzxA7V1FS0UQ85CqZ2GuPUOTBTUyFmIprJOCIzbU2q6fjG1rWL0+2d3HmObRztjXc2qskumtXTelqWiOhYUirBZ+vHH97d2dC6ISWNDViVc7XkDUbjm69d1dZrCm00UTNJIqXLK5StfubKhv/Vbz0j3Pc+dAJnhCSW5XmvtwTUIQxNTMnAuRCCR6Rk1iYjYiQOWSi6naLo2ryIBh1m4b5dysp34pF7BUUPkEnf2+3cEYf+fXpDdKQF291+B8M5q9BM20AEQtJUmDnytV87/pk/8cf/0//7DqyEvDPa2k11ePXZLe96PvMSG8ckpvWkrsu2TdRGMvO97rIqN227O9orq3JvZxdaIdEs71A22Nwa9Qp78pHVtSWeNjeAp+TNHGFQ7vDl0eSV13cfXc8/cj4bdBM44WFRgQ6XOilNMKfGatftffWVya9/c+d3Lu5fbiyBrdL0Zz64/tTJ7spiZ7DQUYpF4fOQe5eLpKLgqGNz00qaF17f/J//xdf+/j/76he+eePKTioFWkmiqWqqtm1EUtO2M5FBLBHiVh2v7U3+9t/5m9/86hfbBAIHeu0MrzKjd3pG7ysdcsPbsSvhXc/x6FG020FHZmCmOkcgGIKlVAM0bFVvwX3sE088+Mhx79E7LrICkc2grOrdvVEV2yrGSTkt66pN2rTSxmSW+gWePLZ2c3PfmIg0BN8f9jxanofxuJWWmiaqgvOB0YOyiDVta8naulFJ9bTUlDou91kxKZtqVBNgbKphP889kWrTJB+yjF3mnSfKvSczRuh0HIGCqajFpEkhGSJ6IBRTA3OEjokQyMwz8UzxQkRGpJlaOnPFE5EDMPYIhIozzIoSEREpmIIZkigYgqrFJjJqoLbfxTzr/7t//v/0kU//aKc/9ORn3IoIOB/2u+kOb81dM3fP7oXvb7qfBsAdg2P3evl243+O2hVv5BJ492Y93ycMQFWme1vVZJx3i6yTZb0idHKfcVb4kDlCAFOH0OsV9XR6cq1/8mTv6qXXYlOfPf+AD0XIMwBt2ipJaygIQDPshImBGIipJJU2qoETVYuNVtW1KxevXbuehaJOpgZtbKumROas01fmaIrIKbYAOssdBhAAJQVCaGKJpOQ0ZM6xS2ACGFXNBK3BVGKcglQGUTQqACCbmmiUVFtsTBOagMxalKMiAQGjAA/RLxiB4kEA0w4Ai7N8iTc7mW+T9ffWaP5LDl/f9eQ9Q3j3q4MOp2gIRHknmYoKsSPHs6iyms6gz6KCaNLW0tag5NRpW8Vqx4MyimEyEIUk0BpEZiFoMNVexIk5gGpUXrnw+qsvfnPz8kVqWpQGtUEVk4QJgDhxfuKBx169enEy3o9t2WrVam2WCg+BtByVm9dHSXxTp6qalk0pmgwk6xFbzLj46vOvP/vSNoflsmwJtNvrsisSe2GfFHzI2QcDYHbMbtZimxyzY2Ta296ryvIgmQ7hNhTnfaK3xgy3
shDuA33fbT7vNN2m+s+P3UrPxMMWLcxmRqI5oi+y0q39xC/913Xv8UpCIb0bF7baadvWLSFm3gd0bRXLcZxOUlO7/RHsjmoDDwghMCC63FORWZaJL65t1cP+cK0Lw0KrZn+wuOyy3BjQaxOrqsH9Pah2q8/9+BOrKxg6mS8KCg5AnaOFhZ53iAk3Rvw7z44+//zO67t1Y5aRffKR1Y8/vrS+ki0MewkwL4o8uNhWEtt+r5uauq3Tpcub/+xfffF/+ke/+aWnX9/YTZOEtUEylASiQORg1vnGCIxRGRKDUms0quHm5uTv/M2/wSma6cxqAsSD7NX3P1e+HfifN3jlAwXkFk4RZx2mVAUsbl2//NLXfuvac7/TbD4jo9fWB9nKsEcmTEBkSJBEVaUsx5NpXTWpbprptIkCSaBqkiI6ak+fXLp2Y7tqUyi880SkvX7H+WxnXFeSOHM+YyJWUAM0AQRSM0CIIgjaVFXInAvu6vVdDiF4l3lik2G/Q4SiakRimmWu38nywEWgzJlDYTJmnuv2s3iGEAAhOiREICAHjpAImJFp1gEAwBRg1p5gtjQRQQEAEQhwxnaoogAAQDALEhgiMROpSJ7nnvGpRx/7P/83/5cnnvq4cW7ABqB26ITB2wMARyfj4MltdAjh+j7g9bvoLUKA7ohB3xPV853RQW/rWL6Z0L8d+fhhyG422wiEbTVWretqmmW+rEoXSFsxsyx4ic2sWUorMSuKtm0/8fipYXdnZa331GPnY4ou94xkIG3CJrXOgRIhoFMGQZCDslLEhIqq6oi1iTub25aE2UWRpJKkJjXCrNNbJA6qAgCGYKYHRrMhmCiYJfOOzRRBiyxDRO9zQzQDsBZSiXFiMgatTOKsXeosZ0AtkkUAAUxmqkQAAOQUAASZnNESuB4CHJjRBHNDAG5zYbwJeg+hGA7z2u48/l66ybvoMFqJOKtkgGCM4rqA0oIioFlSRkNgSGgaGMAqQpeSqi0QsINW6h3WYw6zZIrSUqoYWqKaXJu5iMEg48qBiNaxqarSBV7JF01GBP06CbJULYliVgQFV/QWxuPJaP9mHnp5l1PjW1BDLfKO6M5LL7+wvv6RSVW2VaWawAwImTlzVLCb1s2/+a2vDYqPO5j6Ak8OFpg5InZ8ZuTLWJppsmRqqioC0oq05pkiUJZxp+PN1PBA/cdv39X0rdGbOdGRdICDkqD3zQi4vxz5nuXvt7/t1IHMn6Ev4NCZAXNxQKimBBoUOfRryD7zS3/1mX/43/3uP/i7IejuVrW0oiaJnDNVBGbCatSyy9WCSQUUCTQ4tE7eKmmESZWKwm1sXv7okw8UDiTWU1UAGQQEqqVNKfLeVDeuXfqDP3760cd6ZTUadPp1Kl09BW5xsKjoVVFC5/Pf3H5uO94cEwQooP3Y2YUffurYyZP9TqeAwMiMCugToxdIe3v7dXRPv7r1a19+8eKVcdMGQ0QHmkRSNFOiA9z5rAmTmtHMTWqgpmyQRMZl+tLv/MZ087XeyccNEXEWfT7Qx97OaXpH6N1aBgZz/PnRzXQ+tKD/5X/2l6+/8pW/+Bf+dDcev3jhJWfwkU984NrG5/fLKKYAqEkYsWljWTV1DF5T3cS2NXSIhGWj3Q6fWF14MfjRtFwq3OJCzqNpHlzI3GR/UpZt23rvAYhU0URNZg25yEwQCBlVsdfppagTaVQMwPKMs9oWup1Lsg9kpmrKiBhQCXW4VIz3a1R1xGaYDEgNCTCpcwBGiESzaqBoMKsCRHNH9O0gjCMFgcBMZ4N0kCsBqCo2gwwZMiIhMQA616a2Oxh8/LEPffSTP0CdBQIys4M0j8MCzUem+97OyENl8ODxDvzg9w+9xQjAPZ39b2Rk3kkz4I3RnTbLLWvAzFT2tzfrybitG4kJ1No6gqFj54J33nsXNGFs4erNTUY8d3rtj/zBH/rhjz0MsrcwCN2i69EhIbsA6OqmratGVcxUTERVkpgamaGJpgRC00ldj8t+XpAIKopx8B1FVCD2XRMnUU3EUlJJIklFzGaVfExVQIEMQsh9loMRo0MBkGSp0TiRuG8yQWkZjGYV3VXAEojMntgsO54IkUnRkAwwGbV+EVwx968CHFE/78ewv8tk3/bHvIdu8i46IsnI0BDa/Xa6x37AYYXCiu+t8mBNu+vQXYN8CbNeMooiaglEUkyCyaRmaaTaJ4wImmIN2hI0BA1CQ9ggtHmwhX6RZ+wddHth/dhir4v1ZAOg6XQydJxE6jalqAYUOkVq7ebGDQC1ZFkogss8Y/DEzFuj/esbO1WtppjahIDBhY7LvHMOdK23vLNJv/wPPr81dVQsSGCfZbnvZKGH5hBYVBVMzdRAFCWJiqhqiorgep0OHHrcv5O+/TbGdebsclhq/ha49PYLv5V7eDvyZt6z/P1OqZWzjf1AjcUDjdYQDG2mnYEgkFDwWVusfuCn/sypD/9wm6wpS4mCiLFuVc0FJoRukXtHDp1GTK3FlByHlJKKTKf1pIzXb0zyPFtZ6RGbCuWdXrK66DgmUE39wVLZAgX50CfP1DLOOlnZNECQJJo1VTkeTyaj0e71zcmvf/56iZnrhwBwbqX7mY8eP328k+W57/VdHlzwgNZWZYyTttrf3J7+y9/+1j/4J196/uL+pAX1FEGrulWY1aAjRDoofgIAZjTXuYxgXs7dBE00vf7yiyB6gDn8/nD+wxzR9L3RG0cc3H7pAyfcLPQy+3egYZrubm//2y98sW2qqxdenOxcWVvwJ5b7wyyjKNPRGAFnCbJAGJOMpk2brE3YJqvrpMaSKDUptk23486eOVa1iTgMirzTCZ5dVmRt0u3dSRI0sdRqSmJgAhY1iakhGKBzIQshy12ecVVO9nd2iSDz1O9li4PurOR4jOockcRhRufPDM+e6C8OPJmJUtNaVUvTpjZGmfUwuzXqSAiEQMiEBLdsADzw8h8kSAIg6MyHeYCRAkIGnOF50IwAkBhnvMueRa27sPCn/sy/p1gQ0PyqOIfv38O/D3BUTs+PIhy93TuffL/Q/YkA3PPg94GxRESxLcvx2LNr6rIsW1MjN7Ni2WEwQ2nb6aT2WXfSlEvUZEH3q8ZnrpWoKUKrVcLuQnc82R2N99bXj5kaHhT+AQAwMxFDiEkTqu7u3iDUIi/qmMwFFcfoRRpzXlE1Vi500qwtKyiAHVkJMziIiNCgs+Bcxs6RmYKQRGwriyWlGiWiIc6wryioCSSiyQzpj8BIbMgIAgACZpAcaSIEMrwrWvp90h/+9gjA++L3zBfXvIiFlbuXX9AWuwtnFDL2NGuzQkqmQu0U632DvSbuaqq9J4kVNbMOwFqNdwtXmPkUG8bWoDadSjNSGVs7YYtFhxcWctdE4sLnaDCWFlNd9fIhImUhE0EGkJiAcbCwevnKK2dPP9i2kbnjM/EMgaEo8q2t+vUru6dPLLTVnqmRz1QtQXTeFcHqmAruXdmavnqxfPzJJ5k1qQmRadNAgwhFnkdTIGtTZDRiygseNY2CuuANkQhN501fDea+3Dvou07vm5ZbdwQQ4YjPaM5U99Q
Nvg8E5Pudjmzst1cMmmEPTGdgSCPAjPO0vPzTv/jv/w9/8dc3Ll46eeYDDHU9HYGIdxgBDFHqBADOEYtWlaIkJkfSZIAN0cbm9g9+9ERd7w+WhqPdKWtz/Pi6kRFmTbRxWZbTySd/8AngEkCIobvQLVObrIMMrARoHPHy6+Or12IKQE085tMPPDI4ddwVPcj66DuI6puyTCkixmlj129Of+W3Xv/ahb1pDYxFTFUrFZEDBDU9QFTMoNaz9o+zEjUGpjYDShuAGpFAE2ljawfYHewCdlhJ9R2esvtP9wsB9OZPM/cq26Hv0RDQUNEgiu2W0aX66W+8QlVz8kRvbzdNt0eLvaxf5JOmIXZJzIBEbDxp6mgdorqRsm7zLC+8a+uqImNkNIspiYKmmki6HedQU4y7o1JwFRmdOWYwRRMFhKQCJohMiIBMpCvLnbqafujjj+fYjvYmvW7H5w6ZFckQCMATrK6Ep548Pt7fZ7969cpIxaooUdUzGbFzKEJqNHPigynMQ0hANlf8D9T/w3iIIpoRzTz+YIimZoBqqkQzqJIBEaCJKjAyG5Hv9M+KW8t8b3YROzrYR4b+HvL3bijw0bjB9yN9T2VA38Jn3hdGwvyWzExN2+C4GdUBkdmrqCZQgITqQyZJV4fLMe1tj3YT69WrG/ulbVU7J1cWKVVmdPPq5vZmbD0/fG4984GJEZgYFQBAZ6wuKgoJASS2k/2dpUHXyABdTFCLoKaEmGUdSYhBTQRExIxoBv8XFJstGzMjQOecDyGE4JAdgqpIajSVkKaQKtSD8kOmCaFFa8HaWXYNACGRoZuVjjQwtVmnTHGIdID2NyA6sqYOFut7cBLfBN3+A468eu8YOHePsR0IJzOzVO28Xo2ugpkLfaRuo5EdACAgE4jLAhiZEkeVJsbYhtBIREaxNiURy3N0fZAGoEYrpRlpO8UUATRBMqK8w7U0zqFjI41kvm6ajgmgeedC8ExkQOn/z96fBluWZedh2Br23uecO7wpX2ZWZdbUXdVV3dUzGs3uBjEJTYIE0CJAE4xQ0A5YpiyRsmVL4h/JGkz/cIQdthV22OFfssIMQqQdlgeBpDnIogGaAASCDbDR3UCPNQ85v+EOZ9h7r7X849x7373vZVZlVWVmZTV6ZcTN+849wz57WHutb00Kg/Hj1156JWeKVg/diJkRDVB84YpyeOPW7MnHLigCQZ+9FFMSUyiDSyZl4cflzquvTcHtTqc3BkNviF1sVbJzzoBUFNQgJ1Xra08yUTbY3tkh5lWFOoPehQHelZDyLv3Z+rmy5v1zYsheYlAnjufv9kE/pPtKK5jQFlUa0Yj6NQZoRq6kxz7+4U/+ya75LS6CNOTRZ0oqlkHatm6nXQhDACu8V+GUqMvJDwYGKUYH6XA0gJRmgN4FQ9SiKibzuUN2fuvVN66qTS/tPxZIkkoIgb0Pqq0FIJYoouGlV+NXv3Pr0Q9dql+fYBe/9PG9zz9d7g0gVBV7KgvfzVqADiVKshdemfz9f/rt33+hacEjgmEkROKAgNqrNmCqxkzYu2YvbABgpn1iJAA0W7phkEPnjU66ai1b9geaHo7mL1gIAAACmakPxXjn3LXXvv8H33n1s5/92GQub77+5oef/FCGj373jWt1TQpoppI1gc6bLmbMgWPWuo5laEoqAUCSVOORoc3ruo6FU5EYy8BEmAQPJ11WFLUlgO4UjcAASDShEJI5MtP0/PNPP/bExZ2dkeva6uLo6kHTJ/1E75B9TGn74tbly+dKp1pCK76NoubbaEmyOGaPIVti7bMVKvSFUBc2NwDAxRykflb1KhACLaukae/q3KsPaktjnfb1lQjRELSPYCFfPfmJH03FDnPvDo1wsqo3+hvuRnRZZT65vcZw/+k+P/QeBAHfjeXrTt19Vqh5GwP+gyQkMExtqzmaQVMnR25QDYhRIM+aWZNqc7kY8u7+aHd7a3+8x1jePKinx7K399hOuUsJnn/+I0Wp0+ODxx+/NBwUCBBCAGJTNEWzPmGJxRidt9hN2vrYBzRDMepi9ABdO3fki2JkhIqWcmpjnaVTETRBVUUxVNOMkrBrC9XtqmAwR2QSyWrWKXaHFo81z5Q64d52KCaN5ilgY9YBaL/6FExBAJDQmxISiZgLZZ+bC3CxMywLaAM8LBz0PdBD+QJLt+TTrh9LaXLB0MQMSae3Xh8UWhQuNnXXHYeADGiimBKBGpD40oa7GrYzFtPUtfHYQRO7Q81HOV7p6tcR5oBi0qDMWWYeI5IT5ugooni2UUGlt6qgQMAMKXU5RQIqQzEsBqWvEByip2L7zYN63iXnfNbIAUKJxYiLoQuBjudtFnSAaBmR0JgpqBqxkReT+YXtYj6pv/G17+9t7ZspU9acsogYJNGY2radN20bU0pJsigQZoVqOCa3SFC4SOm8EeZ1lvO/Tbe/Q9oQ6HtFuQ9BgGWLlrrBypP0h/Sw0NqI2zKNLMBisMhsZWY1glDb4FM/+wvl7jDmjG6IIZigw6Jw3iwVFSOpSdIMANVkpkfTdDxPs0wHTbx0+ZxjKbdL9Ump3T9/cZpSJ5gz1hGU3ac+9TyQNHUzKEdgcDy90cSJAqMWt260L74mv/Zbh998FQ7euHXZxZ//0v6f+snzH7oUKo8WRdvcTaY5duSwyfq7L976v/36K19/DdXvelcoQNJkaimnupuL5L4Oah+mycjUl89YSEqABgTAvVAqhggOKTAtslmt9dm95p0Pfmnciye+x3vg0gyxciJ0bms0/uhTz4INXrkaf+N3Xn7x6s0LT1/EEJ9+9sm98zveYU7ZTFXFDGOEm7dmiiX4ct51dds2qetUu2Rtp7vbewUFFmRkUwWzgrks/M3Do1nTtbFrY5Oj9P4xSTRKFgMjyiZ103Ux7o7Lve1wcOvm4dHx8eGxxbg3rorgcsacXdfmcljunt+r28aIrlw7OJrFuVGTNOYcU9Q2a5LexVhNTRVMFuE3BEiIvSOUARiqqZn1S0/NFFRBALS3AoAxogNjUIc9WGlgRsSekDwxJtga74Vi2CfChjWY8qSzz2Iwt53Hq3FZ/flgpuf6U07Q6PvyqPdaB+Au6e4hr1Unv4/QwkrfI0SVbKpkys6QM5gxITkXygpAQXUuDTm3uzMch+CL4sp0+olnntOsxdb4O9986QtPPvmRzzwbXn61nt4kwlCWSKySRQTMnCNRUUkGQua6+bztOnJBkbuUUmwoGZMTQxcCet920bm+HqPvcSlmZ0Zq2RlY7phlPmu9f1qQFBUkpu5I2kOXZ85S/245JwIlTAoZUa0P5FdjQCTH5hHNcgIGQGHsTMT5QV+o28CwD7xfmCx/IMhWAvXGpLM1n8wHT2vpC2FtSvYoXV/0GdCAEGJqGTuWiKbArMBt2zFiFiPyjAiGqpgVM/mkDpBnk1vjgCJzleiY4vS6G+wjMFGGlAikD0UzZFHJqcs5O2ZVjU0sGUiySTc9Otq7eKnwpUKfmc3m9RR9mSh0MRaBRDuRDIwiHaIHxoOjps5xC40UNOe6ETDw3gFAiW5rZJ7BdeXVa0
edUswQUNQSMppByqlpGkkUQtElM819ElwFKIYjlZXPAsCyXMWCNmwAbzOa72SwN+B+OOHbG3+tH9zUmU+rdg+ePvjw7T2g2xqIcFXxpLftLJKNAPvB9rM/cunaRxMDOcQihEFpjXHhXXA5i6p2nVjKSaxpUxgM6zZ1huD4yce3hkVthTdIoeQuJ6oqU6DCT24ebe2WXAE7Cr5q29pzR5C7RMVw6+jGgRtVX/3aldcaCLt7u2qXitnnngq7vq3KPSoGiMEUm/kcsZjM2j/8/sHf+acvvHwdMvmuaxFFDIhJ0FTBcQA1QxMAx0xEJqqSe09q7RPpLuq9L3zpTAQkQ1ZQBabFXAa4DwbSdXb3YOh9XwEnjkDLKdfD3PwzX/7yb/x//3FznH/vGy8/8/SnWrWmSVcPb7555YaaAwJNRowKeng02d0edEkcmaMcc57U8yo4hzhvu2JQWZ2ZmEAK701yCMQIdR2rYoBS9wW7mrZGwhAKVSUP7IgBk2YBeuWFNw+Orm9/4omBc4fXj8rBVuGBMANiSmnWzo8Pp+S5KEbHR/OjaTdrcknSdkkhq1BLWmYwRVAyVMNF+s7eCQj6wjUIar39AQiWhWxAej1UbCHmm9KyYAAIGiAB9IlLnKkhG2gK5AyE+gRfZ6oBrtjuxlTbdPo/ceA8cYXb0NDuO9n6vID7t1ncAwXgbvSidTc3eOg3npXUZSamyVTYkWQFJSQkT/3s7bP3mMeYgMEp5U5lEPTwjddaKd+8OfvNr7/+1W+98os//7lnn3nMpYRl1bRN4c3UsiYEg4TsyDlukrTT9sabN6swcC6kLETOW6G5zimF7W11bJIRCEFUDc2QneMChBDJsXeaIaSbt9746Mc/Pc31oPJZk6ROZC5yXM8PB+BDKFIW8kCaLHcqnWHw5FRZCRTNoQMk0+wYgdSjMMcIQ9y+qIBumc3czhjUPuDUG7s31vgyf8v7Pk9PrxUEWIZi9C0k5ytCnhwfATviQSYAI1E0MkIH2chEus5yq23DiG3TkEwnxzYcFiYJDHJ9QPGYsFTNYAAESgZsTFKgESgwdJrMjNRSNBTvSAuPVRkmk+M2t2SO0HmUnZ2hiTgESR350kyzWkxaz9rcZTGqm2YMysSiwMzY+8GROrTRgBgpsvv+Cy+9ev3pC5cGsZ06IgJCsoRI5j27nI2dkXeeCNuUFQbjETH3vNpOdKUVnrbeh2/Le+5yxFcbh+HySbYo7XObO9rmFzsVI3AXdD8m4vs+uR8+Wh/TPvQQ8cSVTNlIh3tPfeyp2bVvZEtM7WggAlhPoyMsqsF0LuRC3XYpqQ/FtatH6KqbdZ407ac/sjMuIREQ0lQTOYpNBLUQOOfmkcuXioo1p67tylB03cw7rsJYO/VNnDf8ta++0U3KElsfp5/90rkn9uX83mC0O2A3nk1mOclg4K5eO3zjev7Hv/3qi29kDJ60Q4RsAoiq2vvzqBizI2N2HCWKYOmDIxdzVlNwtPJ71oW0pS4we3AOCNWATiLB7tcEepAT8z0LI++pMuwasoOwDD1XAESyf+lnfmL/3Fab26T6you3Llzcz6C/+9Xfy1B0SQwJQHpzsCJ0MU6mk+DKKKme14WrIhEGP6/rZA6DARkaFYPB1rg1rcXg4ChNZ93OdogxIYFzwdCky8E5BMkxGbl5XXOFkqN1ePPKZHu78KNqXjexyQ5IUido27vldDL5/h+9sj2EK28eHFyvB9U4pqxqWQxJWydtFpewEucF0DuH6IDRUMSQTEwIiBDBxKD3SCYCJENANdM+OEB1kTIJEEx1USesj4ZUc84li1zQaKti7mVOWu3qS4Dvzk4oa8x4EZ6NK5aNtjqO92DKvA3horbPpjn6vjzyRAF4Ly911wD/2zwE7/D9gdPSZ7e3V5ngQrsk7JVzMxIxRI+OEMmbZkjZgLDt4qAqHdCrL72xc/ERjJP6oN0NhYBmyFkUFUBBLBEhB2+qCpJzGvjR4bXjvfPnwDyxF0W13FqrhMClsQciMkIVQENitZyFHBWgqmooqYkHDskH9iWDiEqHMrfmqJvctK5RP8rAWKBKZzmSJDVDP0xuLGEsIKQtmqAlJCKmLjZIkrDoxs+Mx48h4CJwZ9E/D1k5r/dMuBT3T6+795nw1OdS+7JlrRgUc2xG3ayR7KtRCNsGgsZZUc0QXIyt5S7WE+lm0DWU09HBzfGggowoqDIHn1N9E8tdYO6SgCo6M6fsDAglq5oJSO4iIlnm2CY/NpEUc1d389il0hVZm2IY6tkkx6wiucsMkkXaTpEDEXZROJTzWS0D8YQ5C4gjRnaEFDDWhiAOhoEYwuHRcbkFQwZrovPOIKWogIqoaqZGwGQGfVFJ58NJgrd1HrNE2jf+P/n+HnneCUpjy3zyt506a+jSidC/xPvuCtp5yBGTHzjCtVExAyNDQwMwRohSXXv15oDmBZPFZNrUySTLsBwe3ZqEYqjZkEgUm86EB8eTOI146zjXUbJ0SgYOsAhimkVDCPN6HirvHBFanx1aVBGdqLkCbDYPWP3G128cpCEyFe382UfDpfNcBOqACsFyyHuXt44PDrt5953v3vrNb976+stH4rZQdV634L2CArIZMKKaeHKOOKuwSuWcByKErp2VgwElzIhdios6rAZEaGoaU8oOnAczWhqzflBm5Ht+jfd0gxVjX2Zi6gtSAxLRU0996Cd/6qf+0a//4xbgVse//403jmbd17/3SkZCBk3ZeQeqfQqhpo2z2XxUQigwdqnrPCICIHGYHE4Kl2k0YEcGOhgUKjkpaKhuTGcXxxWBhuARUU3JsxmqADrfdDEpTg5mt64fSoami0VLRWGSoJ23iKRGIpIyKrjrt+aTIzu42bkw7DKJgiiIEShkpSygveMwLNP/AJkBEhChmhD20TaLwPRlBjVagvA98LUMyAFAol7yRwBaVE3msqjAj7wrzBSR12z4G36ztx+HNdB98Z+tew89kOnet7g3uz2QJ54oAPfkaXdiC2t1dk8/6+wlDwdjQQBQBVM1ME1CSMRoAKBkaIjESITssWBUKzV22uVYFaPgyVqVebp66+pHPnJJsiq6qhrGeq7ZDNhUsiUGFsUMygaStNVuOmvOP+qAOEt0XLY5qYBSga4AZFzUCVQSBjRkymqGmjUG5oHLb7784ic//0WmAtRUGpS5tQc2vwnt1LIm6pQhZHKoCjnmxNXQym239SjwiCRhnqDMQWrLIkIAAzVMdH786M/kNAp+xfYX47PmjP6DQIugUQDAJej3MOkBK1qTYXHpqAO5Pkq3XpWUeLyd/XG1tY9UBS6igEA061KqpZtrO7emsS61U23maStAalqVGVnVTW6WXAgWiz1I0VHvJ7woyR5jMoloPmVqBYIjx5y7tqmnqJhNGLNmjm0tCl1bu4K7tgXgHCm1rp7pZJJoXIIqAiEYAwOwYwMGIqSSXQjmqBTctdBeObThkEZgSY0V1BiQgLJCSlnF5axABAqkQEC2Cmc60UzNNv4+rUq996FYD/xdbTZ30gFgzQUIAHCZA3zt9zu26gdkjX1waFOLx
KXHIyKAuSpsP+bql1Od62OxlInKDAamZemyqiMNnut5jB2Y+G7edlljwu++MvvoE4N5exx4IMZ1m9VgUPp5PScCIEspGpD3PuZOUldVg3Y+h0Zuzf1v/OHRlThKne4NwiOP+vGuG45dGFMRcnAiDnJOL70x+2ffPPrGGxmqMQlM2kYZCUCAoE/6DMBACEaIgTgweDDNs53tcXHu3NG8jdKCcXCEgGJMnk3Vs1Ulj89d2r38JPSZGd8b6P1DWqOTiYZLpyoAMDUg4rL47/6Vf+Nr3/razes3/vCFK997Ra5dv3HzaNZELIqhC66XFFXNe25T6lLOGSKbM0wJiRREQuVTTBrrnApHnkLhCUeDUkVmba5zAgiiEYwAxLKIkaITtdxqGy2Lv3U4VfSKuUvSSm7nmloCqjK4TAjILgQlf2vSWU5oHBHbnKOoqpoCMYpZNhM1kWXZTezzzS4E+r6u0YIpQl/dCJZuZr1zVF95GrD3zF7cpIfCDAHYISO0bYIuz45n56y/Sw+BrXXwYjGvW2TxhDFvcG48GY8V9n+/Z/3G/Q1PdpON9XZWZn7X+vg9jgF4F414WBlJ7xbS59s0FWNGEEBEIAXV3KkKdpiZHQF5dJK6KAksk6lH/OQnnvr+r3/jt3/rnz/5L3/JlWHWHIpmAKeqptqrrRJFTRSUjKZHEyUD9gYAar16bOC9GzD5xfazWB6GpKrgKWhunQF7ufL6N/d3yt39R+ZaeOnQxOJU6iOpjzHXCM4stW2UDh0pegLyYXzBBk9itU/qsjZAxGWl8Th3njB0bVbeqy5/AbY+HDyAiQHhGsT6zjwYHlZaLPxliM0aP34IZ6bBiTsLLrZhBABJcWJSp/m0nh65cmgai2rXhQG7cVbrYp3aqaW5aptz28aWKBzcONwZlrGdo86Dd/XscLB9wdQzcRIzVTTzLihl0EaSalIGBCTAAExlKLxZqmeUo3cOIFlucusObx2kCLGVrSIw+VndNm2cz/T4OGbAISGjMRKDGQOgAhkycLDSQVEyCBREHl3JHcuE1HFJrvSgRoBa5y5qTlkyiplpj872pvOTLuq/9el/7DRvvJdG3HWjw2acAW74Im26mPZs/VSc8qnG/5AeOG3YY5ZIx/q4LZ2+SvZ7H2sOfodA/KhKk5ijgGVVUxNyAVE1CQMGx8fz6DxjzJLp9//o6pe/8BzCxAdXN9CmbBlSIWjEjkQTMrFzmqRgn9XQuIttMPeNb1/77pv5QPOY9fI5fGIfz41dMQ4cyA8yD3JMScW+89LkpWvtPEGbOqQA7IBUAUQMVAmYiQCECBGyI3IImOfPPXX+w0+eH1XFK1cOXnot3Zy2TeasgC6gMaARyqgaPf6Rjzzx3MeWcsgPZ+m9ohWatrLr9kXZAI3U8ue/8MV/5Zf/23/zV3/1eD5FSdNWJNloPIxiIEC4qGOXRUU5iWTVNhoHjEkBLZNyVUYBbeN83hiYA4gpOiYmROdzBhRECFnZVFRyEos5JYMkmAFjZwe3ZpLNczBy8zanJKC+U4fkwcx7TxycD5lyUxsZJZAomPuXwj7DP5r1QeaiBmosaqp9GDoQU39qzzIREY0MwEwXpQAAlxV8zfrKFGtpnhdZHwDBhBhi23Xz2bK2B+CJ6Lw8ccmZFyD7au84w6YXI7OAdpbxLveTSS+8q6G3gSCsdrHT1mtba997as4DCgJepw/ILoeLjFUgTJhNzcgM2AMAivZQiPkQupTINKekqDl1pilR8hQvPzL6uZ/6zLdefHl7p8pd180jkKJRTDnlDigHLAgATAGNwXd1q4IxpmY+rwZDE2UENC5csYh1R0SjPu8VmBnmLB2DDBib2ZsvvvSNX/oL/9osleCJtMPU5G6Wu4nqDCGqWZI2xjjr2iKUWFRusO3Do9lfGg12oY2qmAlbLMl59hInx+20sYHtlGP0K5HqlAq8cn3+QAzoHWjTy249k+PD914L3gBL8R8QEAwhKuTIGoYDBhJydTeLKYVqbK7LZik3oq3qTKVuc5PNqKDrN68+sl9AnjvNpAI5QmqoGHp2Rh5yMqHUqolHcJZnJIqKGWze5lffvDEaBB841+2w8Jo70pyaSad47Y0bzM67IiZDy13bNfOma+lwMufS7Y39wBtzr7ZkMHSOuMCyBPZUeCjZlw6qyhVjNxi0YRCgCOQRDSwrOkFEU1FzTNxlMUBGNFlmlTawdY//k0K8uN6Ht/v+7kdkSSf7xmlj8ok+sBD9YePXe9OUu6SHb1Y/LLQENtZ32xOTdW9wA1SHaOeer79dDKytqhJm07ppmKksyga6uk6mwOy8V5jnsvTzeRfQhkTXDuTbL8WPP7MF2Ic5msSU2k5JQ+lzJ9VWSc5nUhSpfJVzZuMk/ve/d1xnHW/po06efaw4d86VW0Moxzz0fouEpD0+nM7yt7837RQpZxVtc+OKMmYjcgVhNkECQAAiNVBTs8zBnnvq4sc+8uRTl/cLlGc+9OTxJH/zuy/+0QuvXTmM5iDFLpQ8ICgwP/nkh4hKwnumPP+Q1ukkR9jCu9bUjIgl5//B/+jf+RNf+LFvff+7+/sXjm5c+7Vf+3/8xm/+tnURDI2wT8cpWbNCFogZQNETxrxwmHSprKNY1OO5NKCV2uE015mSmiffTjS2iOzmiXNWiclyEqHWNCu1USaH8eBw2nZSjCoV7ZokWcms7mIXG1PyxYAZUpcFtInIgMlE0amC9bI/LHAjBAYDU1A1BVNQBkRgVDKwEyyuF/RNDYANjJbS+rqkvvo06stX9HYCE6kKN6wcAOgaELM0FgBsBF/hCUqDsAjgWoeSYGXVXSuAdD/n/pq1Gm3TzLZyMz3biPeCw96bGIB3RB8U5oEAqmqWmWFZrMKIQI0YgRxkNSQmRssJDByxIxID1YQEuWseuzB49MJzFYE0k8rDZNaamQjN65liLKuiKgtHRAQIJppmTXfr4Ghre1AOB1mTQgYmCg6Yra/a25ty+xIAporRm5Hlq6+8cP6Rp3hwQYVJjSFJmmqcxPZYc01mRL6pZ/V0joBoLuccHA7Ig/eqBCCGQuzMVMkR0XR6C5obqJP0xmg8+jNalLQygJ2oz7ZUst8ZPVQbyGJDQ1wr97EycTyMVoCe1ta8OUBfnOfBpa1BMOJOYXI8ybFGpCSdqCWLklqMjcZWu66e1W3bXXvz5q298fn9ytCDkiWx1HkAAUhgfb3qqgqtCgKZiqjGqJ3S6zcO/5vf/fqlva1Lly4cz+aIACkmqXM7NaFbN4+AnCYQzLHTtk1tk0WKg6OZc3xxrxowUJ9hEY09hoKKAQ8GrvBQebc1GjJqWRbVTuCB04LFg6JgMgM1zMCMDgjMMc+6jikgmKQIK4RmvXduE6x+Gkt5d52/lO8XpWTXsCNb23VgacO1te9wBmta3nYVSfAu2nR3zV45Kq034od0qh+WggBuHFie5szJ8LHR/mM4mdfdnBzsnB9PDqZJWmZwZEoInnJURxpIqkBZXOHUu9HvfOP4Y88+mZprrMEDimrXdKOtYU5mJaWcnQKjiTREARE06ouvtX/wmuqg
3B2G83ny+KPDixcrHzRy3Dm/g9xaio7oe69MX7wOWo5zfZyAgUB6B1bJAMjIRABghCQpF5Uvinz5fPXpZ598/NLj57YHg4EI4iPnq8cubj/3zFN/9598/dUbt6xP04uuKoqPP/8JZtdngXvg4/MDT30Fk77qQn8EkcwM2DlD/RM/8aUv/MSPA0BK3b/8i1/50pd+/PU3r/bjCwpESIhJUsoSs5KDPmkykokpzFLdmFfftNpIlzO2taoompjJ8aw+qgtH3AFn0tQmyFENE3HT5em0mRx1syYZcFasO0DnsmozkytH80ndIZbBMTPFTuqYkrFzlFXFVFURIDAiEvcGB6RFXl1TM1UjNTA1QMuWGQCBiBFx6XkBqH3JUiQDXdSf6yGvHucBWCDlfYQOaCBAsNzOURUZ1uD9E067IUPjkjef2iXWWEAvCtgixQTeV7554lzR/7kEjRbWEViYimxpKzqlD7yLdt3jGIC3oM3G3TMTxn0jM0BRIKacsy8D5gwA0CdBcGSgatKfp6JMgAjOETmfYzZT9aoWrbMo4oehzjKZxtIXjChJs2YEZciuKEENHYpIl/V43iTJIlG1i9JqKHMg8MRkphkR1MAEvHOA0OV5Sa5tZrmFT3zq83PBENARSdPEPGnTYZMmmHJAJx3evHLQNrNqvE1mCWvDWzQ/CKNHJdVoGZElGxGbQTe71dY3c3v98ctPtre+Nph+nIrnZFEiZH36P5Tj9g7pBHbA1YI/Mf49tKUuTxplqFgML31mnvO0OxqWbqtyk3ri2NpuGlylKVGKmjrtui51bdO20+bqq292x/X112+cP/9sl8QLlGUR2wa7Wp0XTTFGUjOyNtZd26ERQlF3+XAWv/WdN8g7snx8/WoknNfHTF5TthZA8eb1uioGmrHWFsBpLWlux3WeNLJ7cbhVAVMEcmRUBsLA1TAMxzyuuHQwKMJwUCVIxTD4isCrOETnkBxIJBM0BQBywAYOTFWcI0bVbkqQFcIJ/G637617MmnXBMP+gStnzRNGDWsS/8qDZO3qxWaysEjjwu5/iu4hb3wLlOhh5cAPjtbniy4UNlsVyVkOX58QEBgp466FfePvO4dJOu9hNPDtPBo5DqqcUyQxLQdUT+Ng6KZNQol7w/DCi9deeX33+afKZjbBUBCXam7e6GBUsi8sx6RdTtEV7F0Zj2cg/psvHl7vSiy9TdPjHx4+emkw8sQqBbaum+TSutn01pvNb/7e4WEczlOsUwZX5BgRFBEJFNEICACyJE8FeRe1O789/Nxnnn/mkQu7IzcI9eS4vjVpb066ukmzqFm7jBaNOBINXFWNfvTHv+jYI6ySwP2Q7iEtnVkWMiYA4sIpRg2AFlgkCBGHcvs//o//Z3/13/wrnlyXOmLqB4QJUuy6pqlGhbEJYBRoOj2eHR8fN2MvdQd5PidJOUlsZqUDy7meddNJZBVwPhvXTVaUJFg3ddNmNTg6apqYq2o4afJRkqwwbbubB82NaZwlHJXUV+dViQKaUZNaVyczQELvHDE6hkAYGAMSo8NFiS906BBMTcFkKVWYGaiYgRGSgpkZIaMBAoPJovoQCioQUZ/rc7lDiwkllLJy5BlRwBZJQtf7eYHz4yZTxjUWDifHEWARCmgnxoP7yi1XluKTtsEpnHX585rb67ug1UPeBwvAKXqYeYlqXxJF1RQBs5rrQyIRNFlg5xDRhzZHWFRVdIzMHDyzKChghGSouc0Hk/bg1vzpD12UmNg4C4JS6iRzx0jsQxYtxltUVeQctFHSXFSyCz6wOjIRAnTMDskQskZDY/IxJot5+9zlvf0PJx55Rou15pnpPLVTkVpT52hrdtweHUwRrRgioDoEbaZvfu9bj5UXyv1SEXNKOccyyCCkl771rebg2vknLye3k+gg5pkHZFjWxEZ8r1PldlvI+zb9EHrc5exyepgtAD31AVRmXF58+vxgdO2Nb6EdVWPZ2tq69drNohgAiHOIRpKh6eqjyTGqDQfF3u42q7z5xs3dR85t7w3M+84iQUrSIAiqoEmsm7qrDw8nPcidonYRG3XXp7Fgn9rm8PBGC0K+QHAB/bYvD4+OY8wfunzJTCTqvJ1qRsPw4hvXFLtnHr805nkgI0Ag4ICDsRtuFaMxjyvySIOqCkXpiypUzjBnUGYg74285qyZwAiBvHNZ0LL2WaQRKUYBVVoIbLcby3vb7XdQKXrGvGDPa8fWTMsb1y3OWfOrtuXt7/MbbNDDPccfBK3rjJupQhZb8RrggYYKzNevy6jpdoZc7o6Pj2eEVAafkgWjWGLTiXgDJOogd4kYB4Pg0E+4+m/+4OpTjzym0gZ02air47YfqpIzyDE6U1SRDLU0BNkPt18+uNLmEqfNeJuee3qrZAMIMeXzl8dQaQSVZN/4xs2X31QlzCmR4yZ3PngiVMmMKBqBBJGDLwQg5/b8QP7s5z7+zKN7owGl9ujKzcnLV4+vHac3D7tJnSaz9qAVQ86m6GxQ8oWLj+5fuEQLXvnAh+ePCy1m2QnwDItyDABIjH1yWuflz/3SL/2Df/j3/+7f+/vOPCKk3JkoIYgOokgGS9maKI5xMo8Hx5Ojw7g/hiIMGMlsPqnlYNIdHNXO+QTuxqRxquWYkuisTdlk3kjdtEeTLgPMW+myliY3Dw7arE2SaJCVIiCTb2O8euMwb422t0sCbSdNm9u2FVQYVMETAiM5IkRGQNM+r4ooiqqYgAIiEPQle23hyYOGgKKKyA4cqAnkXuoiIlFQywSASGoABH1AgApETRTg2rWj6vrkSeIzONDaIj4VfmXrB09bjJf8fMGb76vf8wJDWjXirR6BaLYqHPGu6cFZAPAt/npYiRlNMzueHc8L5xUgRXEOEJGIzSzlzMyFZ1NLkg3MkQ/k0Cw4djkRs4JFSWBaT9rJdM4COabYRRMPFTUSXVUm0q3hCMUO3zys93fPVZV2UUC29gZIjg3BiAjRVMWAkB2nqMQAHttYPv78p2ZEhYpXBevqNLU499LGeVvwsJnD1RvX6tSNqiEzJ4lI4Bzl7mh24yVXFRR2JOXAucLZG9/6PZhfGwQdnHtkFv1oHCbzW+fWprst7ZT3egjfFxXgQQpa9576wClnCuB5ePHRZ7evfvPX9zXvVMODOkLyEZwvfJzlet7Uk5m27fa4HI8G3pRTntQAIFXhxVBUKyTL0SOhqaXc1vODW9cPD2dGVPFAoszn6TsvXLt6vXnqvM/ZuqZNpJjSeDxCTZNmfuPm9QE15wegcZ67eWww0/a3Xr95WDefeu78/qAZY9obDnzwBFZULgx4OLLBEMmDCwWVRRgPlElBsybHDoEwZSNn4GJWAUyiaqxAXe7KqpjUor5023sZnFsAPL0T6cp4c2pSnZ1m73jinTn7ZDms8gKt3/wOgcirDerEgNDfzTZPXZ15yvj8Ttu9th+uo0wfDFZ8Xwk3vy3cuowAYJEByJaHFRC18BSSO37h+tYzY++5Ggzq1DhHOZoKeOeLUrquzcnIMDgeVAw5x2k3Lra/8a1bT144+vFPXTy6fk3LYdPkgNh1tD/
D9+3CYdqJaouhXH9TpqqVqnfuBUHZ/b7/ii++cb9PzlGy/0tl2pg3bbqTdO1xqbK+3Tx06wLzbWOxyods/YZD38wIXTJ5YRkYhMXqKGJy5mSTubAjQVjDRNDgCZubacjPACgjFVd0EFjY9/5qlXPnwXeUo6VtWtGWUUaEAAm1gKxaJeq37L3/zmJz76Mc3ikTd8vlw4cempF33x/N0nag6hZTu+FxjSksD3jLTBksZyKq4QwiIiAMOstUEGKVkbzUaz7huvQqCN0YFut7znXtxonDyzEugPPPmC7wsM6tVK5fM+9+FTx6tadyXSxurTjiUMKCZmZLKrjjuPVNcMjhTsa1CatdtlF6yleIQXAVKz/CCu+v43R2bAp+j5TTHxwi24092AHHby6KKpjfm7jIyMFHpkxAYc2xHE3/NP/8Wv/Pj393DOFpvQ6wjCrY2NxlwNK7ZjzVXqVd3z+r1ut73d2VoT1arTmENRQbvq2hWLgUgASataJ0DHcANABUHgewv1M9r3UJDnB0YzILmuY4wBAGX0XMPVShtlvGar3+vc2LjV394A5Zl+58TivERbaalAtjrdjaa/ttnf8qoff7Hz4Re2t23hLFUlOieOXeBAkKQoJD566mE5ufbLNU5NAUop6KhnQnokOjbqMQlJZWqC8cPJ0AAO+sJLh+EnRuxXJcP88dZKmQGYZHk4MgARSubXvvYVb/i81/35n7zv3R/a8NrWN3zV/XCrVbOd1VvrW2vNxfq565dX/ugDn378ll61LKxXYMvTgTZauxWaJ1XxWsK3V6+urG+2Oz3TU3Rzu9s3MmDhKRMoNoCAwMyExAwCZaB9QBAkFGgwCAiGNUkwhtGSbb9nI2mSlu32g6BSrXa7HrN2hag4drVab9Tnbm1sXL1+w8hoCYErZMWy3vh5j7JmsAZcM2ObUhy2lcSlUmkgsHig+RjCzd4wHuXYMTwQzokBQEDJD778ode/9tH3vu8vHBd0tyeEI4lCbgmfsSxLCBJASgXMxrZIoDBoacWGudPq1BoV17YRgAg7zV7TC6Rt9Q2jx5bU2yqYd512X8imx+Av1izXRssyjYY771YF2nalutXsBL2gWhXVCrpSKz8gZEHY9frzi4u9XrvqOL7nHV+Ym0P59e/4og8//sxfvvejH3rvhxeWjvXZY8u86fWvePih+26u3qxbcnF54YXLzdWmf/7cMSAbiKIVYpHwjfwfiE9ZgWSwpDBL6E7CgW4DOhUCpm27kqGuHd6DiImTmDQbYVG1UTl1972//8kP/f5ffOIdb/4cJCOMb1Wqre22Zur6uL2xubreVy9cufDQPR5UXv7a1z63svnYE7f6TeipbcfA8aWatJRLAizCgEEAeMpYpF1LCTCSGYwQJKt2YJQx1Wq1IlEYrbyexxDcWNm4cWlza93v9avBppCBcOYWa4v15693K479fK//9Ceefv3DpxcqulYTne2W5QAKhErVqTY0Vhls0/crkiyvv9FsrmxVqxc/V4k5keLi9Ejg1JGLLUOqmUY0xxgW2g6ZjigM3Jkddih2qnIy+puYnoOIUHg0VhhgRmRp2wHSu777n//PX/43n/9w1XU7NmKv2/ObbahJKV2tjF2r1GTfUR3te7rT0e0Ny62jU1fS1mhACBaCpEXCktJCBgFMjkCjK3MV7fWJEMAPfBUERMjSKN3q9NtK9Xvd5lZnY9N4faN8G9F2pGws2sLptnWr2W22zcaG39TyetN+8mbvfc+urRrwlK5Jc+zEsZ/4hZ9cOrnMDJQ74mUIs5WRa++8Uy6KckH+kOA4mLmT8xBivYNJuCJJIq4HZjrRS4rb94SIz+NZz+kJQckjyOHCQI6OSGJJkhtz3/tv/vcX1r770pO3/uCTW4F95a2vrbzhgUUKNqp1/+xdtvYXr9/q39rYagemqfuuVelL6Hd61fnKG1997OL5ug681qq3vmnWtq2bbdMUc5teoAm74AeoDJIJDwMmVKwNGyQMj45DICIwxgiSwIzGCABmBEn9wOeAK/V6r9m3LFGry7rjVqouWeLq1cstr6NJaGOIQIAAAZV646EHH5GWfccaNdPDYaNOLoaWRA5CDIQIACRHRY7rwURPG2ZCEQTqW779O97/kQ/6gSYNlkWK2bKkIEFERCgFIYBhNsBCohAkECUiWqSVOnZ6SQWBU7URQClNFUsjCVcoZSSS0n6/hyYw7Z5lubbluq0N30asWqbe9jY2dKVRNyRVQAsLtbm6sKTw+n0NaDm1bmAY+NbmVsWRELBj27rjLx9bYOLXvvzCXccXf+M3/6xjtFbeoy978Ite/eqgt9bzPGKns60/cb315PVmUN/YbvUWlqoIYGLfNzYK48WhsUN12Bhg6jjoEYA9IlF1xdTGTj/VXxCYUWvd97z7H3jgt9B5/ycuPfra196zXOfA95THoP1eRxtmlEsnTv3Fn35i6bPXX/Oa+z/60Y/++YefBLv+0Gsu3njmGd/XXqBACkAwGjSA0gzSAiEVGAu1xUYCA2rWirW2pXRICgNac8/vrTe3L7+4efnpLfIco9jq9/uiq2mzz9o5ff708bM0XzvduH9zrdVYdlubXY1GonQqNVlZYHLBUKAQBbVaHel1LXT6ZunhV3+RRltGyi4Thd8PqY+FT0iZJhOnyTtdGVGYGXLAjBkZy73QiIzN0NAUAhZEIrCPvfYtX/v4e3/qjQ8vMQfG+JZwgFFrtqTd7/cZQBkwhokYjfb7TfJ9DaiVR5YIFJMlLds1BomIUChmaYwUwmhljCIUnudrpVkHoH3ldQPlAYDX7dadih8op+ZYUgCjZVWbW/1AYV9Dy6ONwL68GfzlJ69dbkOnipa0K7L6ti/98v/nP/x7tz4nyKLk2Ou8aTx8xscO2PGlhDF3HHyKAsvxEH1yJgOnuk/KoU4lHF6KZ20MHj1quO12Z7iyIp4LlIv9x8+EAy8IAGiAEZA02Cfve9WP/edf/sf/6Hs/9N6Pvvfp7uOfuvK2z6m+6r7FpaVj1281GxV8x5tPv+rh5fd8pvnBS82Pv7CJNarZVOWW8GutVe74auXa5uqG11LWpi+3FfRYK8VKswFhGICQAQyDZtasjSGIFukgh/5AFNNlAOM6NjNLx5WE2gsWGxUphetQza31PG9tfbXd831mFEwkECBcGINGxsPfg46Swm1vncOEQ06JdKA6kRrJ5AbYXfnD4ScCBGbbtt721rfO1efXNzcZ2ASeAGKAikNIRILYGCA0bCwhHYsIgBAADCNZJC1hbGkxIxG4VWv+xEKn1/EDv1pt9Hq9brfnewGwQcHMRiu/VpEWG0GMwFKIwPfJ5fnFSqMigr7n9XySSLISaNYa2BilAqONr4JKVTYk9K5fOXPmQsXGpTq+669+0UeeunLuwvl7Lyz7uq2wZ1dIIT322Suf+sx6p40rl9Yd1ypRBNGQX3RWwaEa+dk/3M5tQHeFXLAfCrH/0SP38eSHjNscnfVAolK1Xv+KR7zNFrrVX//1P/vmr/m8xQbpdrtm0Vpn+8Sp8wvW3Hve86GVdf+Jz3722NzStcu3
rjyxduGC1b38/Jkq1i1GHbS2NuzFOksBSLLikOugJS2SNqKN4AjNoJQGh1Agk/KMNv1OR/Wa1154cX7h1Bu/4ELFrgkftn1uS7pyo/uxx56r+/pYlT/61HP3nzpW1f3OulqcP+lU+7V526k42ti+1hAE2hdIptfpWKi32+LMw6+XtZMgCMAAUppeB2wuDAu7JvOxRmDios4CokMwsE2TMHNyJRoBjnoJC0IjnPMPv7l388nHnvidhy+eEvUqkfAD37JsQNOYn1dKB3peBX2v3zZKgQ4C3wettfJJGzCkPVDcNMxKa6M1EgsmViiI+0GPtSZkBCSDAhWisckYFm7DMb627aog2xgUyL1u4PVVt6+byrq87T3+YvPTVzcvtaDFAF2u2fhDP/pvv/W7/pbneY7jIoDhgWmTXc9VbuvtinQjn9mZ9dL3svOxMDUpCCFSUCWdAJN4VTbXo4Pb3DXH2TI1bu+wXxAAM7NEIts9ee7iL/6XX/3Bf/Nv/t+f/9XNrvMr71V//vH1l9137PX31e9fkktO29E3/vqXvuyLW8GVm91PPLV6fZ1rruw9u/qsh52u8ph7SD3seyCNFhaSUhoMIEhiMIya2RBrAxrZoEGkaKCIgRAVGwNGSmEhacNkWBBLbY436tWKJVCCsG9tbq1tbwWsUVrM2pZSABpjAi9wq875e88bEw4qMWcI8dIz/Q/bFJ9dIhRnJaZRKoqwyxSj0WCBeOrUqa/6qq/82V/8edaATLZtUZypVhyAEeFGbbaUDI5FApmBNJJmpZWWhK4jbUdWbCmBTs7PE1lstBbYsYgBDRgkdqWoVWWjYglhLMt1pVW1JEkMlPI6m9dv9aTrupW6CQyiBs3MbFnCtsiyhSOk8bxW329UnJvXboHFEHQqtfqjn/sgysra5vVuZ+X4Qq3qzG8H1kc+/rEXrvdExX7ggfPCsgFFPCg+mDg1OJi5KMiPMJuMgjzk3X7Mwg0xKFNvDwZ20oNmEG2wYPjihbtPLh+/tdn95FOXFt9N3/oNX1Gbs7vNm8vH5+cbjY9/6hJotXBicXV9a3N1xettfs7rz1w4PvfoK+9Tvh9YjIJMoIiV6zqOZdXnXLtiuQ5aNkkCkKhRcwCMGiVprQ0zMtqWszi3ePGcXlsLbl6/1QJRsW0NwszP1c+dell1vl49bltencTcYvDg/fehZFsvSPRkVSnT6nltrxeQh7bbCLpt1n4b3DU4+dpHvtAIEmF8MZlBwDgI8e4PBj1ppF0yZly/eH23Jb9jPIGpdtFQa6St1WjwF8IJEYDICGykMIHmh77427Y3N5659uTFC/Oq25GKq4vU73mBYlFxHNd1XKu2UAOtjfKRNQR9v+97/Y4xHPQCIPA8XxAgABiDDEKgU5FCWY60TBBYwuKAEQQSsGQTkK8AEbFa1T6zgZ4feF2joNoW9MGnX/z0853n1oKr2+whUEXUK/Vv/rp3fcu3/g2N6NbqHO/xORgBSPm9e7CVS1sgF/UvXsy8xXmFgtH+XElIf1DmbFw21VrpIf4Jq5It68H2jkGM8pCvWM6TGRGQjQFmYVmux/Bvf+hHUYlf+Jlf6FHlxur2p69e+7MPXHvjaxYffXD5RHW5t7pum/Yjx+17j51YbdK1G6udE/Nt7XQCvLbRCtosfEMBO5J8zxjNBlEzM4BGVoa1YQMAQIDMqIkEAAgBwGwRAhJoI5grrrSEXXUrVdeVSFKYzU775sbNvjbaIqW0hSBJArNhUErPL861mu2/8le+Zm6uQVIAJHucpyU33EGCcwccatNnbOQFRdyGk0U7ktNGAuN/xTu/8r/8v7/SaXUFA4MRlk0kDBtmRhBEqMJjKgAcRzqSGBFJ9Dyz3W03luardZsMC9Bz9cqcI4w2nm8ct9YI3F6gDLIEVa/RXNWxLfC8QAghiJkNa20hgyMq7pLlVnue7/V1z/MsKVCgDnxUMO/YntbSsgSIwFC1Vml3t3vt7gI5L1z+lJaW7ver1QpivRfU/vAvPn697WlLdFvtr/6arwai1OBXRlCHa+LuBLYYD1i2s8Qhwo42ZS72z4WLEO3mwMlQ70DvYngSNQhiVr2/+11/67f/4E8C1Zlz/G/561/+6Oe+YuPas3NgGvOLH/3wp9Y3tx+/vO1UK29+3d333nv2Fa956OlPf9yoPso5q77oCrKZdd9XRgkJUqDlSFswEQgkQEQwge8LMmAQjBJAAjAw2u/229utqrv0zOXtx55d22z2WJMn5GqrPV+tnltevv/iiUceXjh3lsDvV2zXQdcC2dlYWb9x2fP6Tm3p2LET2mETbGrj32ifvvtN32MvX3RcycYgirDGGO8TkhBt4oYY526xRXaF0jadACNGhGZIEI8GRKTiZECAGcl4nucQf+pP/lvv1ifvO+Ga9cubW1fnTy0ioM++5YhG1QEyDGwALekSAiAZ9o02ghlBaU8rrUAxGwXEoWeqPB9Mj7XWGoxBZkOAGpi9AAx77Z5C0e9Ds+051SUtah9/cuV9n7n18ec6rb7QCD3DikzXE2//0i/7H//z1wAlSXcg0cvi9ZOxQbQXzNDj4Ivyaax80qux0wI43IsjbYsV38RBzC9vvh1m5GxMOAplToPDUQAENEjEfr9rmP/vf/cDP/EzPwMsg2ZH9boWisWKeNmF+he87NwrT4iq2vK9Nlqu7/mrHf/yir600lv35WYPu4Hpa9FX7GtQyBrQMCltFLAB0OG4A4YWOhsGZoOAhCCRiLUjRN2VUpLjVpgkSLnW6dxc3/K1QRAAxKwB2LakRAqUtixLSDp99sy//Jf/4u1v/9K5ubqUdrm2PGLNMhEGvHgbRgD2O88o/ZxYnwDGvHjp6ue+6Q03rt8CAEtY9UZVMCKwY4ua67IgX7MQ6AAcX6jN1WwS6FacS5evaqT7Lpy1BWvfqzrSluBiYFlVq7G42dI3m90Xr9xwJZxdrl847lgUSAKS0PWV5dZs4VgIgk3P6wlpEZHjVhmw56tmq93r9o3Ri4uLge/VG1Vm7Vp2p9cWNjo1RwZga2vhRC3w17vtXqV6WjiN3/yjj/3JJ54FR7p2482f9+af/bmfnju2CCCjfVEGcja1LzbcnpGhg5fkh30EIMQIE5ALn5DVMalBsXjWVxzUQQiPXSRg7Abwdd/+LX/4Z39KgcOS/vh9H1lpN9/+hY92rl/21tde9+qHiOTrOoCOcPT6qTnZW1m5cGJJWhRgRTMJAKm5Y4wkAQBuxRYCJaJh1sYIIs3GqrhG90EZkhKBtDYgpZSWK6U06sLdVXv5rma3j4q3tlrPPLG1YPPnPXL+7MVlYW9zp+UIcHpB0G+tNLu9re1uuy9t++Sx84ao190KOv22B9b5V7mn7iVLIkQj18AQr3HHNEESQo3Z9sNESW78EctapGjEjzbrk3f3yJfFcSEsPHDYWT/GPinn1DpUTPaEjsxMQGB0pO0DvuLL/sbWi5/81Pv+563PrNj+9lvuu+h3NpYatkTjt7ZJSOFQpVZDYKX8nuc7FatSrQTdLoBi0gKNEIhMvvLAGABUyjNBH5RRShsWlmW
RkF6gtSYOIADHCNuzhOfOfeCxq09faT32pHezB7BQWbzn2PqNjYbrBmy+7M1v/48/+eMBi4rlMgAnnTysz0C2T97OWbu/6OdCgZHHsm9j850H4mgQ4Y+fiR2xQe+FKECXlAp3zGkk9p3/yzIoG8041EiKSYjMbMAYzbZb6wX+P/7nP3Dh/D3/4l//gGcFhqgTcF9x+/L6s5fXX3fX/MUl9/yJ6rHFRtXx5rh7Yj7wjeBNnw0rbZoBdAwbCs/7RWDNDJYltTEIYAyjAa19FIiIAsmSwiJhCVmxpSVQCkBBmrnZ6W60W03fN2AxIBu0pdDG2I5tlDLMiGSM+s5v/85/9I/+2fzCfKXqCBLZ+kFBVB8Kv3K/SnB7q4UA+1O1jHlTems3CTHCwuLCwtxS6AAAQr/nzc/VHduqWDZqv+/7PT+o1aqBCrZbulJbrNk2oUbWjXqtUrOryFwBAu0ANypOX4sXV5uffObaJ5+8LKUDynvgwrGTy49Yjk3kB153vrHgg9BARhtW3OsrQEMEptk5ceqU6zidbldIYVu2YSOEXFvdMkrXG7Vma9vXfR/917zsVWeWl3udm0T69IULa5vVP/7Ap/7sk8/2PNC+evmj9/7Yj/+YtCzm9PKXJJwykL63i0Emz3dSfjrsawASDTc6DDzCxByo13gqZXK+c7j1Yfh8pVL9gjd/0Rte+5qPfvT9wsinH1+5+mzT0Ut3LYu7ztarFbuzunnP8tmW1yGqkrZq9gLrKmlfCLBsSUI4aC025pqdnrAQkF3HQmallEFGEI7rqsDnwCIArfu+pwSRRiaEigNS9aSlK4sWOZUqSItPdF97IdhGi4Qjuxqs7qYMhNne3vQ8L7AtWa2cPXeu1qi3230ynsXdoA+r7eNv+oJ3KEESYssuGetIHXiCWSqNidIYf+ljafpjQbHkHitFMeURgz+j04kjuCXFK83o0AKT2Px0k01GfCOEsY8ktIwIaBN6RrsXXv3ohQd+r0t/9t9/6vr2++46sXDx7lOg/F63U6m4tapAbCm/t3C84TaqrbUNDsC2gMhoZZSvCBgBhJRs+ipQRMKWlYB7BBQEwIo9X/cDlHK+raGpxSc+feNjn7r51JX2ZhcCLUCK4ycbV29ssd1v2I5R9EM/+u+/6l1fR0S25SQkSv1Js99e5CojlHadYcmmuTLntUXL8+InGOJZ/4jx9P90roMqDMYEks2NpsIH+8r/DACY91Ug3S6YEQ6HtjPmWx2REY3mimVr9r7+W77zDY++6Ud++H//k3f/iQTY3uh4Pq0A//7TrbrdPTtP957s37PoLLkAWtQlnq3LmiQX+xbpis9dpTzgvtLMDAYo0IIQAA0ikAFCIpLSklJIYRGRIMlEiqnr6+1ev+v1goCBEMHBwJAEchC0tgSxrwCMZQnhyq98x1f+6L//v7e3mtW6S0CppedF8weH1PtAEZZq30twlBcAFJFTapOPD0YrwsitVr/4rV/83PPP68AQkdam3w8IaH7OtcCWvkcsdNezbVSB3263jbYXG9WHHrrn8uWrut+liiWFsqQ4sTDva335SvM973+i6bFNFgNTpXZ1rfvrf/ChN7/xVfefm5O2rDgVqZmE2+p3O36/01eCrED5RrNV6TXm5txKY329s3CsoYn6Xl9LC0lcvXXLsfVD999/9z1nVKejve211Q1yarf6wa/9f3/84nazHQBaEgJ117n7zp67EB1pPJh+GQdIj/KG/4OS77LRkdkczlqnKzLazhuNWHcOVGroDADEC76ZEdEAeF736vNPfeU7vpQE9bb6WxudRt1561te8Ve+9NFFx4N207IbW61Ov9dZPL7AtjVfb1RcQbZQKjDMrlNZXJzre0EvaFarNVugJAPAKIQxGGgPGQQjsGGjjDYcKCRAQDDKRu75Xh8MCHADxf2+3+l7TfZ6KgCfK7Xa3Jzv+Q2XFhoVIyzLdZXCZnOz5lBnu2016p/+zPbrvuF75y6+wZYyXLMGg9ghjjgpaLIR+WK7lDbWXloQp8cDpYkfOSTOzPQMpsxwNKdGARILNQxGEoJGVFoJUKq5+uRH3/v4+/4s8G/deOYFAO/RN72s7qiN6zfPnl7q9Ntnzy86Ar3N5vKxqjGq2/HZABAjstFERgeeLx2XpB0oZbTpdlWgzMp65/KV5nPPrr1wpXe9Bz2wu1pudYL55Tlg2Wi4jUpt5cbWieWT3/bd3/GN3/5tfRLStQRIBoB0RCdr0aRM5wNA0efF7O14mW841XQQ4w8jFNHqhSjIH93F9I6m+zBAvW+0ifbZzideKiWOChLdEfaV8JjFfs+3wf+1X/ylX/r5n7xy6bofQM/zPQbNXEXfAnIZXKmrVbtiy5othZCB1oFhZUAZ4xv2FOjoFDAKTV8GAjIEglBoY0AgAwZaK216gd9XxldaMYHAIFBCAAECIqJgwcZXklAKa26xtnZz7R/9k3/4r3/gB/ueX6tVtdEI4WZCA88y212OrhV0NJCTEfuUPkBeew4J2I2a8K60/vjHPv6GRz9HCkfrwLKkJaUgWphvLFQrAsCS2O91gVW9ZhMZBL188tjS8YX1W6uObTeqFSHNwsKxU6dPffTjn3nvB5/e7ChlSAjh+75CPrawAL7Xbzb/l2/50pPH7M2VG7VKxfd1N+j7vmFm26koNt1u9/jJ467lBF7Q6nQCPyBEJDDGdyxh2+LlL7+34Va3Wxv9ThsM95X1zJWN9z32wvMrno8mCIANq37/27/jb/70T/5HY4xlR7sAJWEu5BQppqhfD6Yv7UElHF4HIMFkll82fjaoYMb6jxUrMyChZsUm+NX/8nPf//3/Sne5vd1v1OxTJxuvf+X5B84tXrzrRG1hvq/8+lyFA6MNgWbWik3Q3G61m51up88WATntbu/eR+6dq1VPzlnVitWoSQSlgn6/1wMNQkgEIkJkYAGEQIYESI3EQijlV/y+am6vX1/fWtnaXl1bOObe/9pXG8sBt+JIZOOj4KDjrd/abPf67nzl2WdWbqnFt7/r75585VtIkBPv554Lxw/Z7W5A4XHYvlS4QEHW5Bqi9HkoPFwsQFGEjcMM4xfsaOm6cQT6xAnCwDxNlgKnIs9sEAGMMQCaMdCIYCxHPf6hDzzx8cfml0Rz/cp7fufPX3jhKgh+1cuXl1x1pured3ZOmx4BnDh1vNPp9IOubVU3bzVbzb4mp9Xzb600Vzb7283uVkvd6mt0q9vrfWa3FXCt4W6sbUiGsxdOvfEL3vS5b/mit7zt7fWlRdupEtpogQ0U9mTO7nV9sBb/bsHJKjMu8vLQfoilfeeQI4yxJbI3N8wER8zkTJUxOUcjjB8iI5CnA9VXfnvrh3/oh3/3D96tev7G9nbX6zaqyMBaMYPxjZYgCAwiEpJEsoVwBAoEZBkqBJJkjDEMBgAMeYFiAE8rzeBppQCUNqHKZmYkyQBMjMgEAChYAaARSPWG2+32XvP61/yf/8ePvPbVrzWI1YprjInGwMPTYSGaBlu+aGaG/cF+CPAEpSpyaHY7FYKBlcLv+s5v/+Vf+UWJVhD4JIQtpe
f5xxfm7zlz6vxyjcBHMIAGOOi1+4bE0vElo4NAa0dW3Fr9ZrPz/OUbq6tbRtsKoe/3atXq8uKxuWNLl59/AY1xpLrvzPw73/bG1to132+v3dpYOLZgGAFFt6vq83NbW9vzS/M6UO1OF1C5lgWsF+aqx5bmq1VbIBEAE1+7fnN1Zf3U8rlnb7R/872Pr7a15dhBz6/V57SnFXhf/Ve/9uf+809XqlUhki3+0xY/AxdGxO90ILNhOKS1LjUTd4VRRicnu55HdxChrzqWdP5///if/NLP/QJpbVtYm6ui2Xro3Kl3vuNN5++5gIRuBVip5WPLytdep00Cb91abbWbN27c/OwLly5d2rpyea2+PB/0+hfPN1754D1f/AWvuuf88ebmar1RA4NaGaM1IGo0gCRIsJFC2GQ5yijJQdXrq9Z2u9XvttrzlcbCwryp2JWFpZ7vNeYbzU5r5eaL7a0eo1zb9j711JX55Qe+7u//6+qpe4UtJQAbDfG8prQ5hOl4YxmRx2mLMQme+z5OC5ba/cV3Rwi4YlJFd6I0uxlCZE201GhDOGOAI+s1Or+KwGittBIABsH3mmvrq1evXG732sjtxz/4/nf/+m91VlaFhMWlWtWxbl3dIhtAUj+Abtv4GhigNldreUxG93o+zNXPXLynUZu7+/y9y6fPPPjg/RfOnrx4111LS8uVxpwCaYBIEIGhcK/quGzhZP8oal5Wl8METqI1qZ0IYscregJyrjpHgX+AfYkd7iOp0kuoh/n2cEhbaiyYsGcYJiIA8IxijVcuX/7Hf+97H3/8o5o56HQVaqWN1kajsS0LlAJAA8ZoQEA0BlgDIzIgspCojWEDSEIbowGY0GhgIgYGImAgiMQ7AxAhCfJ9Two0WoOWlYob6OD8hbP/8H/9J9/8bd+EwnYcGa5eAERgk/SXmO5Hl/x7xoGIiVLOP4hcAQCHrjkeXYykY3rKk2C/4yu+/I/+6A9dp+JrRYCGFQJVJb7ukbvuv3C6VhNSaIlqe7Pdbnu267ZbnfmlOcXWx5944ZmrW20vqFUqS/PHt1pNQJ6fry3Oz9vVeqVaufTsZa3a3sbGF33+y+45XT3WkG6l0mq2A2YGMob6fc/rBk7Fsl3ZabXqVWf5xBKxnp9rIGnXdnyj+53+xsqqR8Jyjj39zK2//Nizz252wmo0qlVAwYa14q9517t+7ud+2pICo/PVoxHKaPQ1R5LDaRNPG4dxBGD8WO8IjAgnRxo1vgjxXFzNRpLoBR4F/E1f946PfeSDQWB6PV6cs2u2OHOifu/Fc2/5/Ecv3nMcuWc7wnWqWrPXD5x5t+8rrdVTzz979fm1arX+2adfaNTn5qpirured+/Ju84cc6Q2qIEZDIcxIZQokAWzFFJYbmAAGFW7aRl/c3WFUIIPi7Xl+rH5F29dazc7587dtb7SUrYEyS9e33j+SpPl4pd903e9/HVv8IRtWy5GrUmxpQ8pGZ/UebJYQIlZP4zgxaD7sFs7WgC7HQEYHVMsXjwo+Q+HrYvtCM59iyRm9JsAwkk9AAxggImRDbLRKgwuchB0Wuuffvyj7/6D91y9coWQn/z0892u98DLLwqbr1294TEtH19crNdPnzlXEdbqi6uveeMbv+FvfEttacn4AdlWt+fbjqUMW5IQgLIMEK1dzjZ20dk7lESPLTGGzOre5Hb2d+kUGih15Sctz747AFGohXNuzDg99DAjmXKWTKVBBA3Q6raC7e5P/dyP/+LP/Ior7I2NDd9rM1HP7wMiGNZakSBC0AYwnIEcGuWEBtgwIwASaaUQCBAYERHDufsGmE24G7pNhMDA2hAhaO24ZAvbqjhf8rZ3/Psf/zEJtlOzBApAYOb0HLnBAoDB6NnRov10cDCbAOVkUamGGkH9ohIsXswB46qN7m47F5sBEXp+X4L1j//J9/70z/wMG/K8PoABIQSiBHXhxOIXPPrg3WcXaxZ4nb4feELazfVWpVHxWf733/3z61uowJIWOEJqA9KWlhRKgzG4eGxeK+52t1F5r3nk7Nd+6aM3rz5fsSUBbmy0jQFjuN3aqtTcWr3SmK+xDk6fXO51u1XX9QIdKNVqdTbabWl4cammZP3d73/usRdu9bp+rx0AgAQ4trjU7njLpxa3t9rf8w//2ff9y39KjEKEtRsokcGhMUPipHcqDqMDEGJcB4ALfQsAhoSQ4++phaFJc4exOQOE0O+3vObGl7/z7c8/f1k4Nan9RrXab3eBzcnlxgP3LL71Sx59+IGLkiQSa7/v1i3bqhLAdnvzyrMvNuYb61tNIYTFvLDQaCzVlxcWdNBCQaw0GNAqIEIDhpWyBAICEBlAr+ujUt3tLWnZfl9Vwaq4c8YRK2vbTz9xWTqVK5dX6svnjGsrcL7867/zvld9To+kZSOCiJZ2Fbt6vMBlhKQbR/rASImTQ+mT43gIufLs6DbkSjIsnTEruz8YxxYtqfHo2ZkHA07+cvpKYsYxDPbcDy8aNtFNzRqUBmBtSLPe2tz2fXXs2JIKfBUoRXBieamz1XZqbsVxeq0WSct2HWZNJMO6m9juT5dn0MSx8ub4TlFBHkKk5+elxwGii4VCI2Ciywce2EBo7b2at9MBGN8MOoRI+DAstAnD6gZQgOf3hJD//od+7Dd+/b8999TT8/NOYKDXb/cCLaQNhEYZwwwI2oRgZmYGksIYRgCtDREgECAbwyI8BYAZibRWUsjweWRkMLa0HFsuLdVOHTv9f/zID7/6dY+KiltxrEF4K7XyPDvcf4TofSQRNkE6QjGC84tqIETp98HFlK2fT7Es9ySdscof+gBeV6Lzv/6Dv/+f/tN/ksIxyAZYErAybPSxhvW5r3/w0VfeWwGjgmbFdiuVWk91g0A89plLv/Onn+mBTTbYKD1PMbAUBCAQJaNBJruKjap79pj17V//tmvPP9Nc2zh56oRWemt903HdRqNSrbtCou3aWmlEtqXlBYGvYHurdeXaDY3WuVMn3Frlg5984Q//8hnPkhKNDtBodqQ8cWp58diy1+7/y3/1z/7qN/x1AWRZAuO97waGX+6ApJdMtxhMATo8GO3dDnmnLAw4xAZNvkNk2sReoGEACPd4aPa85sr63/5fvvXKjc3W1oYKfIPaU57f7xOr5eWlu8+efs2rHn7VQ3efXnQd0ZuvOkKQbbFN8ubNmze21px65VjNalTIti0VGEZQSri2a5HLWimjADQgorSQjFIB+MoRgpVZu7W6trm9uHTc0lpajY7CF1fal1/cePqFm3Nzxy/f6n75N3/jt33332r1wK1IC4QBBh5MIsBMfePjhRBgpEFZVL3DWmFYo+wQkBjyCWMYyOOY/pC9e/iwo22TfyBdl4M3jPJNyTFzpfoPJ5PoUu+lwtuRfEUgYDCs2YCQIoxhQjgPGkFFE9aYYomcrmxprdNWF8Qq6qgYj/HU8SH+ax4DM63w7NQcgP2T/+mzjXP7ccHRaK48hslJCM1zAAQwzIJIGe61et1u8+lnnl5fWX3h0qWnnnjy4x//mOd1u61+s7ntBYHSioENsDZMCEaxs
EhrQ0hJNkprIQQSRtcBAJGVIYnEyGzqDVfalTNnz3/X3/zOd33d17uu61QrhBSOJET7fHE8FysjJY9iCxwxJA5AKa0zkqDg5w/TlZmXIU6imHTBw5hMTGpthKC+76Gir/zqd/7pn/+58jwASaSFFIGvAQlZnz3ReOvnv/ZV9y5D0CcRWJZfa9RUIP/4z554zwee2fLBrdZYoN8PBJHlWIEXkGBBdmPBal9be+UDx//qOz6XdB8IWu3O3HzDaH+uUWejSUrPD4QQwNhud9vNTtfr2m6l1eqxVbu11dpqdp946urNZqAVoAZEUAwMcPLsqePHT/yLf/39X/rFbxWAbrVmCUpOuweOmyV3bPxLqVscxoPASuzI4uBl+unwD5bMdxsWairNhYGjpXkMik3f8xVvfePXf9tnHn8ctNne2HSrLoCuVNxOr99tdis1cXzOfuiuE6952YVXPnLxrgvnF+ccKcytW+ufee7KsTPHzyzVKegiG6/nWa5rNDmOtC1HAHu+J4CRSDiuMb4OAhs58H3Q8uat7cvXbkjXWai5Wy31zHNb1zaClh+sbwenTpz+/v/zR88//JCw0UKLgdkAFb1/iN0Bjn4Mk0DjiIOilTLCFh9xd4QDMI5pXBRo6WST7yNSGP/6PmCsrJI509nLka4+SBO30EY5Ox+j02ozlyMbI179CczpcadIFUJ4wGkhj9R88fB6cd54qQC4vY7SrhGNAKSmACW3ImLEiyyS83JL+0vpxPrJS4RYKlz3mnRY1LIRgEOFvZMxqheDCZfYGkACMKDBhPueGAajjQD1+GMf+bF/93+9//3v6/vd02dObW62er6/fHJ5fWW91+lrYzytwo16tNFGh8tuiMEAIAIabUgIrU216gDymdOnX/WKV33X3/yuz3nDGy234rpSCgmDwFZE+bCQmBeWR6C/HHUMtv4d9sDwQNuIV1J/yp+OWr1M0U7Q5GEX9nxvbWX98z//jevrm+1uB9gI4QAaYDRs2BjXgruOV9/8xlc+eM/JmmMaC5bF0Pedjz5x/dd+770rLU2CJAghpRSExiBqr+fXa3ii4bzp9Q+98oELrebK4vHjaNmB3xPGuFU76Hue5q2tJpFsNruBpzylFbNbrbKwX7zVuby6+alPX3EqTsdTHGgEQADHdQXBX/mar/pX/+oH777vImjjiHAuXKH6SShlMtIccRxiB6A4sFUaqxpi/eFI06AkqByJycEqPQVKIP2j7/6e//zzP3H+/OntW00WqLVBBANGa42IlgVAApUvBZ06duyeu0+6BJbjPPH8zVpNPnLfieM1eXyhdmp52arY1WqFOLAssgQrMMr3DSu74jhSsK+DQG93vBu3mk+/uLqy3u92dX9t264vGHaNdG7dvP7Vf/2b/v4/+z5Eshw7CoNGM/6LEwdSRBix+88uje8cpdMmfkl8YkgTFN/aEaMdgFx2u+rCB+sAwHi5ccyM0XmOt8UBKJYq1jqD/SkHTmaKORDQRMZK+BsjA3OwmWXWrcBU/H7AYcUoWHkg7fYd2jghOJ52XebmxYYaAyByoePk+X86Jns8grJvAwFJYCXtBhzCJtvRVhv2FiajYelNW3HAmcygjSEizTrwlNdp37h+rdnanl9abDU7luUC6D/+g9/7xV/85UvPPefU3Fql2m51en4PgEgAawqUEkSAODfX0GA6re4rXv3Kt3zRF/+Df/i9J46fYMMkQApZGhYJi4CDy4eQ9ncycj2rtAGKGjanOgEysjbssHm9WxCOuXcwe2cXVQAAw0jY7LQf+8CHv/ZdX9MPtNfuMxMDAxgEADIMiMZYAs4uz7/x1Q+98qFzJxYs2xZkw1bXe99jz3/ksWfXNvqdvmHWlYqs1qQr5bHl+htf89CZE/MWBhUJwrYd1+l1ut2NrUAH/XabUSjNJKxeT9Ubcz7Q1Rub19daG+3+Z566bgCMBiAyBgCZCAHk3EL9O7/jO37kR/7ddrM9P1cHgHALrPwsRExVEA7HvNsc9rm/Hro1ADu7I+kpsaleMsyNLjxYHgPJ5GuAwRCirzzd0z/0gz/4337557u9tlWpBJ4XKPZ9z1cBkXArdqC1VooALLSkpF6raySA2yDWHHi21qdOuyeWF2wix7KqFVmrVR1XsgVKszJaE+h+lwLd7+uV9Xa7oxTQ6oZ/78W762ykdJnk/Q+84n/7/n+6ePYC246F0sR1wh3ohThc1RaEw9AH8okOMeWHRTJg+JW9sN0Iiblb1+IAML5giWuR+OWY+XMbMKA0w8ABCGP94X3E5HY5z+R62cCuTQWlQg2VnY6ZBMGzLZXhtkMoszNIlzzFomWsxxCdnBV7WEUHYHAxO7wyefHSAaCppDg8k8gBGNJet90yTRvxu343/pP0hWSBCgIYBgYgiDu2YQNGktAMxhgSpAMtBb7w7AtPfOZTzfbmzVvXf+s3fvva9StLx5bm5urPPPn8qTMn1le3F+bnv+lvfPPDL39Zv+O9+S1fWKvNAZLtWGF5TdqTy8yLu+2kfUljMuqPeKtcNQ+TFzCQsXspT/ia1toY8/u//Tvf+M3fFvj9QGsAg2gJQgY2RofbQwg2NvDxuvWKl515xcsvvOzeM0sNp2+w3Qmef/7mdltpY+bnatWa3ev0FhbmXIfRlr7XM15fOK7Rptfutra3A8Vaq7n5RrfjBwpQuO1+cPXW9pPPX1/ZaBskzegHSkppFBMJ17WFjfPVpV/97//lla9+ve3atiATKyeMrKaBlknPUIyAxUrnibp7su0v8qphNzkeuhGAsUozRhVLVOZwMzf3BaL1VcxkDID2UWrvyac//bu/+7t/8Pt/9Nxzz/a7PRbkujVm1e/2tApIIhhER6hAeUqDtEwQICKicWynVq00GlWJ0qgAUJAUUlpCOiSclqc4CBzwCY1liZXrN+carvbFyeMNMt7y8vm//ne+5x1f89d8L3Acm+LA4UCwjyTBMF07PoruU/HiMNO/1BZMbu22DMMKNjHrT/D8waTGma+3f3475x2STFMPvuUbOwnjM0A8syU5YiydQjTpZZATYryqJck0mciQH9E67MZN0QGAET7AGA55sT/urXypXZD3B4mKwbRRnNwt77y3qVnHyHZI6/DAU+VMvB1gEAOOzkZLJQUM4YR/w6C0IiQGJML19U2jAq21Y9udTtetOba0pGVJId2KiwyKtSQRZR2nOliBc9ulxgx7Rq4By22VYW8CAJROi94rjNGep//4D//4n3//9z393AtgjN/rAAgpwmUnrI2SAo02koAZBMCZU4uvedn5R+4+fv7kvETwNWut0WgivdnsGQOsuo2FeqvbD7p9pZEZbcfq9v2AZNeDdtfv++aFKzevrWx2+6rfC0/gIxYICh3H7nqeg1ZtwUESf+fv/p2///f+fr3ecFyXEI3h+LALyPWHXAARcuo2G4UZgRG+wYF1wYkzOoIjALvE6LoVbdnkTsgzRhskYtZGK8MgBCHzpRevPP74Jz/8gQ/81m/+Rmt7A0BopX2lmYCZmFBrBq363T4R3/fgxWPLy5ZtWbZDxEJY0pII
ZFuWFwTdwNQqdPeJ2ouffW5+bu7FSy8E3Y7jzlet2rf+zW/7sr/2zTg3TwRWbDnhSJ8tquxga5YdiLMXizln6w9LsDQCundwWaZ3pMo7HJVKhzgTKyexZuI1Oqk9bUrkK0DqaLEsMrZtyv5HiCax5R4uKd4hINJuUB7GA4AhNclp/enVdv/3Qsz4GBznd8TaaxwUoxKpaE38BJRUPRohCe0PA8aYkEYUx3DifmMQKd7VZ5BeGlMl69HrVkcIUyNuweHMGTRTPzacwfR7enOr9X3//Pv+x2//Rq/TMdrowANgJMnhqZFgbCGUMVqzAHAtqNvm3JnGqfmFuUa1WrOJgIOgr8GquKT6QpLW0Ki7zKCU3tpqbrWDtoaVtWa753m+6Xiq02UAwHATTxCVSoUDfvDhB8+fP/P0J5/8O9/73e/6hm+dn58jixxpAUBk/Wd7SaJkcnswT3cseYRXsF+YSEMcRgdgmF6cACMM01EFSB3WM+hD0Xc2HG7VTFqp9ZWb//W//tJP/D8/sb6xhlLYtlOt1c6eOXXz6gpwwEEgBL3+81591333+j5LaR07tmB8JR3pSLtar3Z7XZ+p4ooHLx679PhTv/ervylcOtZY/opv/Pa3fuVX2/U5tm07Du0whnN+yk8RxcGfAzKGR48D5FhxmI0+sQdS2sEmSm2m5MZBTqgU2yHT2AwF5tgNEnUWh2QwtSziDkFO0N0+DzZZlbC/mQziEePFJu4AJNtkDeodS+hYs6TXxkB0e0ikN715/0uDfjNkUNStRb+SB3Jz8MJ0O3cSMAgCzT6/7/3v/+H/60ce+9TjnU631+myUgAGAACZAJhZCAQ0wEAsCIE1Vyu2XbGRAFQgnSqDAaMRQUrLdaDf9frKaON3+ypQQAgC0BgyAEprHYeXjp869ba3ffGD9z38RV/ypa9/46vXr9w8eeIESGlLgsj0h6inDWyj+OX05nWc0TdJ7DR9fSeKZB4qDU3uNwYjHLv0PA6jAzBd7Faz5uzaQakyR3hGW74FvkckOq3Wp5/41NPPPuP5frVe27h143/++m/fvH61VrNPnFi+9/6LC8eW6wsL0nKkLSuOgwBgjEDpVKrCrZ87f+HMyRM2ya1bt5bPnjp5+lzfMCPasZRPnA/kofXA7MeIqo2+UkoKLGPrtO1SdNtKImFDnt+xXUbY+jty+xgVPIwOwDBv6vYVtOjKQaJoMk9FZcVYoEI8uyevpOIpEyU9PjNXJM0lh7GtdoFk1CTnGZX2prLXowemXKj9p2l+6v8Rb8dxkXjC8U+APCcPmj7XD8oDGxx7D5FltxtdP8MRQGlrjrYpy1Vzip2m7QAwIhoTLreFXi8wAX/qycf/zQ/88Hv+7M8dSV637Xm9qDiYnLLEUlhEgpUWElmA0YaQAQSzYQatlBDk2lJIyUiB8pU2BsB2HL/vC7KM1rV6jUicP31KKPl3/8F3f+27/prlVBmtiiu11gIJCQ3k9pjLI+x2HLviaTomm6lzSjTvaGZMbH7sFbH5ktSyOP1vxwIcqAOwo5kIU3UAJhaOubAcJD0qsmoYEQ0DAwtEAFDGGK2RpBBIAIGBXnv7iU99+plnnup22rZtN+bnpeNWarW5+cV6o7G0uLS4tOBaNhsQBAzAAMoAABgDQoAVmwiJQ49lUf90TQ8yOJozzIrlyqm84t0J2qXoToxwTtK5H0UHAA6dXo9omRo1LZK5hBEiCsf76w4OphjZ0e8kwybLkJyteaaCt6nOB3McahI/uVPadQcwZM+azHyJpXr8LaO5o8vxk4O9iUabgbcFB8Q7MwAMUXYwQpimTNiplyT8E2bNAN2u1/eC55555td+7Vd/+91/sHZjrddq+YGHyIDAxhCwbUsUZJSWQoBgFWgiIpJCoOcHRmvLtYK+sqWFhIjUDwJlmBU4tqzVK4uLC+fvuvhVX/PVX/v1XyW1WFxYEFISIaGIi5TaGWHQwXJAgPj4mixBEfIjJ4mvMExy7U9Ephy5zIs+zmTrPQ7OASgw62DDj2KwY2cbYQhysbTJylkMyEW6O9vzEMDEHDdY8MgABGyifd+QgAEoLoxJ81m6JyemaJw9QTLvc1CiiCGRI68gKePuq7p3bZLTazm/LnkGht+dILsdAyQTpDnDGEg7AMO8rSI7YPb99KqBgUUI4S6KCZPsMB/lCDdazgEYvvnEgeK2EPQIt+JuMJBahRnGOe22U/wr7iwzTBlHkhOLhS46Bgn2p3oMgKH9Ex5GBAier4yCtur+/E/+7E/8xE9dvvzs/OKiBt3faiMyGmYSzBoRDStEwQCWZSGhChQgICEzVl3XrTjNzdb8sflTp0+vrWydOL70hW/6/Hf+lXeeOnXu7ov31Kouc7QQxiQBKcZ4R/Ss5Z7emHd48GVwGwuqrPAUpG4eGPfkdW16dDHxASC9F0H5uznczilAIxZdDAv37gpTcQDiS/HlaG5D6rjTVE6xoB8I63Q6oX+au170WIb6rdkVlAmPJzS8XZKs1GcbJoxgb+2yDziSCuDAMU6wI8fUhYeHOveDHzyNrasOMRJTMCTDYajnLIp70ODct0EgJ3srUTSZp2aYYShK4sT7xjnJOpaETUNe9YLAKHzuuecuXbu0dGzhfX/xlz/10//5xqVLxGCY5hbqne1u3+9VKlXbtXqtnmYdKOVWKmfPnen1+q3Nbbdm+33ztV//1/63f/l9fs9ngFMnTzbm6myMECI6JBEKomvgaqfD4ZicvZicRzIw4IrkGWZoYuEmZy4etBsQhozTe3BPpFSQ2dxWBVCufoaZCiNaZ7ptMMg6Z6nEsfj0UQRYbBiIeCy5lpQzd2VE9iXhwWKYddI6T5Fcpc5M6ZVdFbkY0tifnjZzAMbBmA7AsP5a0lHidzh353CYxfsKjuXDHV/TGUaB4wnHmLoCEPWB1FT/mZDaP8xIOyFyCjoyRg0QgWEIlCZAIGRGEOYH/tW//fBHPjC/OP/AAxc/+sHHle+/6S1vOnf+zEc/9Njc4vzi/MJ99937yte86srVF3/hZ34h0MFf/aqvevvbv2xufh4RAQwKkeRlosBrcnJ8HHDl1AT4gfWWncUfahyAvEoqaioosdKSsG90Z88OQNn2pEOeLMsobycnDsDYBboNIwA5AyF3vdTW3RFYeH1PZcuwdmpVRXqVeIb79pbriPKEU4yiLOKcDpnIKvYnyHevcjdgV/Uod7dSt+543NZq7lssKfQAMtGL6edye0mXGymManwIrP89hxFm2BOKnSrjAKSuzxpohsOJgZ1WDP4ZYAAGMMb02n3bkQaABHg933VtZrRdCwz4ypckhBRK63DLCARSSklbogESlOSUWPYDEz+37JVTJSozQfLRpnRpM5UZUtuBXz5IYOK+uffDHIsOwG5XWR20A8BDSD1OsYtNtx+qaxD1z7FCqtGjI40Q4vBN4hZG/Jn2C4rWavh33Ehnzvo/fBgn5LtDHHi8XPby+oiEZ1MgxsD+OgCZKE3GWs6V4eg1VCINkvKXioUZXoIoUQ1JdKnsgRn2ATMa7xpFGywT5ot1RTTQGR/kguG
R2MaQoMxc6iQlRgBAAmMMIRmIop8lJ83n5kfkooyF9swbIekD84qGxQ4h58Fbe+WbPbNexlDNnj4+JmjnR6YKLPsyZnHTTTPOsMAewANzHwEx39iIiYzG5Gkc8PJAzUOhdsX6ltZlcHGoPXS4gEO+p7GXVhsn/b2nPMNtQKwgOHUgUspThn10P/YXoaoCiKqEOFwszPBSRuogsFG6Y4Z9wIzGu0YindMKnQGQU9TkcO+TMDzKDAAEiICCwocYITtJGkMRyQBExJCZZh1J0ViMJp2EMTqxNexBPMSaTAnh7IXhPkM+BYS8/cJ7NkP3zHqYjEnAkFbZCbTL53eHHVNOF3hn16s8lJ5PZ2KkbO70GWDlvJ4uT47rhhUye3HwVCkbHLxY4lEbjY7xevZ7OrGxe9kkee0ZaZG1D8lPA5yyglMXRzyf+TKN+ozvpyc5l/aY/EMA4YAaM0bdgpkZmKPFAZwK9exd4h4YeFC3MjFyVKoxw75iYOhn9cdBxbn2C0eq2NMvbGSMliU8cWalcuM20rlo+WBojsdXky8YAUxozSMDAObmvmR1C3M0ajCw9UuH6THlMxeitENRas/xKNGMmJxmDun+ybg3eb7713LZcVyipDwAu5vPcNsXAe8OaVch+Zx2FhwPX6UmJqSz4gFHFgswrE2xUPjbi/xgV2h9TVSsIVUeGrWdjALFkbqpIwkiHIL2GSA6TzSeL1hadz58DDYuUl7XCJUwlXwOhCZhIGzARfsjo6J0p5LyUeKWlwDS4nTWLkcR6dXbdxhy+jf9JYfoeuGMi8wIVzHFMUuwB6RiaXFamNqaLVP6VDUGwOTPXverm4bkHWEPjIODngI0MRLjJnE9p6i3suyb3th/wCHp28ngVZlTWv4vldi+ivVxncqcB4+4J2lVEh9mSLYtxSEVnziAsU8EHD+IcJCITi2JTy8vHy/i6LP07qGOOsecgQCDeH84G4iTAPr4Zc8/yRwlcVDtioUvcUn2IY8Z7jwUtcYMRw4H03YHL9JzehyH34q0VRgkTQYEgDkS9Nno9fj02jNlMR4uSP6GPkm0ajM2/eMB6WjEIn4Zk79T0Cl792SyDljOfxkHAwfgkNoHBSRVnWIfS4iYcfQwI4hLhfKuiHYAQiE3NlQ6Yph87ta2Gp1dmvkiLx858bcLZ6BGT46PXNQA9oFj4wSHBTVuG1K8lxu5zHxjZE5FK3Li4LDaEwzIDOEsoBRzxqK3UJvRiILuHJn+JXJiX9s1FsqpeME+5XQIXdUZZpgBRoXSpit8Do+iwuznwA1IYlLRFAMsPnO7gDm9iIXrmDOQMKNcY4PmdtE/Q0DOXhm7TJkRgMPASQlKjUU+QGtmHCvztjNxKYaVudhL9xhqKo92lmeP+cd2yW/FQbhpEX8Cv/n2IS4jQxz3LzBqwdM6VP26BByZ7YmTg/EU0CgwsxOTxhQY/BcOzyZGcjFSlX5xipjWGNduM5phhh3BpdPSp55LNNfjJYd8nTFzvZQidzCZkpkFkP1yuJA14rGwYfPA4s8uWogjbvmQ6+1CnrpjU3vgABwGnyyNUlW6TyUsTfxwx02HocTiySEnj6YfR8ecAhgcfryXEYBMFpO+WIohxTiMLc+JuErHVSCOByfDq2WC6bAqm2h3HEzGY9O7PMBupqbFZIn26Bp8H5H3/mKY47F3zCYUzXA4gbPRKQAYEh0rpUvpQP0wFB+e0XpPwPgjFdSHnNmPsepNKaMkaJWPvh8gMlG/iZCZAnRYTQSAnSq4x5IP65aHmSClGAy9pX6OqMUexcfwlDNDC8XHDqy3HLkWHI0dTsrCAcWLWifNDIeNLNMJGe7s/KZzzH9OEQdA3in2oEPvIs4wHRyGs+fuWHBe246OdpUa/UkcGsIZjAzJv+IrRwiHvOSRpZdSk8ksoNz16AvGs4HiB27zLKA9ZJ8ZATha2I8xNUw192EbEhkfo4s9xZGNMRLJzEe5XSQ95AJoAhRrVGrLprXRYSbCFEOGYyaUmw53IGNKM8xwB2B3feUwi51pIp4fXj4ttuTxGPFbDIDRzsdhUlh8+jaG0vaCAy7kOCyXC5CVlrD0OiYzhVKHkU1dg+wOe6Dv4d0FaEeCFms9Gw5LMObcg1Iajo/hD6ctz+m0yV5mUwx7/nCGw0cgKTBmf8c/OTMEVBgH2NVY8xECpv6OrmPp/Lf9kxuHnMFGFezQFnqG24lds8UR0sh7YfnJl1ek5DIPK0ROzhfuHHIccCF3y3JDyZ5KbaBY4z2eo6uRxzY47H1kYocOh9cBmEBw3GY/7KWH4W2UiYPsfTgladYdE9nVMoMpDoYcDDKTeTgeqkxkDw62LEMYVCxDE05ePkoV3xHJdhO5SpX+xCE/p45DzmAjCsaHttAz3E7cgWwxvnLZARO/n5rXmd6aJr2Cdpi8PvztcSArz/c01o1j+gDx/u+YvQgw2On8kAv8IjIOwBEqdw45l2wvmLkQE2PYONreSTo+Z46eapnDwcimfQIiFCeHJjFwhryZyxmvAAEGW+3v64T4g8FsevMEGNHWeKT7xgz7iDutp01nNuxEei5vL2YGcPMbOh+McTn1bn+QkpknDTXiyNdG0yR8lXN69EiAgWb9ebqvz5AgY3ceOMaaCHjUGzsdhYjPrY4m1CeuAWeeTZBamZEh1VEnydRxSKT6fhRg1GjJke8bM8wwFqbVsyboMTtnnXUJxnrl8OHARMk+ZbRDstH4DMLtWw08IRAoreCOWOln2B/sahCt9MnDYDuMHy85umyPYaC2aKKm1QamWjTxCtKz5rNT548uNfYJpROpDh6HoU/NMMMMU8RYnTqryfZbDuzraqj9w+0Vj5nxgyMkqRmQmZMY7VFfRLvH8h/16k8FE0yIHEG3GUn3FTtL1WH+GWdsf8b0tRnKER8xhtM5B34fwJm2LXcJh701wwwvTezrHgATpMw79dtZb51hWqCy+OFRRRLrvBMqM1WMQ5NkouFuZ9GVkj09bfGQ4+gGv3MtVTKjY/BEdiZQKqydnB4G2XY8ctTYbyBgaqrvYMiUx2OhvVO12ItH+Hec6stjdsNZi8/w0sN+zoCYLMWd+u2snx5OTEVvHnDjUtYiOPKsNYH9mn73TsU4NNlL9Ytkv4OJediAqe3hcg2RaoV4nUCZI1D6+qwFi0CIT1PKHXZd+FL6LuxNzhZ7cWlq6bhjqVs+7OcMM7z0sI/SbsJ1NLkYTEpi78W8mWG/MRVOOrDGDff/yEwBmmGGlxTuAM5PZn1AdvrHOK/kVgrMsFuEovPwzJ/cacJXyfOzqV8zvLTBHBvq+9ERBqmP/0ryJ/virJPOMGXwIT4HYIb9wCzml0Yi948uWUpjz6MnouRC0aVTxmcYC5jxvuDAaVjMrmgljDBr0h7gDDO8JBEug4q/7Uvye3pxNh57+HFU5ScCpYeyj2o1ZtgJs1ndpeB4vdXRtYBzZS7VFiMWlQ2ZLzTDbpDsrbTzU1NG6YwgSJ2JhgXPMDO54Ojz/wwzTI5kXX/0a/qYQKhGUzp5JpmPDI5qAxkejADsnftn+uPwYMSaziMd8J
4uEDFNlqOIcQL/w8RTru4zL3G3wPgP566MePgAEJszxb6fNin4juD/GWaYHKk+eYj4v7Cd/CEq2wx3EggppcX2iqPqBt2JKIYAZ61TgpRkPdL0GdbEo216nKmWaeNw0ZMjN6CUtwdTH5KfpSnMMMMdjVH6cRYUmWE8HEkeCRcB7/6t3Q2ZHWnT6giBd2/SvRSWgRarFl858pVOR3Zz01hzzFBcKDwOnxxt6kyK0WwxARlHIDeAU+yPOWE7bHJXriSlzY2FV3aFI99bZphhUkywlnfyvArddh+XKMwwJRwh8Zi2f2iCKSHpceRhm1IV557OsN/IRf2H/YTUdc42/x3ZTEX+xKjOqLU+6lOidlzfmeMEGMIYAPGsU86kcGeHwErrlV4eXZw+B4WuNCyt0XKy9Erp92HP51IutmbywakryAn/wyj+53yVjop6m2GGHZDjfy7weu43H6iOiLozx995qJU1w+EBHhIdOaIYsTJL2z+ZEYDicHFpEHHvmHHz/mGCNnopRRrSVeSw26bCrozZK+MkGD+fDrMeBNK9NddPd59UOe5oTtgZDDD6zN/cvWLoDgo/97W0Yz6CkVHBmC1Uwv/hLyzUgKPr4dVk80ROpfISZ5kZDjly5kwU8QgDoonJHT8ZH/vHDAUWP4AhgeIo7gyHGaVC/pCNDOTtH2TWbICJiuGo4phybhB5xPPxczz07o5JZIexw/Vqw0hZyJqjzjs619JiIQ46PEN48A8nwoELLyNmvKgxqoqxdEkGFqPk44GZdKfHIZXmEVmVRTSGvFcWVUQY7HBenCEWlhsGKwhznkPWpB7FM8MwLVezNOuBicYcHurExiT8nxR4GP+PjhmXI84vxynjFb8sj8H3onk5zP6cGBg1eJReoiUxcaDiOwObcFd+VJqLil5o3JUZsNwaH0V2SNeeS5oxSTPlxsFwguaTHE1c3hXxhzHRmO7DDrlE9ENARgBGRMOGkACAjQGi9GNQqCMUaLLb8o0oa7qJYuky+JJ/tMzLyiVeIn8ySXEkVg+Zct4FsowYUyOxWTO1jStaqGqKoUtYNcePA3U38PlC8zntR8airjS/nerE+ZYds/8MeTIW79lKJbIkfslE9g8P6IARH3Is6aJ3c0mlBEusDUez7oRiOCWbino7xcMj5FI+85x+gLjrpR4ubb6SRHhgMfEgOgA4iBeU2Aml5SxSCCFR0ilTMl+auJlSBlVpmbMtNriaMXPyVs/eFeiO2KsMKnJ+SAvMDUskTMSACIaZENEYgwhJJ56SSOT010Qs7VyJYXnnk9ipjGOYWSNUyK6xo7VYKqKKeixfmrhPRS8m0iUyvAq6LqZ1WfXTSqJY/KyATwnyVJmjRjiqOjNFAoxopw0LQgAGg0D7FsznREYic6Q6k+3nYlk51KbCkmupGoUVwvTOEcNMtcHvUkULY9Z6LOpMj4Qp6u22pNGDg2+FfpikvA+2IMcuH6YspbiDjqNRRkhDKNIh21PHAWpjJCEggyFI72V0e7t4rpGnIqmzGik5znmAoyLV0sQZ/BnTRtll3xmd0j50GYCpW1zZUmKiT0P5bwSFP2P7Z1hgn+N3o6cyDg7nh8tK6jGEVDnxhLGCHqiJlHkPXBi1y2DH5hjmE+yxZyVGZd5DghR/pp+G7K0RLsCQ8uX1W5lewELmmQIXfImiekmMrjGKPlUM7V3jdLu0mZP6TBU1dg+0MWiMAWAEGslZewIzp/UsjGrPoWnsgUnj6scJlPDqoBSjdV+a2UeUarpcMXmzRDUfVl+IOAV3UvgcW5o7Prmbsg3tofuNjCMAYMAgU+JT7ZOPw4OoUnrQqGCSZi6mLqV+DcRUnCYi5mJ4wwfAdmDOXMDmNtuCg2aajrmxo3UxxeoO+l2mtaaXQSHDIdcLNUZAxtgOZsNxRDzncx44yly0EkzGDSmdzqPHk48cyh2/fDgDs5d3l37KpIjS3CfsaJ6VMwZnyjVUdkUyPiX/GRgMACW0GlYkgFRfTpeEAYDTO+qOSiIGQkEjj+/EDWnCMRtlGBnT6nh8JhnWXpMVL9vQmYjEjsbWxGJhxLuJAZ0p3DRRYItcmaAs0yGtH9sAue5S4hEhAyMjIDIbzlR9WvZPWt4Ooe/k2ezoAqX+DmmzokzZna0zhF8KHmTWbZykyjm3s/RusclTnmthaBUy8nEc7ytd/mkK/9urhkN1EArilAM8OBk+h70UNRUPTrIeR5hgWosnxYjollQgtuDSz6Tu75xVhiuGCJDCG6P18rQadlquYkSMJI2UgbQvfJhI43ia32SqI80vsINQG5YEQrYtB3yBAMYAUnruz9BCHAxGtMWemynWR9OWY7cDaWIMYYaE5YfWdS9m3u3HoDPkhMR4M/QH8j+rCofJf8j2xnxMJ6HPjnRKxHdOdo7TrTHRVWmRmJHc+Xx2Kksu8921dErHDE2lqLoSlTSY8zdC79w2rhsEr6du+aeBGYMMYKS7M9zsjIjNkOONgbjLvs7AaFinru3PWDjASOqNw6KTFomH5l5ItDACUqJzU2vnirZ+9JkacMiZY6Vl26Fm43PBaHCZGthVIodO+k8DYfgz5/whwGDwbJp2bOZXKrRQgjLhlxVCJXbxcClVuFz+XNqgyHD5HdDyBRPhAEyavD4eRw5NIc88BvI/bfsyczwQhUX+v0M7/EsOAw14xJc9DMOI2NfONWXOSNCCX1hMNmuvZp/L9pxk4kVO7BQwgXGZt6un0aYThljK7Z+9lCJObt9Hm8ZFeoVDqqWGsd34yFugAEO8oGwOyXT4gYmSIlXUCLFvNdKtCB0AxlLbcKg/Njn2kuQei1DwthNg5s80CrBz7GC8dKbC+neeyJ8aosNSGSNxM/DCS4g29MbEmUNm4n7chcuEwt6z2t2NxFaYMc/+IGcijINdqJj8c4ldkvJpcWD9RPJ/aFsXNdEMM9wRYI56RT6WMux5iEOA2dBiMolu1KuD/bPS/ShrQObDRKlno2+pjjz1adtT7t1FY2eEk4CpmFg+oHWIRU5J243vCWDuL5T4m/nciheHlqpwkTGc+BPdZjD5zX/C1AfucPZzz9hLkiFNxiQuZ78XIq1JEcYtxdhdg7OfnM4vlf3OVcm5aunkduVrHuLOc4jAWdJmiIapXeCmR814sC322pOS8CAuNVFYIYfyFHZKd+/5zjAOsnbEKLqPakfO/is8FHmamHxA9hvwUPmWE9YzaTLDUQbnfmCGtTHXETH+xOgTkcN9tCIDKn4fyzteuk+WWv8QR34YIKOBEksvcTlCsz88tYDDLYeQeUKjYAim37u57OfA+k/rvUQRYvzz0Fv/kCosJDwxPgZSOXsp8zMn1sciB5YxRYndjRQOEdwOGu+WYXflnSbhqvhn3Gcjkyuf2I4pj511klH6Z76uuznbYyAKQl6bmWZTAwJkQxF54wgL7Tk1pGOq0RjEIOtSI21ClKXAO6Z7WM41ufMQC/2oX6ctDi58iVHeHGNr/ijQn1cuCJja0DGDqcZ9ZpjhkCCn9TFeE1Qw4LOG3cCCT7kJmGwgFFp+ZR1xIMw5eiVROGFgM
mUSY1Y0ZOQAp1wECNfwJ+prGn11ak7EDkkUb2dzzkQajpr4GbrRyjBkRnAw9a9o7pf5TPnwT/oKlj2GWV5BAEqM0wMn9gQZ7oY/h3F0Xs+OU4ydw/X55zN0Ts5eK0tkaMLFLpETMdNz/F+qMMwD0wozvTEfmZ060r1896GDUchFk4eopeLjJYnwvu5a85IEZ0b/kDG2LkKNF3JhJD5ScaXSWb5DNX+pUEtP9AwH2pl59OYlM8xwByNa4hn9GjUCFj1fdnF3HSirwfPjAOUx70w4KI4bxCLiEHbfgcMC2RB2LtiRHuWA1JUwXFEWB7mNGFGKgbkXGfDJF9hxdcYQqzB5H1Jfct+jlQlJ7lwwPLMaotwlYDbTn0o2LnYV1N/dGEiWvdKfpZh6/bOLizIjEqliZAgQ+/TRE7kF2amN4/MjGEfNUT5UiPd0TnNG0loHQdn8oSPZ1p1KCXhEP8jFvZL9enfa6naGvSBL/5EqLtfbc9/HRnZNNw/imMBl8n/W5jO8NBBtiJjGBPI/3EtohLmXN0fy/X6EcVKGdGeeaDlAMUicNysmSid1IZIxEXVL7C/MUiWuSgkpjoI0SqTp+K9kRzmGVXJEoxRt3NJ0eDhLUxyfvi3YbbvizuoyQWZWRTbHjG85Rhl2Gw1OPxgWI/J4C05deVgvcRzTZikOmpgH/uXR6BuHFYYNhHPuGaDgKB8UZRGyU0izQaIp5ZAOpuSlcIotGWLHE/PCeYZpIQ7ixb8jPYBJr8/37mIb5KIhxRBPORKhk7gB2dQyic4ww52NUQu/8k9mvnPJrcFRqsV3GTJGFqdfKAZ9dkIcr0IIFyTEyxLGtYzGzGbcwqQSDY0cBkgtTkCOpz2V2C2Yl10lEfEjIY0iK3pINH/IxazfmTMAipRIXy/a+mkrH1JPjigKAyOzOTIkjrCb0ua7VWJAJ9t0jrHXI8PgmRHOVElZY+839WQ2t2QFUHgVcxv8FtbmpZs+ttFu2wDOnYFBBGiCwM8ecx74dPlz++IiTKsopbGCUuzH7hJTw7AYx1FD1snjfMvkNyXhkpfLW3FoCw/Ok8gXpTgCcPTpO8MMY2LoCMA+5JOx9AuDAGMjHzEP5/JNsoV7zpyZXPcNhE66RpExM4gm5YJb42XG8SKLQy2YBmHi0HBLTuAFKN/yG7Mf+1Wo0c1KR2pR6WhClXnpCCV9DFNTpbAkKJpPNp3tbuLDCMDIOeGSnVaNcbx/YP3nk0hKWp7robXVjgAY4k0Vop8HK2QysY9kw4mIIQfyZHoYbDgxyDcbbhg3lHy7cJtLxjzpcV45YL4qZSY+QGombKql0k5BvsHyEh8h83YeXOT4w9v4M8ywH8gujRwdHhmGYWZEOjKevjTydLbxMsPQqcB4MGB3++/zkO+QVwnlz5RfjAYXB3YVJHHMQUw0/DdYfJ1Kqlz95EXZLnFABi4yxMYeZ+YUM8CgvvHDqc9UUJ8LZIdyuzbfRqUpjGPM0OAsu8OOdAcqLW+OwCNM9biVuPjwsGQz5tKY4EFzmMKtVFMxMmM6CoGFrpCqMyb3x/WgZxiBMLjAKbLGSLns+9w/Br5dcsZjtN3zNLPI6rmI/cJ8MofATpup9lqJnNC7nUAcNc13dygx0iFWockwYXqLttLmyVAmJzdGC0EIBQjCkYoBzTDDtJGyfyYOgAx7saQDJvN3Ju526QhA9LlXsVQS5OShD5RcTN3OBJPijyQSDvEm1ymxnuyzCjz4RB4ExaYg+vdfxiVBWk5lx7Hmza4yHFKaUi7CVFMPk/BY+Jd+cQQIGEauXTlsKBrGkPpe6kEVXo8c6Ens5129Ee7psfMrCBDN5dvBF4yfziQ5094TI+qWiIglZMxuEb3fJckbuYnsm3I28RdkwHgEGZLPTBGmmOleZPjBtcKBIiW5MS/Q0nG9wfVC/TGJuUHMQpx6emDWYLmFEPuadxZhZ5hhfGTtnzGMtCkgkogTd7t0yQZmM0y5yGMUj4ufGNk92XkWIxLeZ+FzkOZRarvHQdXD78wZWxAzuj1ntRcx4lZp/cYcAWAAOoJaNees8sCVHqw1GdHwqbU3Rd6cXhHTLnGqQCXFwrhQw9y4XIWxrOh3nhtwIDXihIPS9tNBIxUwST73sV+mrcqB58GDW3umQjjmZXKHmk3BB4B9JcwhQNTpY5E22KQW8l/CH3mSxkIDo/A+J9ML8+9COMQUGSMzzPCSBA4dAZuKoCkdvsxGWvYm7KMpNUNCBHvDjslh2WfJWzywcMrujchurzXKje3sL7KzuwcXORbAaTGe3gN2Sqoxd3HHyBkCUHr6z+3QA3ueH5DecLbIj6U5DLpKpifuc/VHeHGQFINTX9LmWe6Z9PhAFMbbhxLfXmRaaP8y4ShisZeIzN6RsGRohg8adx824ufcjygakfKApkyIAxXCRwxZ2V846SMjmVIuWnSBM70/CTAhA0M4qXCkdTHYTmSGGV6aiCfjl6ravSPXBRmKo7o7mn9lI3/5FMK/BQkxBMNMi3RpMPtzdGpl2WLp1xH+wohg9rC74xQMDja0lzRGOLgU8Vd+O5d8S40pggdtNKUzoCk9/nuAemAfbIKBj5UlSDGH7JWsO16OiZgv/VJ5NYcVLV0TLDxQHAHY1eqfI4f97bqY+Xs7fYAhoZDpFimRG/H+tOkQc3IU5diJDf2eXus+NmlLWvpOD05j/ld2UX86cl/s5YPJW6n9RRAwmtQwVkOmeGB/x51mmOEwImJ5LvD/FPtCRjjm0y266Zi9lbsCmefDjo7IyIjFrWZGYbT5MX5Cu1edmHsRy6oalmK3rZA2+iPa7Lp400C+xZAHKh6HVWu3jbf3qtEe3t0TYvdlrwoec9Mlxg5oceyNhzG05KXSAk1A4mG78xQN+uJFLl4f+JVDBgGmAZ58Z7J9x+Es1VgYVvSRVYp3Ptuh4rsnS9xfMqGVgTjZ/Y4AxYBReryzfPLJ0GTSbP3SCE7zQB2EM0gHh1KUHtMe3x3IrfxJlLvLHnC/p5zNMMPhBSdziKc+AjAqhZLAd84MLrUUclFFTEX9MdzQlzOb7oyqVLF4aRlctEl2hWHyBLNfSyuZvsDFb9licfpfXNR0g5YOd+wXkgOckr2VUzJ9UOj0AtSkAceYx7HHRini4ByAuDEyNZ3K1JXEVAIYxibDixO9FfejfZlLM2D0omuffy77JQMeGETFoMEUMPBPDxFKOX4qg18HA+bwoDGOBQFDpHM4O5CXmo0dGeKwH02B8XynFLcNGXoYO0kAyKqxKHlMmbCj1SsDM4b/UkfnHSpG3DcUNiMIo/eRnOSUH5TytCL2x0S2TCwOXhpEnmGGIRgsztzJGN190gAwQklh6VNc+EwXJStT048Ndp4f6A9I2b7jRDaLIwAYpzBFObGj9ZIL6Ebfyl7gcHeDQoqY/bwNdgIOTF1GGKwODjH4mhxIBbltYBKUMcTUhjUOzgGIq5nh5WkhRa98c/Nw/k87iIPVl9Mv1CCOyynTa3dMiZBsVoDZy7nMJsYwS+22Y1ibTFcq7RPiyRgYN33By8u4XdnY/D4grzui7Yuz
18YdBChyYpZDC5kNeTdT+ZfItPTcXnh5pceZSEa6QVIyBHMvTlaOGWZ4ySIMxSThP9iH6NIoY3dwL6cXCrZryYhB6rFIinNieELWlCxGwUsdjgNDYbETjPhZ+hqnwuqZPZB48PXAkKFhet0ep0JZXOZpcqpZCi1RqjWna/Yc6BSgtDc5LQ9mSD4Dm3uMrFK+8tQYJ99Lk56auTo2dgzwTYWSu5g6PAQHFps/QgYiYjwtOxfpzfwJkfVkx6jkbumQ2ObJ/PHE+41LO5kILWv2XOCq5Pl0tpgMgmRHse9UE3UQvE9Lq+HRn8yLDFCyRdAu8ZLwtGaYYTiwLLQ2XeNklPxKh4HTynOwGisXZOHB8xg/ED0W/07FMpOKFMJO5WWbqu4elUxOF2L+7pAmSMXHEFP0SR+kmtpQcVj6U0c+6hyXI96GIavWkz+ZFsyXtbQteHhTTobbsAZgPxqjMOE+w/0j3srF0KY3AjDKfNt1Bxv7hb0cHTWt7cD3qbPdIYZKUSSX3N7p0hRKMdj3B9IDi6FYncQNzIWs0mKqNKn8xSGS7kgM80yCjPXOA70d64ayON/gM3wrptCknT72JGaY4SWMfRYweeuwmHd+UmZBLmcGCjGOGQwSSH/nwUOl9wepwHDRnFwvHTQYD3sia6yeYtcs6/jA4CKnptEOPjkrFw9ci6SbMjr6eCC5h5G+rIi5lpp6LW7bIuD9ABe+DAcmT+4PZ5QUYVLXLV7fEq3Q3YVjffCYrnt6p2KUd5g8sc+W2SixMknWpQb8TlxQMmAQKS9MX7szEGukzIkLHGu5wbh9+LdY8XxI6FD76zPMcFQwzoZ9e0p+5FVOC8t0fGgnKVwYLS5GvkvfyBnHkHor5y0ctN1cCJRj3Dgch6zygXZMnTnGeaU2pjWyd01blgLHMjwez0nTfQyy5h7Zp4kVd4gDUEqXkcSKRt1z/Wx69J2Kw8YpNgp/J/v9JbcPKQ5twfYRu5xANpgTGP3OiHCMwzkHj4mGgnLCdrwk4qdS0q10UPoOAee26eMkihX9St0IUYzfYSK7IHETJhUzO0cUZjgsOMzC/giDs9/2g8rlvTMnL0syLryHWdu2bMBgzMJnREn8blqWjG2mThUl+aWsnyFH4gxOQsO0Himx64aplomrOdwo59hliYuY5LQ3mk63RfiOcQAyenCMnXyGefy7enFY0xd+7OiTF6/wQDNH5/mEYdFURbmQ9O1TELlBt/0ryD4NhE0HuzScMyGWtNwNbxXb9+AwxWxLtVJR1yWHVoWTkDguxCFt6smAmbh+aHFgUtXUdcz2pDQNoy+xrNtzV7vtg4Yz7IAkDDRrqOljv2cCjJV93gTJadF0ZKCkv+f8gh1zK17kvP7JP1l0GPaGkmQK1nks9BIrusyuS7tCxQ0gEzpiGTUna/Ciy1SOAWNBakXH7jIqvjFdywoB6I4RKwPrM+spJrttDHlpdzRNhSyLHJVq7PJxhbSFXPzOeeZEiOR+OEUgsQo5qhTGe7ZH7sLtbslEiOyfsioVjXvrFdMr6e7LEQmmnPxO7xV19FAahckzeSjfB6eSxTyceL3xy+NHtQ49Es8m7ecNLiQ1TVv/CYMgQM57mAJZjiqLvbQQqoA7pRccSuzrjMNRLZfu6IMrpaVIi8Z8GkUDYkRuOXcjbR/vWP0x6BOJ9+ym/LmipQzy+KmCy1FokoFrVOob5WdNlnoL41VhKIq+xMCCzJA+m1Xutd1kl8NsBGAoin5w/ImQaQqAQh+YNMPSL5gycdK5curLOB028QGS/sSMkCwFGHxEXsFtVRGpvHGniu0FuT48jQGB6ZV0952c87wKEA/0HJ2IX67Oaf97aD8rCMj0a8lZZBFjT6mctxt55RVHDOLukjo5pvjONFzdGQ4dim1d8JwjTxlLbs5wpFAiyNLhd4DYEOYMA+TeK1OuCGOpjFL5smcdWpD/0eRWTIu8yIAZINl+laN1vJzs/cbJITm5yuamSqbDn1HeeeEaF2hkucfvVcOcikyief8gfmoiEnOhvaYIBJBTTXA6KLD8WM9zir9Lh05yjRDF0ydpldxrHCeW82vDDA0kNwZeLaee4bg0UdfH6AwgiDaPjy2jJHWKK4KAYVfBeEzgthmNQ7KdkMBjYxp1nmYBObXaf+RjmdBLaUjm6CDXF2LejH8nPD+iXrmwSuqEbkQMeXuqR3TcFhToVMAO4j2n0uKxTUbGZG3QBFQ6avx2lMCD5YvJ9/Rn0UMefGJoDw7miDEgMjDiERQSL1XsbL2VBEZyV3Zu6xIbfEhyOaUzDR5KRH3KBmFOC+wy8R0+MJgmn2yFEFlxzHGXSVlICKmltVHfSFMnfDra4Xony74YwYV0AQo6upSkhcTjiqcna01K5aLrMvU+fxgdgN1WEkf+jC4mFgVgoUPt0MFS7+ScRihyCBsGRAIwrAGJgTD1OMcBnahMCBw+EepzAmJmIABmNoDAjJhszxmqi/C8j1BBxIf/xb0kqeHBI2/4Tk++jMI00p8awZiHiZo8ikIkHbw5suo9LTwzHSR9IwRm7kXmDTMgxRIegZnjzawRmRljwX4n2LlpVom9/5SEweITcUQAYaAVIYoqcHacc3dIK7wZpoEoJpMSx4P4D0bGDUZSPLEYcrzNAMiIyBzHeRAw6hGDXA64Ync8DhdNMRs2yZmiQ66V2rWQvTjNOsbZhxY6RoEJTj8QGjSIBMCa0bAhQuRU/SJ3GMNhgdDmMcwALCiKiCIksxyiSkRhVEyWy2GYUpnkHBVc4TKijaDeyATCC+kWmpzYw8zNKUIeNp6fGMPqEAVlMelJPBCz4xnMmHxAzN55/yHkY2YGIjJsGAlRaAZtgBUQASITASEYYDSIbAz7QIYRdaTaJTAxUOgXUBjrMfEsLYwtgGQ8Id0XUgxymyKlePBsNCVDeWqlRsTxfYDR2R+1DlkQUIPIZ+J6Y6rHhc+Gp7gjgAm/MzAwGs1EiEjRKeqRrZRq7b3YubcTidTJ6KNkkRJmLsZIPcupLYNyr0/qFQ3ehztGE9w+RLomsv8Tt56Rk1AmgzGIBGiYQSEAEsXvqIBJACIaw8AgGBGYgYkIBqZgaSByhulgimRNR9nH0goxgxTeiGPaSQGHlBKHfC8t1dSAsWESmuIY8igDIBqDgIAERgGJQAOAJkkWkAZNIABAGVCGMYyZGoNs2CAhSEGACMYAIxtGxMSQCz3rqBaMnJNdMbU4Vd9S+peSIuOAj/dKLLn3S3yWenfTwmEcARgfaZIPI3+yc06aT+JTUHfVZqPsj9j2Y8PaIBJAoH2BbBkmClS/B9oH8vq9Lc9vee0Oeb6URhIbMgwABi3HsStL2DiF7hljqiyIAYiYwSAgmtCBNvGW4al6RwMJI2hwoNi1CbwH3P7aZrG7LVX2EM4+HEjruNyNKKSNceiTIdoEczBxBZBNFCZFowHZgBGAKCWD0BqEAGYAw0gYjwgf5ZlAmUGyolYaTA7JXx18j38Vxgl4Ys8/TuPo0vXwACN7P/wRur/h9GYyCJqAAQ0RACAAgQLwlNdzwOeAeyDQnXfIAfC
ZDQppAAnQgDAAEgwAgMEwmpqEr2bYO6YuUnIifVyFmLdVc6eDjTL908bugSO2r8JxKgYAJgRjFJL0ARWDFJIAQIDobV762Cer83D12Uv1esNoWTl14uTFC6iN6Xa9ZksFqnb8jFg8rtlFspEI0JBAMAYSHyBcDDmYHpoRg0kcZUe/a5q0wkG3D4kRSeRdBQSHt9/+NSsym5ekKElctvHjyEkHLR1WwiQ2ZwwKw6p7ffPmU1effQaVOn32THOrubRcU17XrSBrgwoEGiEAJIIAZo3a8wPuelbl2N3Owv1WYzkgGy1iFMRooUGOR4TTuWIyVSLZeHYwvHFgEqHY5V6CLAW7IXg0bHm0HYAi4m416BnGICjWiBo5IDDIRgc+IqtA2Y4LDKACv90iCLTRADa4dVGZB+kS2QCAoAEIGZkNETIAjrXO4vYhHMhIzIp034g6axgtiNf4pNVEQWuNkDjxAxgH8Scjy2D2LdxRrHgbELdR6NWGYVBSDD4BAwgIOOgxdgh63sp13Lwueze319c6a2s3X7h+7P6LJx54BIXcvHpt89amcRpzJ+86ee/D1XMPspwTABIQOBoRxoMW8FPFoSp11BOnz/8jYs/jvjw2clJinFJNtQWiVMMYKGIY1xEKwJgg6Hd0+9bmyou9jZX155/47Ec+cdeDd9UWq8985gWv59/1wL33PnAO/H5vdeXq888DoHHmX/slX37i4dfq+TPKWJJcAhKsQ8vaAFBuO+XkiLB4ZKC4bUSePqnQG5c9MKYLUUaDHGXHWhWIKXu0eGvInV2jNJGDcAAOoL/vsvcOHTnIcUP2mXLeyIRjGADBqN7a5U8//tF3+/3W1nr7xPLixYtnN25s3H3xbLO5PX+iVnGtoONrZdCyDBIxkDaClQYOgg7VyHJQCAfMfLV2lu0TWD3l21WJAjnJHaOAQDIrLmNR5pfUHACG0ai0AHsUQ1zWMQ6DWtmNA8CR+3YIij0lDLgTwq0dEDRrwh75W6Z5U2/f6G9toPZ77Xbg+9Wq3Q8UCexttftbbRTMhOutvsfVE+cuXnjF57in71boSLtGKNAAgyFKDJ9DPBJQ1AHxbwaA2AFIdV2AUGklt8ojRyl9lFdNMR9N6gDA3vrjDCE4nO0TjXMJReADAPvcukrty/4Ln2peerq3vU6k12+ucnPLdb0Aq32lb127MXdsLpC241ZWrt9YW9mgyrxtN6z6/N2vfdPr3v5NjQsPsnARBCOLwUDYkWyuw8VmBQdgWsXbnQOQHzWYXMKNMBn3lfLhWFd4Khej8BTY0H7+g7/z7Mc+4LVu9Xu+ITtQ/Xa7u7bZIiFu3moiWcx87mR1Tniug8eXayS13zO247zs0S88fs/LlbVcP3s/izqRAygIBAATJFFQzJyMGg43x5OC4rlCGRQ9paIDkHt4l1RIC3scsx1HOADFJ6eLfZ8CNKEHPDLBEj9m12+n2CCV4pB00kYm5BkDkxvICNrv3Xz2Y+/+/37hWKNGVqXTU41Tx7usPvmpZ5qdbmPJbputxYYtAq64FhhbkhSGUYMxho0W1MHAkO6TVsBWd/Uzfs92jj3iPPBmYy0gGyEkmzjEiGmmjcafwxsHrB9228pJ8SaLu2Dhy15S2y2iIG6Zaz9B1odIF+4ALnQESEvOKPANDMAGEAAD9qS30bv1VPfmM97miuR+0O4QmG6z1e12K44ESVopr9O/9sLNQGnpuKvbvVtNf22rIxpL973mc972zq8+/9AblG1bUgoUfCSNVUx/49RK5shX52jabIq48YOZ/l3Y+j8dhYgGAnY3OMLZnnhUCHo4wRxFJ8GwBhkQeEFXNK9cf+z31z7x59i6sXljfWt9s9v0WLrbvaDb7do1G9x5T6l+a7t6tS9lhZF6Xo/BaYiK7yvavv5k6w/Xb9x8wzu/8a7XvMUTdQEEaGJv7+g1WjgscojKXbDybk/BcrlOehh7WJVcMK5g8Uwb8bFEEMouRl+B8Lc+8ls/vfrZ929tbHoq2G631zrc7Ku211vd9F64tNX0haw2up6pWN681T82V33rF7/y3ouL2yvXaWWT4CMnXry0cPosdS5R/WT1xCO6coJJiER0xuuGc82XGP156z9t8fFgkk7ukRE/R1OgxH8bz6DfMZdptVopAwxGAPapT8YO9vQSHEKR3ZQ/E3ob471idDufa5LKrWvP/vPv+Y7PfdkDdy8v99Fgw5qba6xfXb3+bIsw+Lw3PeI3V2zs12xZqUGl7iKywUApzzALS9gCASxgIEnKsSzHMa3+6po/9/CXLd31JmPZAkM7KBzuIoAk8Br2iHjmabR6sljAfcGw8MkIoZNztcfJAod8JrcxRQwA3KcocbQhTZTNhPNRkgGbo4ZcL8i1AwMYZtJIKugJ78X+5cdo+0p/a8WR1Ou2t1e32+1Oe6s9Nzfn9bpr682+H/T65uaNzW43aLa7RlhtlmvNfrNvhGPNzde/7Ou+8au/5W9XlpZIWJaQhg2FhwUfdpT0PM7chXT4CiPexQxvD6JYiUaJ90Qd7AaQ/InnAg3nqhFxwUNkkx1NGDaIgMYEIH0C6t/0Pv27z/z+f33mk5+4dGWzY+Rz26LDxAY7nvAChURA0FVgoVqsyVrFQSKtgrm6W3UtjbQ451w8XXdcu7awYGTtc770Wy48+pUAQrJJNfGs0faE9EgslCmvSdLcU4H22hsTswbK7Mt9YheOjX9gUIAm6D357l98/N2/2u9u3djynnphU1lus+WJSkXovq+51YMX11pd5n6AXt+bq1hVqFbAfN7r73rtg6fmXKo6orN944vf+dbKYlXYjrHvPv7w2wJrgZhk7HEwAkHiAER7SsQFSlU4Jilihryc2KZFmheu7GCp59wsSAzfcgMhl9qIn+MbqWMil9RgBGC/BEm2+nuvCaY+i9d3k8bo90aUNNVAiZvHCGhY6yvPX2pubX/ik09etT+zemPlC77kc5fvu9DQnc0rTyzMVz70+y8ab/vee4+fOLHASoJxLUdoMIYVAasg3EPUQqhIu4IkAw1S2qfPN3zvxd7aZ2tn7jdQC4OgzJxYu1GwMOqEmW4QM/3+6oli90iTrzTvtJwaMwsoa7mU0Z/Leb9MmrSBNblve0QjeAPWSp9OGln/4Z70xhBhz15/cv2zfwHda35rjVC3uj1pS7cCGnDzZu+zz1ypzzW6nu4bePKJq7ZV3Wj2t7raSLyxvbXS8bo+MArH3bzxsz9349bq3/oH/2TxzAUAICROZX+IaZgs6k11hGjqT4ZrwtG8RFGkNBjG/nx0BCAARJvJZE7EGbQCppIsJUpZ0OSw0u9IgQEIyYBhTUqAq6+/+Ls/+9yf/Mq1529+5nr72VW5Zvhq24AlbfT72lPKACMKACEk8XbAsOExqJrEpXZ3roq2FNxvnGhUq7W5XsuvOObZj/1Jffnc4j2vZBThUYHRjKM018ywS0xRMXJaJWW/7CIJiN/cQzFK09uTWzIyu9AegdgAUcytK59532/+2q1r115Y624oeO5qS8H2vCuXEE8t1E6eWpLS3trobLU669vNzRY3ar
WqU7cYT8/Zi3U5tzTn9wJRafQUQYCm22R8yp07VbnwOUwVAErGRsO9h5Ap3i0xJfrKorWJwo7N89LKlFgbO1AvZ81EUjufyJjItSBmr0+m8opcEaZw0LsA3VZBVeriwUgDtfgd4uhdzttkMMCCmp2WY0zr1k2lvFOnTq589hl1/dKp03Of8yrHrpqq60i5XFuwyTKMgW9JcohIoiZjmBmNYTKEgCbw9ZYPZKGUPdUxsMFbtwA3G6ffwOCGmyruzAfxbkf7jWL3KPXTSnvRrqz14mOpGGiYHMYhVRzx1t6wXzbT4bbFsvY2xiF/BohXs4bb1DB0xPonOs/8eaVzq9PvE9lBoLa3tjW3wJjanDu3VLl+OegE3ovXVvtGikbt1na7JcWGZd9YafYN98jyBLA2QjprK633/uEfOBX77/5vPwiOEEJgrGmigtw2guyIrAKJdzOFJJKPkBFEqT1+IZEqnGx3jfEpAFCq0BkSgkRabgx2OhjxcEcio02i0xgYWbBl2v0P/tpTf/xLz1zauNxxP+vJy61eC1QggRCURhXNlUMIDKrACPSNMQYMYhegtR2cWJL3nnFPn1sSrmwHwQMP329Uz7GCZx/7w0fP3AXuMW2MIDFruCmgcMogD1FeY6eXt9h2gX1r0YxpOuWkw+14kIEJkIGF0Guf/fD6zcsvbAcvrpkVr9fRYLSac0y1QvXjdU8bQXzuzPIDtQuITNI0qlVHCsOelKpqo+OCdOXJu+5t9/3ta/3jiw3XVtuXPtI48aCuVky0BCA1NhofEJD4xXk7PgnYpsOD4Z9k6XCaUru1ssOHR4c8s4+P+JnGkHrsolAj0ucDcABKSXGbDJ3S8PGukKJk9lUEBmQNdOLEifvvPs4bN47V6mcunj52rFIRemFOBqZPNs/N16UQitmQICmFZSOQVkaSEJYFgGS0YBQsiCSgMVppCCz2+q2NK89+1tlWr/6yVwI4TJhsOQ0QropPzk+KeHEvUmyPGNa+Sbca5jBMmGZ2KmCy4fz4ie8SU0ivtHUOt0ZPx6YBIJqFEu94AhDu00aaere2n/wLXnnG73RB2vNLywFSZc5tbm4G/V6t4VSkeLHOS6fnNtvdy5+9ornS0bDe6a12sKOoE+ges9IgBHQ2O3P1irTtP/qdP7rv4c/7ynd9/eCcsHQs51B6TrGtniocDgqbLTOmY1cYr+LnuJrRMoHE+h90oUxnyiwrHoskk4WTZoCcNuFwEIBBEaqV60/98W/eWLn11Dp96oXVpkEjGX1NGogCrQ0hCIlCEoHUmg1rIiYpQAoEEKDBFppMp785XxfHz54zQgNIZuhtrW5ev3bs4nGMj1bCw8n6RxkTUzNnXuQaZpjiK8fkrbrDm/vAK3FdGRjYMLOCzWs3b622Lt/sr24rbaMt7VqtceqEMzfvEoK0BNjEkv3Ak6Qtm7XWHpCQTJYkxyHC+bpbr9iWoL5Bi1xpWp3u5f72JbuyDCgg3kwaMbslUEo8YjxMnARMMtYwDjQJxGvLkp8D+6mIdBxsZ2dxLHqPsItKx3DGbMQdhTsCyP0WH6XdoPTWAWIHShYKlvbvuPBctDu3QSEZ5kG9/EIdTi7WGlWrKqnOVK14QhDWhBSMjvIZEaUAAhBEiESWRYiGDSGgsIAYgbXxWXtSwPZmE7Tq+2rpwmvOvvodbOZQYmzvc3waNsclY4B4kXyRTfcNOd4d3+sbv1xY+BwkmCyXPJTK8EipaC5SN48k9p9cAID4lCqjtbdxqbt2Wa2v9zumMgfNjXVlWZVatTrnttEjpEAHF+853+qoCokFp+r55Pf6C0B9YzbbPUC0bEQCzxgfsUviuWu3ji8tXrtxmQjYAFPU3ukZNoeTyHEfDc+rGWzVE/vtkJQ+510lXzFRRqHCil/GVH9PI5ZJY5Ij14gzTIjYozMAotva3GptNNld7eleV8sKMXKAoDUBEqFkDcaAlCQFakPMho2HlnGYhdZSYlU47S21TUEdu921dRf06QvnQVvCcYOgBQADfjq07u9RQoaAE1OzaNvkQrC7CN9O3p4HzQnJmGYIJjIG257a6EIP0A/UnFutzzVOnDq+vOxaZASQbTtkk4rOOZCeJiKLLIcN6ID7KKkiLc1eNyAROLV6AEDEfb/pNa+5p1+nWFISqGcIz3gNp1GGRYhEblo+pr5HI6pJHGmwtwLHCY4y5znXxrnv6YaftCl2a7gVM8/dHfHifp0EPMLuPwQYWpZCw+UauZBMNOWeERAC9rZvXH78D+dr6LmN6tycqNSl60rLFhZJaSMKhaZWcxABiBE1AwMhCxEmY1gzGGY2aJAsoxVSUG047GMAztmXv12efpUBSaANACYBwfJ6JEpi0mWqu0H6eORsFHKAxHYvul/jMzpk+9ege2PmgfQr+1P3XSR8mDh/R2DhS+kznDzCzKnjflmwCjZXTRA0Fhq1RdHtm04/kAS+7wvLXTxmGz9oN7esapX8bsAeCF11rM0NXwAem3PWOqAVoyQdaGQwzF6/39TYDzbW1lrGGBLhXkDpwxd3afROESOyzFjy6S6Y3tonXhIQPh4fHxv6BZECK+qBrMZNX04kAo93VgKW9KEZdofB8W3Rce4gAG3X7Xe0RfrUcbcZmK0eWJIQgYjYkHS4WpcOGWIdMEuLbOk4LglmEWhisGxRq1SqFQk66G5u6W5XCLl86i4QllNrhCwTbrp+IOGdOx45+kVG5W7nxk2tFXbVorts/lycrtROGyPJlLpPQm/hkUQGHIEm0J5CIaxGhRYajdOnFhvzji2QyBJEhoG0EELYtuXaQkiSJJCFlCQstiwJQJrJECplbCMMy83NHmFdgWMgNv6jDT9xMBVoUK0sMnZSMs2yEMdKIohc+IxrzDmlk9zNqc2iMzCiRAX/sPRn7uFiA3FcvXw2Q9oyvLNfU4CKBBnhDd9WETY6c8x+ZtsiamAGxEBpVM3H/vLXWluXq/OLyJVqre5U6q7ruq4rHTJIwhIktQoCQgnGEBsmlCIc8AcDDKDAKDQGkYUAdGpep2XZ0OusbK+snjGOQikAUDMIAcA7xRQOmq7Du2B0t5Tvxy9ijqlKo5e51Pat/vtL2MOq1LN9lwEQCAHYpPaoUUxS1BeCjlEawKL6XIUlaIGeNpLdbjfoGME2nX/FXeL0yQc1nTp+RtjOlWurnpQf+ujzv/Szfxx0dMN1hPEIAfwAmGrHFl/xmleFJ0DGsZ4C80dS/YAolxo2LhYktU8UDiRzxP+D8yvDEAKXc3Iyqy29U0Xi+0SRt6y8jzfIjscMdqDFNMJVL3GkHC0OfxphVdCpV6vyeANN2/d8cLR/5nh1vkagPMlcrZGwwDZQsYQCFMRSSM0gpbQtBmPIcmv1muMABtr0vWqj0Wv3bl1fWTrX8LUAyLR76HjPlnLsAXkPm2ESSVI01CZ0z3b1wkTNXlTEOUNnvFwZIA6hh5MbmZlQB+r5GzfaqkeGTi1XFufr8/MV4YZCRlrCdm3bdqQtSVooBUhJJEBIJsnStkgSCSFIAgP72kAnCLRgy6osgpxnkIiAACacH1lC3
ySIUmZrpqz8rF8AJW1VYl7Fl4bt8pm+OJyao+220vTSCY96PW8b7ZD4NKcA7RgOK5Zqtybg9FAsLENqid4OJIzKzQAMiByQxO7H/vJ/Qm/75Jl77WrNrszZli0sKVASCZQoLEsKAGRCAI0SAEExEZuAEBCBCQgUmICNQQj/adddFMBGuvPgdLo3G6gYJBCF239CZgALoFDuOJi4/wQeQ86l+0XOVdht+fZi5R1W2zqDw19CSJukkVRlYECBlaVjvH287/U1sXAdWa24c1WwLc1ou1XbiI31W5a0BFrH7uLnnr302Wee22r3PvPpy5tbnU5PPXzv8U4A6xvNhWqlrYK+b0iQ7+szZ84opYSU8botjHzgWHbHov2AEAa8IJmnn+KsQXAMMZqoxNGU/pD1MfqfB7t6JskmVn6iXtMW5mDXu0xZIDp4tkQBjapC6vNI9ItDgFwgCzItwQAGrMWTp++5ePbajSBoOx3v7CJaFddxsCKVa7n1ikUWsnRsEtKYXlcBK4PCth0gZNBICMJBaQHoymLVrtQt2zFSMIDRAGAZAAQDEG2HFW8Ikd9yaoaxkbcESvaHGTuVYSbNTiZFOn8Yz38vvDK2z7GX8Gu6guHkeUQwhpmZgDnwbzz3sc98/ENzNfvc/PzxxblqrSqkFZAhaZGoOtK1gIiYhBGEUkghybKkZUnHlkQECGRZlutIS1jIgo3jkFM/6yycqhw7pxksAAQQBIYhDqxkq116BlimDgglzRFbf8mvHRssZ5VP2vdGZ1VsrHHzKTW4sy/v1xQgGN4TIL4+QR+bHormaDoWljNQMyWNwgOMzEBIvmLB/ac++aft9sbJs3dbVsV1HCEtZmY0gAgkDBpCYjCIyGxQCmMYURpBYGxjNAlAQcYwkjZsCBlZE/sA2rIlWjbatY2tZxfVisITIASwgVj8p6tTGoQ4CMTxpx2bdXzrZBeZA8DY7FTw8WcacwTysTEASAZRo9BjOoiNAMhWZc5pnLaIrLorahUhLS8IAj9wqhXLRmh152x37erlT334w09+7GMrN6DWcFY2AzE/P2cb7m4vz9eJalcDYy8s3Gr3X7zRZDSNivOKVz/kuC4k4p452fCJIT55+GCbE5OZSIwlMaGYQMkWefExYEnILLWbf2jdx3/C10vc+/RGQuUHBkN8e3fMPesG4yHxxpKVGXHDAAAyEFj1hfm5WgXaJ9zeuZdV5houkEJpsWJJNhORtEEIQgbDQaARfSYLSUpBSKiM0RqFZZEtQKDy+4tzDtmSK5XG8erSvEsAiIJj9zM0/0OLdbYmeNcYorEmI2TOKZywJSJ9tpvA3XgB/JytM5kbkIgfHsxYJEZko4LW+mMf+NM//Y2fW56zzp16kCyJkoW0hSQpybYc6dSEkMgMqAHQMAGQMag9goC0D4iBLYz2gqDfJ0GNWtWGnnRBQM2xkKgHustYAYoHWaPwExeWApdVe1gEupQGudh7ik8Qk24/buhzmJwuNkTxgXQxR2eVzyid+pA3pzkFKOd85gJLRdv0ADCydXJeMxS8gpwy5/hb+EUjAvtgi/7jH/j9XuvG6fP3aLbYIYOESBKRUQMCIiEBAmitBAoiJBKAAGiICAWzNgyGGQRFu/kzAYMWqI1SyjBKUV1w0O5vPPehY/d8CWOdERFN6kCqbBGTK4wAnJyNvXeSDUPBSNxFVrsMXpRkkby1t4LvI3YfZTmAhRvjIN2JOXMtmvITXScEAA26o4PV3vYNwx27JkmSVp7vdT2/z+zZZG+vbm48+9zTH3lsfe268tXxRecVLzt7+syF++572VqrZYTqbnd7XeV5vNXub3T719f6f/mBz15d7R6vOTefv3r63lqlVkmKwQCIyBkreIoG0HhJxfZ7PBKSbC4R64hsPGHgC0A2zBhZ9xAfqAwA2TBWvBw4EUOpN9MlAYiPBN6x9AzDTqqZoYhsuC/lvCGEjGjAMJPqrl/prT53fEnPz1cdpyIkKm20qElpCUANiGQZEMjMSMzM0A80AIow+gmEoScRGEbGaq2iNavAF5XA6E5v65ozf8EgUTQExoNJ2DxN7n+poMQQweTybnVZadrDjM9x002Xa+8NHC7Z4iSOMtQGGl4gjKIayBgLsaCvoP3Mb/34jzS3bp2fs/vuSWm7vtJAQNJCKQUisdF+zyAxGrtqE1soZLinJ4JBNNImIYQAsCQiKseyTNC3KthrbhkAy+K28pwTHV64RzpzRtjIKMOxsGgGdToMmZO5BZoiFBYPZOqYv5OiEQ8GZwd5jjBBRjT9jlyRdtggL4My3zOpjfA5Um/i1NcAFKufkKWkPDEXThE5O7I08VT1YciDXPyS3l2WgRkIGFS/+dyT7+1svLjYWOhu9awq9wPFAsEo1AzEIFCSEARSOkgMQgIYwxJJohCoBCNiOKHfsA4jhATMGpANMwoNoA0LA47lWhK2/PaL1tz9hqQgRE4GKxNzYsAVnPtAzs9UKMWk7TFBuDH3OWYKaTmVa+tDaGcnRuCucAhMsqIvn+nEPJjEzsiaUaNqmtaVYOsyta6Dt6nY5z6TYKF8bm8B9lv9btDZDtZunDihqscaD73mzSfvelWz2elvtTc9X847uu9XwLMqumP12/7m6bnKgw/d9YoHTq9tt9Y8e+3FT55/8JTSiFoysrQEALExgPFSdxiHv8euf2yqh+bdUBIl4yGJ1sH0rnIhk0XThXK6KNdXIVoGHE3pSEYU4iBOtgz5lwfDAeNzdUyybHozlCPtDIeqnxkIwURLL4wCJkYt+YZbx+MXzrFoMLMOgkBpFDUJipXn+b7WyjABIyMFvib0GYRGJRgQLWYhBJFElwQYZjJI0iCz6nmrV6594j2ddu/YPa+xK4sQTpiA2HXEeEtezBR1hlHITvdJ69DJaJezJ3K29S7cgKR3Z0b69tSgkRjhAQ/nPmFnponWu3M4BQ1BG0QV6Osff/ev/egcbx0/3Wh6vovzRrFlNCAQCwOkmIQlgX0h0IBQRmutUfWJSApwbOFUpO0gsrYIkcB1XSERNQARCaffDVitzG01Za/vt7d46YxYupuxGq89CFm+zHBPm3uD2MngRJUy36foM4QpFZ/n/KtjI+d3jbgIZS2VM/13Zq0SkxYY9/McgDTPFisZfpu6fBph96eeybkJudJBrNc5+xaFupjDuItm4u5zn/3grUvPVy273eoZ9nrbm9LSPaMEGTQsyEhijUyomcGwEpKkTYgCyJbSIXIt1xV2lVEIYaGwURATERMCEyGCEGRAC8WA0rKEr4Mrum+J6mnDFUES2GQJHEmukirF4cTRsi2xInDnAGKOqhMi563tqLUwy8w7+nsjk9p3BbkHsuyPczKuVZCTMPlEEEJdQMxoWIlgzd94pr/6HHc3IGi6UpPNQCbw+kFva77CytsGqx9UerX7ltZbjYunL1Dl3HbbC3QX9datK5fWbzb7613LIJLUDVRCuw45lj9fMUvV2gP28bWnHvvT1WvzZx45ee6h02dPi2pVSy3JSvzbWBPw2Kw0BhVi1yJd91TYPJ52hCmlmsTeU5+JJwHJ
NKDotchXSKYJJWcBDBKIHY3E02DI1Syjk/ZiuMywIxiSmTeAYEAzgAAGQ0KzAbUS3PgEsW/bTjcwXq+ve10QQjFwr9fvB55hNKgZNWvFBgEcyxiUyiAhojRCShkuCiY0zGQL1L4tmFRbmqZY7TQ7q52bl04+8PmN0/eAtBHjwYB4rcnM9N8FuCjgBvvDjE/EUnGZe30iNyARBBhLm8mRjpPkqofD65vTRJjyMQ1zXymz+fT7/8d/bAjVViLQvkKt+h4TghDAgMwCJAMGQcCoQBAzWtKSEiUIFSjbIccRbDShZTu2RUAkANkAWERsMDAK0PS6fp363DGG1wP/OKu2tfigsetI1iBIkq1OpkIZy3qYvTCEBum3okRSywzSRwqMjZzfhYU6lDoow3yGEhSNqjJ/BzmyIKeAHPF4pxLvNtdi4xQJn7MId5NqpozMJprgO9htgyGcdslMQvr9rctPfmz7xpWg3VJaE/iSFCESBAIVgkbWCEayIjAIPoIGNEQC2ZcSNFJ4iguSEE4dybJsixxXOFVw5lE0LNkIGKW0CBQBGySjDdlGc8ciIvcMLbwcqEbAiWOaoXNqaACybLCD3cuRSNytAzAxRrBN+iIUHB0ufN4xSBsZt60IQzpubOkaYDKIgQmEuuzffFxvPkf+NrIWEPQ7bd/3mDVrJU2XTM9xVFt7HWNVTt5TWbrYsNzm9Zt89fLzTzz16Y9+tnHsuDO3NFett5p+p2Mu31xlBEvKU8v1e+89L8Dva2ulU+HqQttUFDVW1rbf/BVf8YYvfntggSMqAKCZKT4BJi8O9k6DUk0Oaf7jzBXIEI/TQ9OJfZE4Bol6KVEz8UZBOFgZnB4ZyIYGQ+djt35j2nOZYUdwdAQFMzADU0Dkh4TzO7j6pPfie7Y/8x5v/cWep9pd7nZ6Xrfna/S0o72ur4VnyCitlDJsAAmAbMnaAAEL0ihJSmFJW1qWkExSyKplWcIWJN2KW7OdSk2Qq8VxWTuzdN/rlx96g9M4iUIiUOz+JifE3F5CHRUwF2iFEO1uPLGJMonFtqsMJkJpeYq3CjKP4ygEJDsNEDAz9bXxWld+72e+71jnal9bhrHV6zr1Cmhgi/oBgGGBElAYJgOBEACABhBQGAMCZa1iu1XJwBWngqiVURVbEJLruiSJEByBQISAvofIAYqgWnOqtYazcNo5/nJ7+VVGzhMhgaFwXkZ5tfe243GKLgkR4ltZ0bwPwCxfFdtoZ3Yq5UIA3L9tQHM+dDr3ibm/SN34ymQORYI0QyAwMPLA8w6nAhhtGMkESuvW9ma/2+p3N3WvTyAsxwJfK18Z0KAVQYCWZhUAGEAGUIQshGL2tQ7AkJQs2BAYNhrAWCBs7QEjd5XxCO0K2PMs57SokdVAt44CSbiEkgm0ZkEIutW9+SkH5mjx/nDGKMduOSTBgkINoy876QWOdy2ZjHx7QZhOqcU1Iv0j4AnE5jzkPbH0I9lbk6uIfcLA4ESEeA06s/FF/2r32geheQn7G8gBa+UpD4yyiW0BRNxv+l6vx0bLSm3xxElsLFYr9vqVZ9effuYjv/ceIRaXH/icU/c8cGl97UPPX3n++ZtBIHu+BmVqpJ98dm29zQtzMH/sxPq2f/XJF9z5E21fXrty68bNK49/+pPf+rf/XuCCZdkIAiDqu4MVfHtkhYHwytndMTIiNT0JJ8k5r22zrsCglJn526np/ghZti7R4Zh2E1JxqZ3rF5IqlAnT7TWHsRtOAgZAZkYAEy3yIAQGMGAAWPYQNIL2N+T6p70X3quf/3T75nM3r6+vNL3ttt9TotkLWi3dD8A3rH1PsQgMGq2V1mwQERHItYCAibXEgAQ7EqRl2ZaQgi1HOBVLSHAsYbsVt1GtNbSQrUq167ZXL29eXbnyzMXXfuHiXa8y0hVIEO1gYxDxzmmEA0RItWH2xDgEHSoeUkbRhMgZVXtIYPQDBYSxioEMRDDAoAN2ufPnv/IfnJUX+6bXF7Ykci3Qfl8ZEfgQaEYBAjUwGUMGAgbja41SWsKu1xfm5+Y6zS1fkTHs+d1qVdgWBtqvOI5SfVtUmYR0XU3U73bJJmZHotXtBcpvLgKj0cKq09IjLCqZKT1oUuOgmKEbQ5Z8RVu6jLgpwuVJxNnPqXa4Edpm9MWShKBEd4TSzUwrAsSl6mmnUu0+l+x+Z5HOTxoWYaA5R5Q0XQQe/I9p2hAYRjJGBd0gaDZ7ELT77RYajSZgFXj+/5+9P4+5LUnyw7BfRGaec+791rfW3lXd1dXd1T3TPfsMOTPkcBmSJoemSFqyKYgCtMCwAS+AScA2CUEwDNswTQn+R9BCwZAt05RgaziYoUYLxRmus3dPd/XeXft79dZvvdtZMiPCf5xz7z33fsv73lZdLSkK9b177zknTy6Rkb+IjIyYxHJST8apnpjViA1b9FYRElliEmbzEJhAo2dhFqJAAFNyTkFCZD54RkYM8mLMmc/hC7A3Ih9yckN1ucu2XMhhEMDnQVBzrBr+RPbCz3vn2NS6SIQLx+w1t8YWe84H50mz6dqIP07Z51RtjaPO4bGP1KJnPdswLe0o58ubzsz4IW4AnBzD9Xk8x7ZdlZXMZMaTt6oPvsjj90lLi41KA0bSqpmMvOrAS1keJdgMkbL88rOvh/xqRjq7+c6bX/nWd751Y+fyJ178sT/0y7/+1f/kl3/t/iRVJSjLZk1SpI+9eO3zn3k+LyfV3v6AUi2Ycq7sZo354SBTtzHYnFX4mT/58//Lv/K/H17eoiwQ8SJLwJPsufMywy+gQg/6z3fQ2jhF7fzr39dbqU4URisztN+QJef0/Fdbc9TaSege9rsICvyQYOL3DxqdD87cNNwiHzEFiE1bt63EvjKQ3A0f/LPqzb9/9K03jm7t7R2mg+hujez2sUxGZZVsFqHKDTCtU5MaIpeSspmDOYChBPPOBsF5Vk8agMxRFtiTeY8s896bIwtMeZ4VG4OsyAeF99tDv5mH3d0ChQ+XX/zBP/bJn/izKSuYF0lS0bXiv6cH0MkdgIUJ7CEE8EnYc0EU9HB0lmryROn0Jan7x+bqgKBJb/zKv//tX/9/Ok75sDgWKpuGOBjYKBMBw+CMFAA4ZOy4To3Ph+B8Y3N7Oh2X9fT65SvltN7Y3nUhr2ajZjqDRg4YFJn3wbmwUXifhWtXhhlbKIZllVTNJ2GPje2tjeuvbr78c7b9Coi8ifVNGQu4P5fIT6Bfuv4/YSB9ajsAp9o319bph6BeKe2/yx2AJ1v3p8L985LR5Xpe/LLE8e0H6wf0X7jrrgTN6an67VJKrVWH2pIFrdUnojp6/523j2Y10/DazmZoDW3khJP3uYXG8pytjnWEGZmaKkwY6hxITZEIwiRmUUV9G/sNQnDsHGAWVSmCzZE3hZAQJyC54IgSoXG0aZakDiZgBwipaOZEs0NWIefQ2ex1bgvsemN5ZJmW+s2iB8+mh1+pzwFID13MmdRHQotP/Xm3Mje+N7FNVlow96XCol6LSvW58FT60H1/qPd3YQJbncFdP6uCBKza5NUHsw9+F6P3OVVNU5G
/vO69/6fu2dmvS9pChA7fpkWhYHYJANY7O+ub5ZR0ZUkXY0qogQAbRkNXURYnJDFVdDcVZ1FvMimvpeVQRTrgHQgIqaDXEHRKKgltlJ1c0BBpxnVYfiBohoqABCbkjoRghUvCyxS5cJPQRDj3dTRCQiJAohAEDOKfVdKYUISlEAJEIKjGIIZGaT6Zirkbm5C6BHjgqmooCk5ma+rLkFBEMDxcAA4GhApKqoABAdgrtm1RjAzVRN3HMRN1fEnL3PlpIwcUeeCtVVzIUDKSN4IHV0McCoQOahCABRqGh9c137PowkjMaKEYlXNg/C0GkJTmcBvS3Azo+DzgcCPqA7SaFTXKPbN1/9Co0vPPfW4fN7ua3i3qEe9HQoYZ6IUJoKWEjUAwMxT7hc3qoPsxSL4Lk13JqEGmEaxQNYr0TkaKZKxIAAboTBXHqB3d4j+vaYp4hS+j7EIp5boxsJHCrCbgCnQ0NAoqF+gLNoVU0ur8eNkUyC6ewmwbEXdqAh2fkDn88ZutsOjPfUp84aB6e9Nnc/He98iwD32QXgRF90iJNNr6bzzo66rp6M28M5gINkcZ7N0jopQvJAPhTY2gBk7u5D0qaEGNwFkYHMAQAssBuISc8ogBUCLZu5mxEtE5ZUEUDdwERNl0qRqhkhgqmaG1IIy+AGEjjCkABHK9G39Pk7oC9Ds+4EiEPWD7qbwTAg6AgwpBYNnhJHNLPjlmDu4O4DMujwNcQhVGAUKLgzu4kZBiQCUHQzSIQUiGvAUd00dR1rrpiQKDKQA5MPDEjoDNC1HXGYjONkUrmLqhqzGuSiDiAluQNyBYFj1QRwlRQkTqrxb72ymOOYDq8/dmH6+MUUTOl4to9jPgPCmp9M/H22XHnHFCI/9/Z+bVQAGAyA4Ts/3fvCaVG4ktJL25SG0sOS9K3r+29ev/7I5a0+p9Csc0CObtLN9vfRiIkpsAJK9o31NYDKzHNJIJlQmR3BIRVwOpj5737rZZ7W060KpAdrFczNDAxJyV0AGaMWVKjX1i8cHszHF9YXPVTVqAj3/WwcgFFrEkcTqYgrZgRQQBQ1MAs1AqijqSOFzV4xdbujfNthzWG5MPFUjfpP9+wv6fQgvLdA+dub4/d567Mooss8SRHdublzcHRQlflRm6eT2KZZ6tqf295ac28V2ll57dVbTz95WXIXIqQ+iZWNtYqZi3buWoXAoe5TF8K4TY7Zur53N1WJ7AUy0kgsgzk6ibKaqaMoRmAAMiA3KVlUzdRchYlFHdDUilkGLIRuauDuYETBncyNmJwG8YDmiIgxVsdxNlV1A2KWIqUUUUcmRiBUE1Ej02GRioiIOmA9ZFoMnboAAFZ91x1cXdHMUYnY0Ytmg2AezaN6DzTk8ZsBmBZCSAU8VBigJFl0qU153WGNQ1MRI5oWLCkE4QoAoIqNiCGGpCpgsQlE3GWoxvX+In3q8sMCUBMMUZFh/z9VEQ4/kUz2QRDgPJ20yx00tRShfO8fr+HB8/OLe930oNX9BAetZdMiOc92a+3HbCyIAkZAjo80TPPuiNYCB5ZWPCbFWMWjXkYViriaEQ0p1wIc1c3BlCAj3MxqZhQiBgGRtisQgsXx/qzdORIDdEP0wYxGAgcKrBabuD0dPbSuk0AVRyodgRKyDW7UpR6yKmv9gN6R7sGsdz38rkbUT7SL+7o/Ydi88liYXDL8QVXTIkmXjANUAJL6ecuzINZpYFibVoQUDMLQOM48MDkUdXQAwhoEDDn1xoGJ2DOagmdTUaeQijITAIBbrNjUANEN1D0XIQB1N3NQJAQzQ8KKmIiIVysRiZBhGSheJe4vh4cAHFBX3pABmXYo4MKlZkyryACgqiEzmpq7qjIzIVgRIja3UoQjm2pgMvWIFMDFhxYI4DoExwxQgYE9xDCOselSijHkJKOaRRxAwUFMQohMcT4/qkbBoAfM6qGq4qxfiLlmdCyIXrIWJ7GQNGQhBx/lviy6GdSvL3o051g2juZWklVOBCftVod8AF8qWndmXr5LuofT8M5Pz4ucu94rHH/w0wXxfBc61SJr6bwEcyNiKe2oqg9Sd2O3u3Hj6PHHrpp75IDABJWB79y4iRhjYCfKoEJQNROwYJZMJSAyuVgmBM3zUE1vtf0rN/cvrxsRMFvJPXFwACZ2EAcBkq7LDa4dHJbbvpMXsh7q8dajaxceXuQg8dD0Zq8HllJENDMHASTDgmhODuRMwR3BpOu7aryxSLgRzPa+DdtPrPrKHAc5PqC70HtOlD3Z3E8rV+e49o4Dd/ME4DJHEgHdmULA+M2vfcuP9jQ0s9lhJCnSfeTZjyMhQtzd6xalun2gRzduTyccom5shZqAACmSmYJInzLheNa2vRMrdkkiAaHNDxcEigHaQlLEIPYCWbBPiiGKKkMgpD61MYWSC8dc6WjAxLGhz7pnRHW3IdUTEAYULSJGRAdydAMnJHVHRCml71LO2UxT6rUkUGByQ3BHFXMzYiSi7IbgYIUY2MHQcPAGAZm5O4ABIKgpEyOAgzl58RIGVQ7RhxoFJAMs6gBgCuagCkgIjlJExLNiHWIqWcAdoIgpGBEUg8rBDaT0gaqUU9eFrAp1uLl7MIrjlLA3+ON/+k9QjMN8DcFrH1CAjif2Jy3e5nDG0vyAlnTaAHdABITq+nOHbfvWgb0291d2Do6SAQdE3DlqMz5Uj3FzPV/EeP12xsnI+/jIQ3r1Qvzyy5mbgIVSdq2ZIhkxMccoXk5yRNw9cBQRQDKHXnHXyWZ5lmyjroj5YJGPhObFj5IbEgckR3cLiGAOAKMI08jTGg93d9HCBd+Yzw6urByPJzvNByHf+6TBe/1OZ92nMXWHlMUz/+Be+8JJBMCUY7V54dJ0PL61d1QShCaAaRFDrt7cWzQc64Yhu4BULIqMQIGcmYtITtK3VNUUWYuWYljEnCCG2LdKTsXUtYBayWJ1UNEQuRQJzKoGjnVVKxgAm6gvE3vIDNAthBBCQERVHSQfARMMlVwDFpovXfiIjh6AVx5+86VeTI6goAEDAqroIPBC4CIaA6ioWzBXY48Yck5OKGpt6qsY3RwZRYWcFEgcWdVVgNTNiCEX7ZMB4GhUo7mpcWAHR3L1AsBo2PcthiY0YTQNFzYnH3r0Yehvp1LM2H1oU+Y55SJ+62jfkT58de0IsBk1lvzwsC0+ihVzVEfok8ZYAd6pSx87FU5Kg4+xPu6xzs5/4qf+nl5Ofu6c+3c4heNb/SRo/z+W/ekkYrLa9x0MkWDQPCD1iz1wIGNCfviJJ7auXqm977u0feEhK97OFw4VEFPN8/ZgvHahiWM1DeisUpG5ACFWFLxkzaX07W/98+fao3Thke2jW7ef2n7Y+s6JMMRY1UDAyJLVtXKevv7Wq6/tvPbRJ56Oi8JjmbVCG484bLOu+/x636lYPx0Fc1nZb5HBmHGIkokDc4VJNyebDbbXXv7ti498ASfbw4/8QPt/93R/K3TVXu7t+0yeZIW8Q4zPESmO6o9/9hNf/cebz7/20t6BJOOFWkqyNxerR4edvbXT3jpwtb6iWrr00GRMHABi
zm65V5NxXXFd9SnN55DFOHvfa3QVtSk3SJi6lL1uW1u0AsgO2RANQipqmqJAxYxGATkiS99rEFFlBgBjMHQFGLA0gSkMSflMDIDqNizIQR4wETITEoBJhjzUFgCYGwChAyMrALn3JRNVAd0dxAyBTASQAamIGLiaBwAKSEQAaAbmZUhfLZpNsbhRdEB1N/fowGoiBkhgDgpD6M3UQZHMgThwqHIWqlndSLEOLGIALgJO1mVZZMuGpXdgV7eqGf3hP/SLn/3CHy7AQ3NR8OM5PTWxPwEb7B9QelByYxW9O3bSAjigacLD1zjwW7cXr+/MD2fz7BCBDK0VWvSVqa5V1SSWrpSNen33oGfHJ59+5B/83rVmxAVEVNHjkJ3A4COyGSxT6YjI3EVyCKyqjOQInUHONnc6cq7JivOtHvqCGCMYrDVBkmQd0qSBIExr36ixrsqUaWtzhCWlNg2oK7ZKpP5h0R9Em2JQ2R3OKEXnlfi3T7e4I8EDTylweOeJ9xxGdw/E4821j3zqZ1775m8oiL512ANyXWkFe4flYGY3algzQvAeuGbrxQ+TdaJBXbxyHs3n2iQbN1LVKRKoqwGYmWfXlLCuQC11XQo1kcYYVCRGSinFEFPKgWJFXHpDxFQyAVjFYB6rECoaAOIoEFMIRIBDHhIOVsCAFQHohgURwXjQshAZV05IB69oZKYKThTQEYdyMOCuS8h1caRozLTo++koJjEwRuPD/dna5rikMh2vZS0GQV0CEUBGBKhCVs1F51kkdVSmltTHrqqIaECAoCKEgcH7fjFvcx1497XXD69dWduiPvWAQRHUXMRcSdW6vgRkWbRYNy7myRmon88eu/TQYXubS1SMXJG6DvCnq1k/PfmnZ/ftEg3OH79rfsHbKBj3w5rhJGX1J4CRfyza/8mYnrRPW0ZK1UrQ+fzoVuCwfXXzQqavfOn73VweuVr/7M8+jQbdbP7iiz/IOW9duoBEKbeq/Sg2PMRaLYMbIpoDKqITWDg8Wrzy0hsueXMyXZuuHx4ebG6tk0CI6J4YSd3q8WQy2f72d1/+yu99r9msJttTc9nc2oQw6pUMq8LT9e2nFrtJ0l52i9SbJoQBCJFNFYKDKiOnolz6VELWg9nNoys2B9jys/mgfude9/uAHvhyvV/V/n5O9lUfwAFyaZULfroZzNvc94SlT8kZES8ddIsewecLVfZ6Y32yYeaJQF57/Y3do3534Z2ki2t4YX0tK3StQ+knU24mIVZs7t1Rm1KYHWEWpOJFQANi8a4rIaha1aesAuLUl5JNopSotaqiORkIGwKBGqibF3I1G55RARXcAA0BwU3dCZgIzW1oEQxEg4eIYGjjBYzESMUE3NwAwAjdCETUhmYBgMwBYYDwdwQwdAcHj+RGHIsYDpmnNhQrIEFQEwRyNx+a54mha+rNVNy4uJuBKRgiOKp41lJs2TlDzIFJDGZtFqMYPDKge10HdANkoApDJZ6TQ9vL9qULi14+8XOf+w/+o/+4F2tGwWGFA3sMA/oTS2dxOn4/04N6fjz5fyIVkDHkxVHm0fX9fCRsMXQLqccxqs6K99iMg03qOI3WIHhJCOXa7bxzMMtgi3kBjq5ZSqEQVE1yHpPHAFmcI5eizLEKbCZEaO5E5GYGOCvQiVRugdSQETwwMXODkAmzmDi6e81Sk5HkPLd6Um9tTT70sSdjHQHQwXDphTjZuB7geD3oa/2k0PEo+R1r4dQJeI+P7nq1dz7npFz7xCk8aMruppI/9ZnP/vbfr9s825pA7541CVfThqtx1WUvhxprzyp1xGJQDEMVgKzvShcAK6TiTA6QQwWxQjSrwygicMRCrmp97vtOwVgqHo0bNY8hmjsR5pLm89YUmMiIVNWyMmPDwW2oyaUV9AICkIMxMIAPGsYy5R8CIjgNjYF9lRG0Mn6AiNBNgBFWKZSIaFBlbXaP+tt7b25vXVyb1K+/8sKHPvyxGzf2qppiiKkt4/UREoOKSkmSFIlRA4ZIQzfXKFLYjEycuU8dcnBnA2JmE4eIVoC4vnFj5+VXdtcrLST7s0xESXtBVAcxPGoXzfpmzmlUxUkdNSeejJoG3zy6mfNh6gIhdzDeevyjBg0uAVZOagDuugTOsubJeXee9m5yzN7m5LtePOCpTe9HT6cZ7MdCvpKDy9V70hsLEAkRX3ju6y9/77vrBJVpe7g4avPBYpea7VGzbtKbWHs058CEFClKKqoyriK4Aymik/tQCkkAjqqKb711q/SLX/7lz1+easXuAF3fowNxXK8nOUkcjYvhazdu/N3/5letg08+/uytG7c+/JGP7+929bQglnEIucutlTi9QvXkYP/a1tqEtIhmjkPLDwdVdlYtVdVY6aiOChCmDUdSGMp8ht+5KlP8MY3/+6EfxbK58x7L9yeOH1/F8U4nehxL2xUgzEnY18++ODUTd7nb8V1MMVA9XtvdOZzN87jh3oZ2bxl0oXm/Wxwsui5hBepHPQQuTahHFSN6XqT5UVuNkNENAmBMvaUEoUERy8Uw0KJLTO5I6iNzdEBxMwMVl5wJkYAUjBG1aEklVkndQ+DBiYnog/N9CPAOPnBzI6QBmQeGQl0EQlqmaaIjONLQLgsGmAcDH9p1LbV5QjIAMrRjf1xAdBvCCYBugzUxVBijAzq4GQEiOJnpULXs5lk0ZU/Jm8hu6opIZApmWNTFQAzUHNxS5oN5qkkdceTgoEpOwISOrm1fFjm2nXWd1xdqX6//3P/wr/zV//G/3dfNdDTxY0gLXE7+T4Zf5R3p3umfvy8e/wHTyiZyADRwVizTzdHz+y2GLai6LMqRhi7UWbEri8sjmQTZGPO4gtylGvlmG65d3582/bUUmhGTQyldqKZ9EUWtEUcsquxAVRVFihajsKyXNLMhdoRM6tgVJ3cMViESOoORGiGbgRAS8ijyZKQbLBtNIF+srV3I0va5dSgI8bju8NjD9QHdP51m4Lv6bt7+i3D26297/slNTp0/6A4YmJ/5zM/+wp/4N37tH/xXD1nY3V+0pdyY58m43p4yGopAyrhYlBhJFTqBoTq8TxCigZMhSNGu0lCVqoa6qtyzQuQKCxAAtl2pEMC9T9KnMhpVVaNqxsRdn7s+c6xNgYhEVEDriobkHxosgEFbWj45ATjQyU/BZUdEG6p4V6D+CCusnKWUQIYBhwgA3MUMm/of/+rvpgxf/Oyzh2361d/43b/4J7/w7e9fKzzpdm5f2Bg//fjDs6PDpjEmEC3BWUpWNBwSuAFVtBRzB0EvBsERBEzNiVVggHevsdmd6awNezNNlHeO0mgrSCrOVSnFHBVpnlpJ1aJfrNXWW7uxNqlYpqPm1u5hCeNZqWnjoY/93C///L/2b2XnhhHuTLDwk6k+fehsLAnPfuH0qcs2o6fb+d1jvZ2+3ztaDuHHu8Hfi7V+tA+wfAofgKmOfXgAZnK0e2M84gsbNbR64/pOrOJoc7PaWOtENjEyoTkjsoiYltKL9AJjd3e3IRtCl5BtBOaGTTxcHF69NH3myYcujsVtpu4lFUTAVo5KHtXr1NS
LPv2jf/Ll23v22PZke1zXGM2xilFK6fduAeFk6gqijk19EWuZ9benXCGoKYCbuZgVdambqJKZQ+nnPB1f/dRnrFlXH2zwVaL670+R8MCf+v4ueOdZqx3uhFvxjg9P9YFaLbY744Dnr37Kt7S0H91148rF6aWrN9+6WU+rMnNlktK7mOWU+y7WjaAB4FGnrjqOVLGntqtZYoAiTgAURLRfLEAhFPMspgFEsJi7O7EUF3VHgpQtC1WBinggdPBAaOZ9lxazOYDFqkIeGvAOJoMBKjggMSGJGvGA0AknMDiI7rb0cTmYmduQBQFIAOBE5DQY4ShKMCBqOhm4DQUFxKwkUlTMhkIxIgRDAIQAisu2MYAAaE5mrgpaPBfPAr0aB0BABQRngKGjGahhNi9u6nY4k0lgjlDcKmR2YnB1RkAVT8k61VbNR9AVeWjz4q/8u/8LCnEynZgWIvZlEwBcBRPfyb3zk0R3ecCf7Af+oZLjssMcFksFti5cTrntuuTuQKHtMpI5xBtHs09evRzevD2pRohgaBlwF/DN/X5rhK/vWYnUIKbiowiIIKqTEC82QRQXaqEK7OCi4GwA4K5gCEPdDCA4hRAwEGpEFHQtZWOrUWvmnRaAAHBxylsj36jrin1za1RM4nSkBKLCHJbm+MrN806Jxx8QwNk9+rSGdn7cziswb2MwvCttZ5mZgY6ORNHq9T/3K//bZ3/2X7322ksvvPhCJ+XQZJPk27/297tc5skPDhZqgM5EnM1dnRn6YrBIUigFrBAxlRChqmBUWcWQUhUZiiIh5V4W3ptFAPMW6q6vm+jqTTPq+tS18v9n78+jLsuyvDDst/c+5w7vvW+KOSLnrKysrHno6pmubnruRuCFmVoD1gKzkAXGFpgWS0gGg0BCy0vYsgTWEl62JGwZJCzkZYyMhVrdGNF0Nw1UV3d11pBVOUdkZMQ3vuHee87e23/cN31DREZmRmRGqXOvFS/u994dzj3DPnv/9jQc1Z0pskGVxIHIEvrw374AQM/0FpUceyWaiPviAHNjKLs42QJl9V78YJAT3By0lHOZLBeg5HTpysXPfuITX/nFXyqvXNlvoVJNjL/4Ky98+8eeNMc/+tWvXr20XY92bt6+sTUabA839g8PZk0y1oDkCF2y6WQ2mTTnLrkaTD1GzlnhRuRmxMTj2eFMxcnPXTm39+pLs2nCViCnNuWsOZtnp67Rzid7h/tbFbrcWagdeZb4yke+/3f95s9efOK5nUce2zx/JRRDEoHbPfnmLGV5OmOCHZsna1NnZR06OVvmv/qp6XqXprxvdQDed+z/FC0k4VXotsYgRaAYcW5rNEXz4Y8+fuON57tZd7A3Bmg27aisYhxkzZY5tTk32TMTSR8KKcJm83UAhknKnEzbH/rRz9allkNOU5u2UyOqvBgfNYPhMOwMB5sXvvmrL/7cL9946skPP/Xh81WshsPtlDAU4shCdOPmzdRYPZSMYpZpsHmF29BNpoNYm2dHZ2p9WSXNnYCEKLtBK8hlx3Bt6vXv2bsDORZxTw/ToNyR/A5s937d8F5p7i56ZxvK8ifHki/OLz1Tlz/epCV6R05Sbvyx/82/+4//+5/7ez/7c1/8R1866qZHB13Hw+kMljIhK3si6lIuYjGemqdJHbwWVBHOiAWshbNOW1Wwk6tJk3JjCQQRePKp5mlCUmsaTCYUyr5ir4DVBTl5RzqdzIQ9isDMiRAEZA512MLZiYWDk/VOyv3U6kt0Jc/kTNSHPprDiJ2ZhNjJiUhEyMmg3md+gzqoD/b1hcUqmzlT6tR751IRUjjc3dUsZScGgdQlmaZsSTUTdw51pLQseelk1Co65eSSXdvsRZRk1mUvCGVRhcCg3HRdl7SOwWDZ0ClNj5InmGKr3h7VpfVOdyy2sqYtRu3YVHkH0+s9ITqrtb+haQlOguAxsgwuhvh8VUnqMhxG5vDAaLK9ujudZBwlKOKV7eqrN5rhRtVO/Ss30mceq+PurOvqYVHP2ulggEGIKVsZi0u1NNbmxrvsBGZGVgOvzIimBgPIzQnMdRmE4JrM/fJ2lRX7u1mchsEvDqhmLYt6uLll3FUb585fuljXAzjDlylyl/DEB9L/W9Np6WodxT+97zwgYWYJIMFNQsTG+Y9/4cc/avhBV1GbWiNp7898+Ve+8ZXn2SxSNCbNpkJqlHOfXY1zq0m9IQvMoYyFSpF1OvUidM7i4JQtmU3bLnd5OutiGUDUtKloc2DqOp1Mm8m0g6SsSgC71cPAIhKEmUELmGfu+d/PMZtzwmPbI1kPaQELVYEYfTUMMjasjMMsjIZyg2gY/sIvfHmYipsv3Xrk8tWD2+3RUbcx3KiH5c2jyf/nv33jx35kkzfc42D3oN3bvTGbTmKsvdWNAdySo765d3Rrf3KpaWcTFwt9OjmOBUCuzJRJ29lUb928KfXgwla1OSy6yYxjSNnbLqecc1Jr9eDw4HB/ojsbVQxt03JV3ZjFp37od1/55A+4agFjMDOb2cqh5Nhg+skv5wjfmuJzliZwgu4uNdzJAnAnHeB9UwDO1IbfD960sqv0wB0WRoBeJa2qkknGB7drok985Klf+gfPv3j9TQ4bh0fjqpYu5cFwkK3d3N6qygEbp1kOHCwrl04gJnJ3ESKWpOawD33osaefvHjjlVdmnbNYWdXazo5m+53Tzdv7n7n6tMSt5MMcwnBrWA+KdtaFahtUd10zrGsJcWMrHN6+QUmMawQbbl8j6rjddBw51LOTeSQmAyVzciVWzUhuszRYSKH9jFvkmJnHq/Z5Qb+FzMT3ccK8rfsce+7dr1zqVD38to4I0xwLP6MdC9l/aSUkkMT6/LOf+okPf/K3/It/xNrZN19+5Rtf/seFvvr6G5OUOkZKiSwI2FTMg3Ao3XM2T+6Bc8owIMQCnLpshVEydB2U3IAywo26jJSRVMdtOph4yBo4UHQX1yBAL3OTZsuaggaWuMjR7PMM+ALzLBSJxd0Nij7TudE8wRtADAYbm2edxwoLGZMIO8iZyU1YlIyEezGMiInYDGp9kWKouTtEyNVJzLIDnFVzdicTFnMkpTa5siRnBZIiGJLCyM01ICajbECUTrMRZyMq2EEkYg5VU6hmmLplIwLFsk2OWFGXYlU/+vgTaiaxsBNl9daG72FfTPSBZ8gZtLZXu3rsystx+3yMb3SdBJakFrnw3CHbm/udU2ym07LcfOb8zss3rgcWZX2xwWe53CnbG1P1WkCFNd3mqNrvuMtpVNK5ii3I7cOkTpD5M2EgnssPDiMHS585JLq7GkaDjY2N0fRwtlGwmF2ssVmwBCSPk1RsbdfDzaqKUnMgBQTzuKNFWOsyCckHA34XWodRezrZYw8Sv1wPDV3MQzLvgXQwEEiIqOK6Y+xcfpx//VeDdlVgB0+Swz3leZ1PVVdizSykQl6QZ1vk2FdXzKxV76RhnUhqzcrCJDkJ6pJztsGwbqdt0+pk2inNhEJVhzanwiRIYBFgzvp7a1lflp2JrJft544Gi/50I2L0XkAAepWB5umS564I85gVZlhB8e
hmc3jUfOE3fVfz2uu7v/rFT3/0Y2WQf/QPX/7C9z31zDNX/19/5W+bI462X761m49uF1Z8/auv1FXxkacfneVpbmlYluPZ4Zu704RwdDgdxwY5EjNJoDYRsWZUZe3t9NabR5qzzWYXNqigDqbNUeOx9myzgzEHcfXbt8cpkXguKUVjUpzbPBeKbQIqEVeAFnlIz0iw6Mf+W82ivpj9UkKY0xKSOXN+raDqZRTBnQT8u85SfxgqAa/bKe5ubrsvTzlNCz17WSZx3rPEDAokwdzablZUMVL8rs9/ovn5Xy/RNUfTiQzHqZnmdOXatdyBLXZNHh8cslOjHRURYAfcXV0LqbrDLsTRxz7/3E4YtwdQcliZWqMos9tdOdp6/vnrz3xmSDmM6urjO+Eirl86v33zVkPVJkmd2pS7SYihqos8qKcHtzY3qsh2ePP61Usb085idJhrUiFyV3ZxsBKKYiTcRIBiaaogIWZgnqRwGRC81Ml8ZZJ6IB1+f297vx70zu9zD1cuupYW7H0p2K+dsZj3tLBGrS9qBpEZcWmE7A4uzj+x8aGPf6odv/jf/fX/iOuRJk4zSJRKpHBjh2fPSOroEgpQdGfhZkqTJEcK6tC2IjAEdrfWHKAmGThk2LS1SZLgHlg8WlmA4MIeA8zMctbUei6doF2f8tkRjLh/ARYROIjJ0cfFupEa+qp1TMTGOn8tJziYQOxs/QbHboAREROEQGC4Gno7scON+hKoIgFMZg5nMzg8Kbrcp5JWNaRkOVNLPukUGppWmMyA5ACbUacgJ07qvWbjakURi8jw3Mxyoo6oh2cCu3emgcv9aZopMlMyeuLZ57xfTbY2mt9yTnXfOna/O9MDaL7DQMwcQHzhycaa88VE0I47jVVoWyvKsoXd2p/uHh5evbzVTO1wb3/nwuiFW4dbOzuTven1w+bSkPanXeMbXIe22We1jY36xsGRoi2KwTlCF2ymYdx1fVosW3AJApil1wXMc5caCFeCKxdqTrNRgc0NGRouDi1oKnNsUp6O9566sDWsS/PQZTVzwyr/wPym66bfb+0Rf+BEx4/v2F331o9vp7fP2hmY4CBy7t2UQQUH8MYjTz/56leHt/faw91sISKgM83Mmq0IDKhnNzDcnVjbnMRS5QLLEV2X2yk0eWfeRqgD5KzqrA4OARwlzVoDNw3Up8KsWppjWKOQaCkjFu7mxHMPH+oleO5zM5MzEdMSiyYiF2DhcdCznR50dO75fO82ylC4wezi5dHohgtmTvvf9z0fnlq5e3Twme8+933f9bHdoz0VPPbI1u1X9wcb4fWvvzkqN28d+AZ50mb3jdu+OZQNu3F9cvvWrCyGs8NuUlAEVI2Y6tFQLVuGtlMhSskMdLDfbFl+7RuvdZsFEXGpZoRErnJ0pNOG3LiAFBy65HUpRy++oJvfjB/5DAAm1kXM/V3xlBNWgZUP9km0nlZnrU+/k8gjrf15Wg1Ydf7qr+Xd6GFQAE4Y1N6Dp5zxSx+uOc/ePVdce1+0rmkJSCkdaZpN24985JHWfP/geiE07bqDadMkywGe4KQ5N81UUztxFjgxs8MBEbB1uR4U/7+f+0ef+vjlnSeqrc3izb1xCEWMPB43g8F2Er/+RvOVF8efeWbz8hOP/+gPffbLX/qlX/nSFz/1HV+YmHXN7MrWZtPNWkuDYpNzFvH9W68++ujT00ljjQjILZsmy9lNCZpzI5FbRYodkUeTpGXNslBRl3pqP2X7jYIW9rp3uzU8IHzk9A3PNCXdPzr7JWj9x3t69toyPX7+MTvAuu7VH/vqaiLpeUvvQrM5DJqVwrkLjz/1/N87gKsaw9EqmkyzRFJ4OXffZzhMXRwKP5x0yrFzn7QdyPt8ySWD3REombl7ytS0NmBp1QNDZ1lCVNIuW7ZoPWjhrpoJDFYpzL0PyHWh2HnDFGDUp/zM5mD0xfIIYHBOmXq3oGBuDrD3VjeoIwMQCUZmzjBiBHKoe+6B/nm/EYXQdX21GkpdkhCbLmUlJ+6ypezmBA9HqZ0mIHktIoGMOLuZg8mz9W79Nm1NFUUQB2fFLCmAKpBIX36Sknmr6FrfO2rGU4ThcNx0H/rIh71XdRZxQ7ww6i5Snj70tJhy3+L0jrv6jgt8bsVxByFsP50R65jKEC0QmUOYQgymM8g4Uyl1itIoUpeHm6ODZhpD9fXd/Kmrw83ycLdFUXkxiIfjtH1ehlLk3IFTRfFcFQ46Ii5nXUdOIgTIggnPsUEzN9YYigFlaSYWQ0SoQdWwPDdCdzjbGlE30S51m+VmGUJu89H+TcszQjWH/WneRbQM+nqQ4v+3umpxwrGnp2P6wJpwdo+vekdjwh3OXD9/bhJaJCftW6DubefndoaPPrLFhb0xnrZZWjeAzc3IkmlVcsrORAq4usNdM4MiUKTctTbueNapciCnQKltUbArXKFR2KwjMxDPOkwTp6Tm0ws7gysXIlmbW/igNBCv+oEZsvDu6ZvMRObwVZqjHvHnFcJtc4mZQLDeEcFcQM5Wxtl3f+6pf/zf/6JNb3//D31fe7sV6Pd91+d3hvHoMJ4byoBD0O7Fr7747R/71D/94ldzGHYWWue9/W5yZDsf3dqbTd448LJOG3XZJp9MLcaQum6aDspB0TbZ2i5w8eZems6yKTdTnTY6rVw1l0Zu1DWJwG0nXcsMrwaDlFQiqio3N944lxpHBqR3pqA7mFPpZFUgLC09vYK0yhJ4B3vsW+sUJ5n4on9PTbh1TeP9UQDutAZO6zDvEb47F7DIl1bSuZ1Ux/s3kRsuZTJppJJqSJcvlqN6y3InRSzr4e7eLc1djKHrGhHTbnJ48Ea1cUlTDlXkQAolQl1Sd3RUcP7QR5+a7n5j1rRRYp9/3AUZ/PyvPZ/VvvL81z/9Ez/OW9s/8Nt+4pEnz3/py79+1I53fFoMovO4ydq01ROXzh9+8xvoulFRpOYwd93RAZXWtd1UKFtOwgQYhLJpVY2yNwFtkMvVxWtmfam+ZWfTYp85sS3cNzPM/R3BM++2JiS/8zvj1M3fuuXv/MVWM/1YJMApTX9+3lJBZaDPK8WEefisjgI1tw8KJonsZZildHtmBLRmo8hEGtQGAjCamSGQJRISd1bzxBAGmNtk0Jw5q1qI7OSTVuvIdfayDAAyLJI5M3MZw4BRtm2ikKuqihLN3T1HDkRibswB5sS9R5BbgDsFLgxdH+pYBDKCG1IGk6uTmZiqwUFCIHMzeNbOVWDBjVSTZoUxmJK5c0gqTetZM0UiR+py05paaLIm51lnGexuXWZ1pqyzzl0dEgBSy04w9zb7RKklthCz26TVElRz8Cl1rFFyECIyJmsVUAUXcK+LKpSjb/v2zzB4HgO+UNjWGfj9EYPe5fy+O61Ps9+IdCdUd45YAhiA8sazl5762HM3di/+ws3XZlFsQkY609HGIDXd9f3DRwbbz7/w4rXLj1yf3Lo9tUGIrO1um8ZN+9hWOLo1Vq9lc5hkX9U2inJv2jHFUrBVCVFGpzEWs1aNqE+q5WoAnAwAAwWJwIUxm04Pwqj05vxoVLJyG
m8MyzAYvvTabalx+epG2yl1eZCOPM9AO4t3nDt+9szkfiA8b79PvzVp2W/ra2R9y7hHOgPBvfM5WBul1ZB57zHjvV2HQVVFlDqfHm6wDar6zTc7V2b3qFxIEUwDhSw5JyjcemdLEjdS9SZzUh87NDCBrdOCpPCgROpdNi/rcDBtqyBVFTKXexOdtiFEzlMf7B1cvXQh1sIMIRYOzAKwOZzUiIQiEzmZIYNAcKLeCgGHkjODFi4XziCHGZwdAjZHUlNCLMvp0VEpxbd99iPTvTfG02bv4PCZjz5n3nzt5dcunn/i0x/9BFL7oY89vXv70W/8ky89/ey1N1/Yu7nXvvDKdKwclL9yff9rt6eHGOxIaOFHnbbqDg1l5OR7kxRCQepN097c75qWItOgrDT7ZKoSS23RtV1ORN7NmgxoYJiI1MXWwKjZmxyl7UcfI4RTThMnGepZBddp+UOv5y+l/7sLHie0Uzpl2TvxhDvOsPfRAnDvYtb9ApLv7Q7LnqS5R7Pm2dEuk+eMajToOju8vTuIjjqQYNLOvByo6WQyOb+1ZZol0mR8tLe799j2FRGCOzGYAXFiGg6KDz1x+fkvfumxbeQ2Cwq1vv6Rz8bT6WG6ePH8C1/5+i/+/V/49u/77tsH7dbW5Wc/GjqCp2nFW8zWznI12uZYdJ1CpYpV27RF5JyaEil1M8A0ZYVLgLlJAALcmsFGbGywWZ13ZlgPVvpS+TwB+q/5IN4H6f9+0Z2a4qdX29unsy5/oBKRr/U2CAsAeY46ruGyC97g81LiCwsCwQABsURobJscooSSppaZSENoXKMSk0ZGTiCBEZyELGTnpBZB2t/bSUnMvGm9I9M+66cwiSjRtNMiehkomwcmVzYnNdfsIuxQN3NFCFGFQO4gc2Wdx4CZWQ/3QJgcmo0c2bL3NX/FA8fs2YxNhQjk1BcBcHK3PpKMAQaZGxuQ4RmeHEAw50mTu2x1Mei6xgzTJNmoydIousyZkBW5A5EHtVmCKRDIjdTc2Mypy2hBjbqKgdyZuqTznS2WHqORO7ll7yBta1RWg3O1FYPPf9cXHFEC92NHWIaNYgHZ3Sftt98p/EFF5/wPSVy7f0To/a5hMNbRZVz60OOP/dpHLl7/2m6RMlsyCuY5G+yNo3Se25wIma9ty23VNw7GOxtFyvGV3eZjF4utynbbbpqqqtgYz9L2oDoXyqOmA1MZw8ipS17EQKDkxBLgcDMzA3tWJWI4UmrVNTMgGoS9nRhpParGjVx/Q9/Q4rue2KyrQimwGqd9bw+Aa/2b9Krpmiz5G1bfuydarQhfMtzj3/d/zkW4t3nPs+hM7Gl157XdeC2JRJ80f3hwlA/HR82sjGU5OexclcUjhWxgkSBGZAWFWeqYnZ2YyABAwGSeTN3YiN1MuwQjNnCCGwlMGJKmOplpl7M6d0m6A7qwzR7C3P2cuHfl7/1/mJhATH1qn7mW1O8AvogRmP9K5I5eL2DiBQ7JBBcBQJrzZj00gK0dbG92IWxub/zsz/3Sa2+8/Nt/4odRFqZHl8+NtjeKr3/l9ccevTBRO9w/aHX469/cGw25kjB+ZbZ35F3KedrSVjmdzmbuZTVMmcjV3VtmmE2n2qkQEyNLYE15f5ylEma4cpcpJd2ddW1KReBCii7pdNKM20jXPrH14c9mgOGYm3/7Nz5RW2Ut6/qJ0SafF6BdDPE6mn8mLnmGZLaOIa6feA/bz/ugAJxu1ZnfrP90J4z2ATSu18L6USHtcjPeC4BmxDIQZwlOrARr2nZ/0hg3s6NpMzrirS0Tr+pwYfNamnTT/XFZDc00cCAiwFRzGeTapdEXf+nXwvl6++IWkxUcui4NpEx2tH2uojFuXJ/9rf/kbwyLwfd87pnxNF999NK4O9zY2WnGszfe2KNiyFx/9YWXOrKtnc1Z6oxsa3MUBNRocHbtPBuE5nH5xDlZCJYT+faTGodCizyFtOQlC4FzBTPcZWTeBj2IkVrHXR7QHrbGfM/kyffltZY+I4v1TMcePD9pbhykxZ8LqZIwz1hL7h4GO48MRttHu1OkJhSiBCVPjqxGTAwXgzpU4MQeZGI68yQaleERKcPBmjiBZ44OpEbWuZfCwoC1WcWsK6kUYrCpqmX1EEDq2ThBApEQBfeO3QVEbM5wYibvHT7NMiCBRE0dNC8SaSFKkQNPbaJAwdFcgUzzTIjcVxBOlrusaqQKN4calAymTJ3xLAtRddhZ19mk8dYwS5rNlQiAwc2MySPQuhEBBlWYQZWNYMytkTLlbCQU4OyeNZMTWDlGJ3GoEsaT3IUwTcnrnTbH//lP/3Q9GsKdmObpK5b2NJ/7uN43iX1RYODB0QdqQE+LFb6M2CcCUghH8RFofGwrBSmOrHSZmRm1XYJ84/bhF56+RphMPGxudRt7R7O6aJN6LK53uj3ttgah7TptQ0dFVRRdN6kqtuxt26HAoIiqoYFlNUKAMIFhpmQEZ2FzE/NSpCLfqimawTSlbjAcqskhhRdu7Y5G1UefPl9V1OSmQBV0bLNdXzhXzN3R1hjYB8O9TneEkO7aTXcxpbzdfeKsneaYV8jaTRemY7jm/JHPfOff/S9Cmlmf9r51j0VwFiW0Xa5VGNlhIkESLMNJ3RUKILuzqZkht5SDE8M0J4PNH6yle0qqLLPOuhbCpmbNlKxTIWMm9CUYmYjnHjBExCLmxjTH+Jd1cGgeHtBvreSL9IME0nmJAO/r5jCxu7MUUC0Cxzq6c3S+nY4O9m4PhxuB6t2buxc3ho89euln/s7f70yvfugRa9pBxUd7E88+7RJpUkMoygo+Ik3jMQo4IaWGXYhMcw6lm9N4Zjl7EI4CCCZNEiJtxhKkr2Cc1KaaD7u8I8FyO711sHl5wJc+8l2/64/PYlUDi9zTq+E8ZQ44Pb8WcTlnTb51cXf99xPY/zuWR5YXvQ8KwEk1eu3gTiLdaRX8AWkCTovVRU4E06ZrJgQ4U0o5Z3VVkJmrdj4Zj0Mpns3a7GqBiyLU9Si8+tKrsbxZb47qwQhEwpbNXJgdG6PBM88+ST6JIn3901iU6LrhsKzqYv+wuXS+xqT723/tP+Xpj3/209817aZJ881bjTBdefxDkYv9SW6ao52LF9584w2Y71zYMEsu1jSHJTQ1LRGbWYBwCCRMQiFGK66Mnv4uCyPxec6/YwB0nyGaTu8M96GP7/tInTCB9fQuH3F67p2Kv7/f020BChzj6gsAYQUFYMEoenF/rgaYz1UAcpZrz378e37oR37t53/p1Xz9heu3FdKqi1MkrgtiDiIw00yeQbNsB601imiaHcmQ3NWSu6vI/lE6GDsLtjbLsuDcaUckkYKbeUESqLdn9e4JZEzqyM46r7xCAPfp/LV/RWKQwcnFWaHMDO2rdhkB5uQGbf2NG7siYWdzE1DDwve5R1UcXde1ycxDNlczgxNzzpaytp0ejnXffdpacmozWvM2W1YzUJ+bog8lNkVjRE5MUDM1h5AStebjzlo1NSixI5CIMDRlS8kTEVOXsxBP4V7UUgwvPfbUn/+L/7vz1y7HogTmpc2W
idx8gSDQGot+t6vgwclrK4H3A6EQWCrkCySojwsThOGVj0+67lPP1PEXD2dpoyo4ZisCtR2Oxpi1s3qjeG2vefLaaJOPZoKZlE3yI6cXjvi5C3Fn6PvTJsHjxoY3SLN2VA/KQhoD3C6eq/fHLUimmdusTua9m5yaaSrIqxgGgUYsw4JT6pLlndEgK795oC8fdB38e5/dfurSsO1mGiS7d5omk8MNZ4L2VZZW77fGXT4Y8Z7u0YHixE9vecI7a8piH6YlLnf6pv22LRIf++R3PfLct7/+6/9gM9mN21khnjphtcAA2lkXBV1G9EyEwAgBRKAy5AxW6qamgME9IQjQB/Aa5nsMo1F00ImiUwhgCYMKV86PqlAwC8/LgPHc6b/P1oZ5eFdvEKY5EOLr+RaXWsF8CzR3NvRFZeaGBMCNewSJ4YQgcnFn4we/91NxNMRsmpu9Rx/ZEOncu09/6rmt2N1+4fWachnL/bZzAxP64pcDcXdzik32JmeiTKZMgFkJzUrThJTdCerc5TghCGfX1E6nRSgtFgeH04MWxjwZz5ipDPHa5Uflw7/ZNq4VLHAn5xN2teMa3WmI3hdj7Qs0dmEx8VUWidNyL07MvfXwrXsBRE/Ny/fZAnC27eL4r6e1HzwozkWrFhEAy92RaReCJLWcLSdlcmEelnXHOU27g9u7rtJUDVwpkBOaphWRZjabHh4FYZGaiIQMTgleFOHytSuHb75GnokAoxAGXW5qGQyK0daQo+dA7awb/8Lf+a9vfPWrH/7IRz/xueeyGMHL1EIntbVOzSsvviyx3hqOymCbUQ/2X/X2iBTEDrcQyEjVvJC6S4dlGVJ9zbeeESkJ5t6HJs/FylVW0AfTr/f9jidmy7qu/I7JTx0cu/uDoIV2f0YMwAKjW7D/hR16xUDn2ZfdiTjwsHz0I8+98Pw33V7vujaF0FcJNSMS2qxEweKcVLP5oaVxRgIcwajLjuySSdRtmtJR5yjYWqM2wTmbInBKihp1BfOQjLPBzB1qMGZEJkDVDA5mZhKHAcTsJGCACVmJmdkl5yQU5hn+s6k51HLK06nWNdzgTnAjYP52zgDcDY7+mIgIykxwV01EVhScDCBR81atNcwMxuwgoE9U7aqksJYC1Dhbn2QFhgRv1TPgQBHZDdmoMS+GJXFBhM6s65pWLQZOVBzu6s4TO//6n/uzjz37TKzqBba6sOcQGXxtCNe384eVTm02HxCw7A6GOyMPVfyRT7wartHhP/nCM1v/+RfNKGpqY4WqLMdH00kzHl7Yuv76fqj52sUL+y/ttxQGhZnyTOnmND29E9WaSTPtJlKGSDEkcChjSJa7WSSuBcwSCK1Qm0wJljWabTCJ2zBSGULkmNoMspx9mmii/kaTpq6ffbL49idHQ9FWBSHOWhTuu0eH5xPKchVSjKWFqqf1fJMf0PEt5F0qSO98P1oDj9cdwhfF3Jbw/zyrJo02v+3HfuvNV/8x3doVpcgkFD0rFIHEszoFYeq6JCRlFDgZG4gzLBmI2NWNOLO5OROpg4SEkIyn5Jo0AwncMRhBvHvsQvHE4zsmgUOQEIVZhIn7uo1zTcABMMGd56rAslscAHil1RBRX/Cif83+HxOBYIbe9OpwZ+/ch0PZHhaJ0RJcZWNUHM2ab/vchy+c3/T2KDBfPrf16q2DajDocifulo2CoaSOMO44iLQdQIhl8JQLoanpuMkUCmXkTpV4d5Zr9SqYEJkLhJvOuuTZLGd78sJWnk26vPnmLn3Pd/9Aw2W/P53yzlxKJavhWv23OovmOZBolRPp9LQ5MZdOIEp3wc3PoONwj7+PFoDT1o27nH9CE3hwYBWtzG1OpN30UMSrWCJR23Ug5uBFUTWOnFFV9ZVHHplNbP/mzTwzFi6q8nD/xnA07JruaP9gOCg9CsChJGfN6gQSRoyhmyWCF4FT7soQiMPlSxeGdXt4MIvhYJp4pnjxhS+/9NVvfOmXfumJZx7bGQ2GZUnUvXb9+mhna2tzxMGeuXYBOs7TWzQ7aCaHKMu6HohxiJwsc1kZIxQligujx7/X5fwcSF5tAf28PY0BPoybwp2QVD/rtPv97AeUKX1t+vsZnwsXhON43ZxhgEFgOAwuL7x0+A9+5YVxgkNM4qSdJRIXJ2Ij1ELIXnBwoSaEGXNmMuZE1MGVqYW3RmPjqampbw/iuZ3ajw4HRezz+zctus5zhpOA2MgcSiTM5KJGyiQQcultwQwGswsLgVOXcrKiqISDwXrow2BzsV4QC7p8ecu996RUQB3Sy9EZKZu796mEqFeInMnNs+Wcvawi6tLLkpMcXt+dJbeiMPaUVd2ZHAY3F0IkHKWcmdgM7iGwmyuxixi8YCJ4WVWZXUX2W60kllWhloSpHpEi5pl7XX7hJ3/8w5/8rJQlgXtLt8+VgDkvXxvbuQb3MC6nJX1gAbgTLauikFDWDvWHfvAPvvhP//53Pxr/6ctHv34EAONZasGXRsWFizv7b+6Xo60bB3r16uD84HYzTVJE1DKb+q2JDYt8blRySM3kCEMGR6HYF9GOgVLTRiYmsHlprqV02XJy03YjxtGgKosAICfLUGGxIt6e+m5rbUpPPTL6gc+f2ym6piMOZU1RiPO0mRztgmC9Ea1PcLHkY4vBvg+2qW9xmiOxvt4t75Ddv12A8k49T6vfV01ZVhh0zE3BDO5P+/xv/vF//LN/88btX75ITgc6nrZtTjAMSw4hGFSdnUM2iJG7w2Cep60mI8YcSmFhMhcmtd57B1k1mzPAAktsxHB9/Bx/8unBhSHDvCwGDJLQmwEWDkC08mkh4nnTHQ4yKC9dgPyYVYqZ+28XXBMgJ2FLSjAHEyEQSJGacSgKKaTYKuB0fmt4bmuzazWDLl3Y5mrw+t7k1b0cY8ht6+xEWbNrwbNM1rVFlKpiCX2wTZx0edwZm2mngUkCmyejmCFK1KbOWZtMbUYyChS3yuLWq7d/cerXnvtNqYlhSD6XopbBvP3LH5fz18G9YzOgh/d89fNJI88Zc+mOE+yOjzljbi0f8r5lAcJaO080+MQrnCnhPUi2tdBNzMaTvXJU+mxWlqWmTBEES6ntuvbgYNI0+tI3btx442hjq96dNZdHdT0qmmkEsZk2h5N2NK4LikX0TKEcZVA2r4YbAXnvzd3cNm1Kps7EyVEUvLlZxzLEgRxOuqPOtkU8F+3Mnn/+tUE1rKpQ1t3Vx84PdjYuXrhwfnvDjm5odzjZexMzLVCXRcFEUjKFGDxIMQC3QaoUPzS88HEQExTEfdWOeU/3qNAd59R7ujvcy8ieUIVPXPyOm7tm+jlr+cwhGFrsC/ejW05gBOvtOL6SF7VUaN00SAtmQ+Bk4cOf+37/W397ROVW1hffuG3uSp6ZZ05pkhidwKoYDVDmcWdZWMk7o0AOcMoYN90EGLcYRt8Yho2NcrS9s1UHbfPe/lFnuc2WkmpmAoswETMJA8LCIkQRzCTMIQBQUocCAuDg6HB8OL147mo9KCWEnBPIiEBiDO2mLYvt7NS5U207z4lg2aDKTkauDoCYyNTUQGrmMLe
+2rqWVXU47mZdtzfFwVHqCLEi7bKqJTchMnUWIpGULAncvGB29eTI5gpNqlQHVS2kaFqlEFhipMhlNZk1gW1nMDhsmmS+ceHcd37H9/7R//W/kYWFgrn1Uc/LwVyVdME8gvttYDPvPS0xGAC/saXAM8gBEMHQY0HC5FJ+/Ee3P/VbX/2Zv/ndj1cvfvFgBgzKgpLQeDK9tbsZ4k5lL705vb4Xzp/bvjXe66Y2GLCF3GS6vu9hJz66PTq6fVtby11Lka3kEAMkpGyWCIJhJbV7axaFjeNoNAiuDPMuETgQJ5cm47CjI7gU9umd+MlHygtFmz2kzAWLswV01LSz/dcZLXkEwdCbzxZcbpVg4Df6uM8hv2MbDz1gAWP16Lf8BrRAzVcYEC2/JbKKWOvLP/67/mfXr//bR9+8frA3qcs6uDlQFJy7FIQlSEoJTE4xFEKwtkuAcqDI7MrkiOwOYmED+Txvp3cKIXCCqkngS7V96qnNpx6tKmqp2JTQe/swsWDuAsRzR/+FPwutTEzOROzsmFf87VUtYpqnt3BzkB3TU024TwxB5hwRDO3Gzlbbtl1KRQzgaE6zWVsUsa63z2lMfvTtHz3Xfung5ZtdBogR3QOgycqKu4mralWKqXXJck6TJrcJAVmIqiKw+SAUwWCZWsWs46bNjaJzRihK00sX6otbl6e285v+uX+5LWKBNfZ/ktmvZAqff95xN1iYeI5NgDtNvzPEpHWpZd3q8FZEoPfHBejdnPDgFqevDRiRw+1w7/b+7u1zNZzY+kTlZLEIEsJsMmubdPP25M03x3u7h5vnN3a2H60jjTY2928fbZ8/d7h3MJlOi5kMtrb6qqIQCSIewXUdBnXTTGPvnaZcF1U2LyKHiCBVXfBmlgYmLtbR9qVzKrR1fmtjFJzS5WuPIOXJ4ZucDrrxIdTrumKRTAZmNWU4x8olRjbTwYXnvm/Go5KXFbv75UfAyvHszp393tG6GrI+xGcer5/my98w12juMkXudOe3bhotbX33tVtotfhXFkBagj9nJZPvkeVFGUCR6sqnPvM7/qd/+C/+ub+gVMaidEjX5CkhO5dBTI2hs9bdHAFOwbMay1HXNYpsHYg7j9OcYh2qKmb31964/fS1HQ8yCtjfz8Ls8GRqjqxelJsSRCSWJYkQUXAmBCZmA0iInYiDSGCiUBWYNJN2EuvaPTPIwHAj5+xK4uYKWM5d1mRm7uTZ1CyrEoeCiy7lrNrXlmTi5NantDP13HVt60edjlsoQx2UFD73RU3ZmGCgaZtrZo6VpTRt0sbmsGtV6kCAtl0xqMe70xDD089+6MnHn4arzrqdrcE3v/YN6ybVZnG0uz8qN3/LT/3u3/u/+GNJwqCqbVU5b20GEtYq6r0dy+z7QvdPmf0fHi3ywS00OqIIzLz8/O/+X33za7++8eov/+i3PfnzX997/eBgUBfnNjlEvX3jzc1QVMiv77UfvjB89NrOK29Oumm3NazKSE1jrxx2szZd3tjcCPCcm3bWTr0cFCUXQdgh6u45ecoBrM4Bos1Uk4YQyjJy4MxEkMm4SY6CmmevbH72anl+A2aMULkXkUtQqRAR7Zoj8dYRaC7h0hLHwAdjfpqOd8s7BpLuhU6APHf+XLmI0CL5Q3+Dfo9zJ3IvQvXJ7/yxn/ztN/+zv/ofjyevSTfLzrOkwtKkfPmRxz7/3Z/5+z/z85Px4aAo2ty5daRaMJKZw+sy5s6Y3Ek1qwiVwjBPiWbZJZLBwRgU3aeeHj3z5FACYjmUetDmtDkYMgv30tKyEFjv8T9fP0zzLDfqtupkW4tKZ+aFRcBhgMPMmDlIMHTMIZkRs5oGkfGsAbGLJMsg1uyj0ZDYmeTchbC9WUi+deMC7R3SfseRlTI4ICsOj6bkKCV2TeYIl+JoMmsSlCCgAKSmKQWFDLRLzSTFqo4sbdbJLBejsgrllQFJkx7/jo/9nj/y55vyfBFrnBHDe0ISp3VesozpXk6XddH/HufeuvRyTAA6jZTj+H3PmofvtQKw3G7eze74gJgXzcWuXip2U21m043RwNMU8EFVTZsZByFGrOrtc1uHU2tn47oKg3rw8ldffvLqVvnItnIwk2mbi2q4dzDmAVezuhoNNRkLKLh1BmIpi7oeaJrCzdy61Jh5DGFQ8NZg2DVlp9KZoWthaXN7Guuq3tS6LtVYDg/IcpcOp0d7hYgVpKIkFITcTVC4BvUEJColXn4Wm58QqQnq4KXtdxHk9rA4gd5F/aVTZ+L4RJqL0PO4o9U1p2WbU0r62xN/HlQaxjmPXGuKL1GeZX4BA7EBQF9jfelb7sRQij/y234KOf7Jn/7j29tbk6OJhxzrqI7bh0fJ9cK5zXNbW5z95v5B15mwNE03Ni8jNIibNMmSUojMIe4eHdYFfu2bty8Pw6efOXf50vbN2wdF4CgSIpdVALSIhUgwc1cnznCwReEIMmbJDoByzlHCztalna1LBDYHOY+PDs1Mc2Z3WJ9vVyVI4Jg0iQsYyioM4WAeZ02CZ2aOgZO6m1uGG1xRFMWkSRnIoJlaa+AiZEcoo6ZMyQ0QEaYwqmOetbNZunLl3Oc++9nLV66ylN/23d+xv3/r//SX/s9HswPUePTaI//mX/gLn/z276VAOVkRw+uvvkZdbpojC7j26GP1xvksXhURq4qBC1R1ntQOS4zOj8fWPLz0sLfv/aE5b/HFHm1OpAOSdPljP/w/+TdefPn3Psd2dFBfu1S/eH129coOitgpIzUXt4ffeOXmS56uXT1XRWlTTk2qqiKrZcMbXW6m2Cxpo/DRdj0gqObczgLFrCqB3TzGaEBN0Z3aWbaCpRBUYZrtaNJyEbi0R4fxie3tK1vh0jlC0Ilq4TQgILXqpYAVosTJYyQB+sy6q4QsfupNP6AlvWfY/+k97vQnFrjkmhWg/3aZWRKBaeblb/6dv+/jn/vef/BzP/PC81/eu/7yK6++tj+d7lfp89/7k3/8T/2r1x75j/7O//Ove3PUJW6mqd6s27ZtW0xbY7Zxk8salx+/MJs1UeLkYMJKkyZvDWJWzJpUl/a5x4dPX4rbOwUTR66Y6lhEwAwGh4jIwo1HPRPJ3I/fbSn7Cgdz7eMYmHgubYHMnJlB1ucTMjO4weEOcxfzCAnEmZJ7FnZXjSFIqJpOi7LQ1EjBmrrIMu2a8zvDj32IX99/s70FZt6suSxwNE1OHBhFkKxZCulSJkJdchWY1WvgwvmaM9pWWfOF89VsNlODEUYRo2Cc20e2d55+5qmf+l/+6eLiY6GIWBTnOTW2vnbomGtuOCatr2ubtBzlXltYC5A4tVRXt/DlhXc9G3eb0ASE9x4CevfA2BlGkPtDq5Fzd4lVUQ+Su5u5KblJYDBE2KgbbY3OtXZ+P+/tTlQ71e7lF1/f2dlwKpxCl2xzNDw62NvfO6pGNVd1DCADuak7SMq6Dsjd2DVKbrIrCRlxYhJXZ6aqiLVwFeo6CkIORQQpmkMokmdLObESu2sCkIUUHpyECk002Cg6O5BY0ObTmx/+sVbOhTk2iYUu2ufgWmwKd+3K92
Z+3PsjTiiQx5bU8Rudlv7pHqbfXVtyYiW/axB1gTD2WWNWzqerMekfdCxyb6HrYB4X5lxSlbX94Z/8bZ/+zLf95X//3/+FX/yHB0eH1x6/+uHnPjo9nA63Rj/0Qz+A6exP/4k/1WRljgao0swxPL+9tX3+5a+9Vu/U7eGMAo4mXfBQlcPkdgC8ejvtDGhnZ2NQYlTZYCAS57qjkYFB4iIcQkAQhxExej5OcOLsECEQUpuIxU2LomzbBj7PR0vG7pw767oMSAzRXQHv7VVCQsiBpUMmYpZ5qXUDu1vuC+mxdA6OIZqGQTltu6ZpUzLmMNocdk1quvZDH/vIM08/8fpXXv6pf/53/L5/6fePD5NzuHDl0v7t29dvHPzsz/634TH8wT/wL3/+C9+voXRBVHenqx/6EDMJyMicwU6ByNx5kfGIFtG/vR8nrcYSAJYRAWu8+mESt5aq88MDAzxstErVxYASmUlx6Tt+8g/9uf/r//Zf+yMbfuvqE0+mWfvouVJUZodd6kyiSVW82RrvT86fG4XA02amnReRI4lmHjeaE80qTJJuRalipZKJRFVzUgBQIyLzBCcjUUHbZWSAWQLHoJe3B1vRd2pIwO2p1XVZlnVGzOr1oE9LBfIgTpbMKwBYRmL6mokUD9d0fD/pOCz7HtFdrNx9Q+ZbAR3zHlmL3J6DQO4oJQB85emP/PanP8LE8M4st+20nbUyvIiy/hf/6L/2L/z+P7D3+qu/9qVfuXXrlZwnu7dutl3a3xu72ebGuSef/fgnPv+5ja2tw9u3/uv/8v/xT3/+Z452Dxulo2keBbm0E568UF7cHMBjUVYkRQjBuZfzvc/9jzXDxZyv9Cn/fWHMABZn9mvK0SP/8yI35ObEZO6mSiGAmTz2ErFqBsB9iVMydYN5jGKWYxSHh8hd6kZ1EXa2VIvv7AK+vvfK9YOmTVFQFJAgW6MSrR0cKhmboYhhNNp4/KlHb766a7P2M5955tL5c9/8xhssUtXxcG+PN+vpbJym7bUrV6+/tvvRjz/3R//tf6fZeTQUEZhLUafg7JUxuBe1lnlWlv6htJYQ6fhk80X92dNI5bEDWhwtC4afjaqfVjSPU28BeI8m/H0xiD/ghbq4/RzhsyrWzbitWdUtFiLE6PU+CdnToK4uXRp5zjev729sDm+/sXfrxu6VaxcNntJshLqqy+nRbO/NveFgEAbwUIgUxp41CceiGhChHTdBtG1mvJj87sSx8CjCgRiJTKjKmc1crYObIwu5uEuIULOcBQIFAbHgsqonOo0VHenw4hM/1o4+IgIyd+ZeyMS6yaiPQ6e7hSo+hJvEGXPprlrMXXXgd0AnFta76aEFCrAOCfjcGnX8nLk1oOeXPH/heRYad0QpILZ97dqf+rf+gnoyQywiBc5dBjEzDm/d2Lhw8fXdN1mYszddN9je/uf+pT/y237PT9144eVXX/vm//Hf/UsvfvOFKhZUlftNG0pRolfHKZaDbU6RdGuzHI24HoSiEA5EgURIJIgIsTgzC4iE0GfRJXJzsKKXoD13DbmzgyECjiRKmtEIuMlN03REEmIQ5RAUZGasagQQMzOZmSCQwL0vCubmbsY+n8MWmYnIshaIQejaE4//yE/+xBd+8w+w87lLl577+DOVxKIqiLnaEAohqxWjzT/2Z/7UH/U/ycQxFiYU+jhj6U1K8z1YMAe34Mv6NXOHsN52TccZ08o7aGUHnmt484rOD4Nh4MRm8gGdovmidCcYSGBasGcvtj76o3/6P/wbf+r3/55f+rWvV3kD+1PdPjfcOXf9tdvZ8+aw2jtqb+5NLPuoLOoQxu1s1iQCUxFRCYiaVpuZTYpYBS5iWUfmaARVVTOAHIFFJCuyewblZFXAoKSNQOci7wwj0CqMEJl6NzpzBgB3ZDUmNLOD3O4XoyHW2HsvYXwg/a/olLT1Hqjp6y4fd7IGnGgdrTmRO4zAq6qlhDkv6mucEzkCHDFsxlEOCObw0chHw8HVp6595nuLwF03Y7DBk2aCiUSSWgLMgOwf+7YvvPhrP//X/9O//It/9+8NmYuiunyhuHi+kJKCREA0hCDcRxCjz/OzDINdwFgrAGsOhPd8cv66fVIjh4FAxHMmKuTuZppV3d3dRQKYzeaQkPU3ZyE3U+UQADK4hGBqIoGYg/vWxfhRKg739w+u25Rw6dzguU998vzFJy6e2/jKF3/lF3/pV7iUwUb91JNPffcXfviz3/mdVdwIIVx87MqlK5ebRCGWObuqc1kJE0yVOncqJbZEdZT5RrzYf49D82eO2LGB7eWuhcuCL6WyeccshhTvbCr26BydmNR3IH9vg4DvBX99y2sf5OJcGwZ3uAfhi+e3Z/tvGHssg6smVXKKMUrohDEsZGtUdlvlhcubRwdHL7z4TS5oa3vntVde3/XdzdHAyCeH472bt+UiyppVJAwCM1vOHGLJNXmw3IFE2xk8uXuGCRu8U00K7zwzyhhYnCQywExlWBjQqE8SownkLEEkttpUg42Zhgsf+6185TshdXAYOWwZ+7vs0l4UWdUAuHO3PESbxZlT6O7tWzLZU9c+ZE4aa2r9ensXOZPpRE5JWgEKcJArlWUFB5lA+tGmWEhfgn2wffnf+yt/5b/6L//zrz3/9emsefzxR//Zf/b3PvupT0gVnnjsmWdv3f5nfuvvPNi79dN/6I/8w1/4+Vmm6XjypuMmYTab/ZbveGQrNnWdy4LLIhZ14UQhhliwlIGDODOFwELqpEhuoCjcVwHO5s7MXHCwZERwS24GDkxCJEzBM1LSwIFqFkAkAK6AKaFXJ1ZCNjnAHABkszz3CHIykHsMoYyFZf19f+D3/+F/5ac3z53fuLgpcDNWt75kZZ8/yR1BeDgawgEUBiPmHqfpVeR5aodjPp5zHH/R/7SEa1amG58P3MJ5ZIF20eJjARo9sNRSb4MW+NEHdHeaO844iZlGYqtqfuTTf/b/8rf/47/8v/9v/qv/27mY9vf3uwzzkJIJtK7C4TS9ut+cG9rVne3zVT2Vw2bWtKlRYYQYq8LNupzbWaZGiVwIgSGEvtoGZSc2d2RL5BQDFyVXQcqgbNmVOLIIEYQMogQ4tMu5sySMus1Gpl07rs/CBj8Y8RWd2hLeg85Zf8TpzXVlmZtHeq2xnHWpeu7Au5AynZYi5ZpSEeb1d53MKRIosANlUffsp/ReDgATXAHA2ePG9se+84f+lUef/quDP/XL/93fDTFf2AgDoSoIk88TM8giBI2wwDyWEDdRX9WrzwPk5PPkb72vaw/8Y80cteSKPUclYpgbVJ0ciG7O7OgzOvfJhpx5nmKInDyrBkCYGJIL3hgNWPwzHx5988W9rSc/9yf+5B/93A/+qGKLWSPr9VdvDDfqQT0oypJC4cSqSSSCGI5BBQKKtR5nCuoBAHOffhu9usPzZi/8d5edseL1Kwvw8eEF0JfoW/BeouW+QMfPuzudnKsLhr6GOx07+aRhgd6TIOD7KD+eeJ/7TYSFvQoAETXt0aybAh6YTZUAy0YszFLXFWWdBCtKqspoOQ+r6mD36
JsvvP7kMywxjCdNCCEwpdTuH+wx+855MreSq6J0EXF35lCNytw0RVGlJmiapbbtU7+bZTPLXUesRdQ2OQwcJVRFCEEtcBHKIrCLpoQgzNZ1MwlmYXuSLj3x+d+Ztz5mMgoGkC6rlK696lJ6fEs55OHaL86Yx4uDe5xpJzTJh0O/Oflaq3m4xkX6VB5za8BqKyDuERees3oOcZGCEgCIyMljxc9+4pN/4hOfhDkIUKhmFGAKZr6xc94cm6H6qd//B3/hV3+law7BZaasZb2XlWI9HIbhhldVKusixmhQFuEoCMIxOIuTODOBU/JAAiPACEwMS9ndmdlUXdUchpSde6ydg0OUua9hye6ZyAGjeTyZzSsDEDtB3UBQWHbvMnImS4CCHSFyWRRd6q49/tS/+id/erB1KQ4qIQZAggDx3lPHe6NXv9pNmJ0g4GXyid6gsGTnq+lxnDcvgf85FDff+hbBb4soG1oYbRbY/0MEwM7L0D8ELXkYaaXnYbHqvPdRZhHEAo9+6A/9O//B93zPc3/r3/uzr+wf7M/ijDBVRGFYV5ZxluzGuL09fePK9minKrfrajpr267rOgUjxMKjJ7Y2qxsAJqU+1ZcwRFgI5FoIV0GiWAkLvWDlcDchYiNycufWnCgF82xmzaySQCBPU7IOwAJSmMslDxnfe7/pve2C031+J6x36VSz9kl+DNztmc4q1eZqN58XUVxlrvM5Rr+WpXMpqzq451oMMycGQYgHO48/+wf/xJ/5t268PL758qiUIpYUCnIjqCHPA0vmdyFg/qBerJ0frlZQD33M0/24rfamvi3MbGZuRsTC5CIMYeFsydwMqurJnJSMmAisXghRD9PMrQ3mZiaROJChiHji8vDHfuz7/oU/+R9effIpk6IU7gOTrzz1zJwv87xji1DawoFzLtYv4JxeTBJa6i6r8r2+2mJpAbovLl3bm7FW2MuXiNHic/XjYl9Ynwlva26uLlxMknU14IR2sbzkvbEA3AdWcxdV5v4yMsKioCdsfPBmN5uKt2UcmSNbcjUICKiqMpJrszk9agaDUA+Lo72Da49defX67t4vP//Rj3/4aDplbs5v1mR5cji1ZERha8eJ3JWKIpAwMQEc6gHBpWRNgWeFdr1LqJOpK4rgpGqddamNiGUVu7YbbFWxrong2VAUZVG2zYyHMmkCbz33xKd+dyovc7khDuJ+vZ00Cvkife27y3n2fm4i6yxk/SXuvU39mcu18Y7NU8du964uXtfQjv+0hBXWWPccSVkM7uJ6WqYU8+WP7vDebaXnxM5MLNHh2tflghQRTeZPfOZzR+NGyrLpOiM/alPpfvtwdm1raJhJGaWOoSxUtRhUUsAFJEQSlYMJCdcE5qCelNzNdF5M1Ty7gsHgsgyZS8/CTK4WxFgc4hz7Pc5ATgIAIQaFWb8e2dzcFE7UdLlN2impkdocTQqFkKetjeEnPvOZYjCqtzawUOgXrHmNMfc9xexr0vuiDxenLWwBJwbWV0lV5jo0zRXqRb8v9+7FHtHvGLQMGng4pK++mQ9FUx566mdIj30aHKBSiiO37Sc+pkXJMYUQgk+ZvG06V69CdKGUaZb99aPZ4dh26rouyo2tIWBd0qbt2i5BeFiV7siqpj1mSIH7cjEikDrEALfcJLMg7MJOrm4GdqiQupsZ9yWc1Mmd3a1rmqp01dbX8OMT8+6DQQfea238hPZ1gumf5DMnvju+WS+Cg22Zbh9YqQdEx+7SC+S+9qylhDs/hQhuc3m6x5c4lDtP/Jbf+Qf+5n/y52NQin1NdQYRyJ2MyFniwqds4e8yZ/ZORH0O3bkYPd+gCATiOVNeSwuEueTlTkRRAgjMBEQwubqawdwBF45BzFQtOxNFFmYinvuDUi6Eg7gUsX70kR/8w38al56RIkRQNiMnJpJVKYL5rrn67PcCWrBzmvP0nr/bMl/6QhWgxbstUhrRsSW23KpXNdxWCsNqwIDlDyek3LekE9qCY/Vqp69f/24p+TxYBWCtB4Djr3e31t2Blr164tz7t3jnDSQid0qT6dGtN+oY2ykhs7CophBiCFFhmk2ibF8YZe2KugSxtnz+/Ohg0n7pS69x/cbsYHJpowzXdoZDcmhKM7ebbulc2HSP1oXRqBQRogAnU6fAQrGgqGU2cCEhlKEuopDP9vb337w5m3S51Y3h9mBrU+ohxTIKccxGOafk5Wivq6urn3j0k78rhVEo2F375PHeM4iTPTb3RfZ1ZvG26b1gnHeaGnSHBbO++k5M+hN3XFkt37X0776Q894hHd8Ojm9La0mCfDGm8+oqcz1gwZmWzGjuGL+6O/uCCfduMPMUQwvG7GTZLLCMRnUhoZtOOScAISjUX3np9R/5ru/bHE275oBCpcLlsKZBKYEUCiZwEC4oFBKHoRqk5pC0cVe4masEFoaZKuBkILWsGpyADDiEiQeDWEZ2UTMldmEykCsDzuyBkdRN1YzU++QSDgKxeHAQwShIoGRd23z/D//AcHsTto6G9VXG1vp0mal6+e0KJ+nF9DmTPI2Rn8Bx5mK0L5Je00Jr6JfWfIvog+4XNmPyh0Hwfv9b8K1DC3Rtvusz3NRLYUlWkXo3QxZquyB1DsHVVD13KRBRIdmxa37rcDIouo0iDMtYh1BUVYyhy9lSIpCY9WX0grAULCEwkyVVz+pJApRZi5iEVKxjDiQFOTnEPcAJRuSuXSwGSXPwEh41JdwXaOM9od8Is/FOe81pFG6duyxPWsqNK6l+8dcaZrHGzhZy5bG7rD9vlcTG50D9HNoGgz1Wn/n+H/yZ//dfrtKuaio9BGIQBTcx5SCLhP+0REV6tIXnbv3LDWfl1QrvAynn8VS9XaLPDkREbisGbZqJuDcaBCZmBgXnkgjumt2cnLN5JhYWMicrQuwsZ3NiKjYubz3+bOLIrksPJPRWlJXcvaYbr3fQWQIrrTg/rZL1rA3JMoHHek+j376dfL3P12zHS/fQE5kA70UzXX/g+tRaxmDcSaNYXvKeWABWaUvuzInuYemvK68PhlnQakyIYM3k8JYeHUYRNzfXIEHN3DxEYWYyFMQXr5w3ntx45fVrVy5y1I2RcMRLrx5cvXzhhRdfHB9Nn/3oEyVZTtPbt2/tHx59JDx65dI2E5ppdq+jBGYxmBBLKFrvwFIWNRdltpwVDgqh3BjUkp0ZVcHCUtVVUtKUQa5GRbW9O9546nv+x3Hr2ax1CGJmxIKVVLqcTo65lj53IHy//ZDfmu5F+r+Xa1eXLFfmWnGvh4AWy9mXWPJ6kxf233Uf9Dl205+/xs57NWBpZp17tiyDkvrxX6l+vVJhDiOXwkeDAQ2r116alEAACTRzUe2cY+D8+TrllsmqzWEPufSpHSgEdU7qRcUxRFhq2qmbSxA1N4AldGpFGTVnSyYxuMLVQGCWeliTuGV3duJeL2WHtzpLOTPY3ZjEsoHEHTFEEeXkEpmIck5dq2o2qCpkv3jpsoPA8xxta07uawL7ArJa7pNzpukgWqb5nHfsyRmyxncXZdqwDBlYwT3LEVnbi9e4/Ps/7x7m
GICHUxzsk732/dYjiXJ481zBNweDo1sp1ANr1Kh3yTNiJYPAMc91KJMmpTaPJ21gKyQUwmVZSIjuJAGAgajL6m3SactCqsbkgVEVYauOpkYuPQcgwMzBfRp1MjVXJzAHGXc5VgNVa452uVdDaSEyPoR9uqD3rWl04v/7RqdBqDuIiMfOxzH582wsd+mWhnWp/5j0v45oLKTMOVdbwESnlz4tBOT5+VJuXt06d7m9casoCoCJ2EDqCTCmAgAxr6sm823FCdTXa3QQA8YIIM/eCQSYF7WkxeTsTWpuzkwL4QQi0uWOPEhgBtiJwcmyuTlUmLN1KWUisEhmH1YDYWJLwS0MZOPK4431JQ36emOrl/S51dbp2BAtt0tgTVpf6EVLfWsRH7YWyLvK+3C8R1d3PCav0FmfZ4z2PU7IOVK1avzZFoDTRA9eAVghlzihiK6fcmKVnKkLL+hOkt873TDWF93c6ZeIDITUFsEwKNvprJlMhqMhwSUynLqkIQiTwOEkg+Hw3LlzozI2uR1GubSz9Y03msOj6+c3tr720mS/ff27P3d1WFTTafP1F26++vr+cx9+/KnHL1w8tznuxqD9UBSjwZBMnCGxpAxYQIqCSDBtkuSwOdyKHI0NLJPJLJHEonSGUXXjxu1f//WXv++n/vU4/JxDYyA3ZRJfV+7XDlaB+t9SdLq5d2Kj61+emBVrXLD/8751wlkpgd/xrVa4TD8pF2wHxzjOnIlhIXAupf+VnrD2+itxf+7GSKtIVaLeKQjkGAy3v+d7vvf//tf+syvndjbr4tatXQK9+kZroa43ttUPJMaqjEAIwYpI2bKbM8MdIRZlVYlE8i4fknAgdnEyJwq1tmLKyBwIaipZTc1yR442zVTV1cy8DxvwDGFOaVJG1kwcoG0uijhNLs6aAKWCOYGIwOwSOHDhmQZVce3yFZqnnzum5CxMvkteO+9vrB0tAa0Fv38L+XjN1N5bXXyxX4CWGE/PlBfPXgiR77+I6w+r9I/3vWvuQItVNXcLMIB0d2eo9YH3mQpLJhNC9janEApVbZOCiJ3IPIiYe+veZW4NnA2zqROYhAgMZyZCn/OKnIyJisBEAifvNLKFKEIWXFidyMjILQjIWZwILqI8jAWbdYf7z3/xFy9/9PvrejSff99qPP89osVCvO8L8rQUcwKXPWNvWjtesqHT9z2+uy0ErGOcbA2hmJ/b7wxL5HrJ7Xw5O3rcCMQOJzUCXOTixQs338gMyTA3SIwhspE33hRUsQURBub2ZnJmkmzJyYTAwu7mc5OqBFTuCvJFNWAmc4AMIDcmMBl6hwVnd3aJcLCC2cxVc2KUBOcgBi2IKRboU3Qxd8lzmhGbsNuslWIUpJ7HCZzo8LkQsHawlFRXmyiWcnR/1pq3z2IE6NhdT1IPFSyY/zHB9ZjMvviuV6AWSNQJofgudPYs8tU8uQs9aAVgXaHF2W90d+kfp7rprk+6E925K48v0l59BeBsuQmRLETDTGLhBCdYViKKMRARzJ1IQiwqGW5vWttKiIOB72yFnUm8fnt6o0lFWV4/Sr/20s3HtyvVmLT45jcPnv/G15569Pqnn7v2+BPntjcI0k6nTQiRC8SyEpCQAyGnxg2k5llnOScLWeM4caOaD8ddc3tv/+DgYJpSktGHNh79WIIXRGZOLO529uivxQk9pHvsHchPTaZ7oROv6Me/Pa17vmO6D7L/WjsW6sRa7mBa41CLeK5FjZFFBrY1nkbH3s7XPpdaEM11CZ8zSXcwcXb9i3/5P/gf/Y7ftXf71pd/5Zf/i7/2N1ibMMwyGsSBlkKz2VjKIpaFeZo7s5BnAweEIjhx0qSqIUbu4SZGVsQ4OHf10nj/NkvbNUmIQG4wEepy5/AQBEJdNhiFKFmdiU0hLGp5rpmLB6ZEAJwZQsQKZHM1luCuhCAcN7Y2fQ17OJGXDgsfqXWhaLkx9GL/MaPZ3eeHH8d+FvvF/L+1THDHIob7x5yth98v7eCtZzet73If0D3QigWRA320isW6kmj1Rnk4bTx3bgwshH5QEHYnJ/c+lmWRes0WCXyJyM2Yub8dYORwMxEULHCKRIFAbg4yJ2Eh5VhGQkcszqRQ4kAkEkktOWFyOI5S3/jKl76vm1o97Gc8Fo3/YLhX9D71xUKPPCn43OM+chcpaM62fMHnFqf3bIWOqfxLlrRCK1aoBxETJ4vlYNOEDQjEQQIT9T7SDO4TgbrPA9MBYpL+al7wUYeD2Y3hxO4gGJnBQQJiciMwaYC4enb0NZcI7hA2WCBxz+ZGEOb+PfpcCkSEPsuQADCLLCl1g6IUyhUHcYVrr8vMg3bpjI5aKEt9Arj13fLYyQttapFJ8d4E83UlYlUj/lQo8D3c6xidec3yywWA6Msn3eUB74EF4MSEO/7j8ru33ycnBuJd87W5vu0LZXo2vTUeH8is2drabFqNMToyS3CDmYkIEXNgcyrIuBpO9rSWinny2NVR5JAnk3HHznL7IH/xhaPXqvHl86NpR3sHaTLVN/emB5PusVfffPLy6JErm5cubAyqrDO1KsGtEIFUOTfMzAwzawyHU+wd2q1puvXmwf7tg5wsW3dxZ/jkI+cufeRDMookWEj/ZwB7C7FvsdQXouW76rO3ogfxgHvnkmdei1OX368Wvtvk7usbtPvxm60gmvk8XaShXyI8x1O8rsMaJz/XIIwVBNwzbQckRinij/6O39a19s9Xf+Dbv/D9f/Uv/R/2X/w1ompjFAQHHKUoSwoxa7LcELF70pyDgzzDE4w0tSCHOsTNPWcURc2DLT887PIMJODUO/ObGzGpm1AgJxFiYqdoailbXVZtyhLIwSF5lzIgbm7ZYCAEci+FCmEjyore+BzKYsWzF+6a84L0fR+siflnDQHW5shb2cpObuCLaK9TLG1lHzqmbeDUKqFjt33ntK6XnE1vuT18QCdoTVuab7Psk6pCZKsK2R0nEniX5v3KToa+KB4Zee/qwLysL+E2j8kHCG4kRDD0qUiA4CiAQiiyRkYUFwluoek4EIwgoSBh64+JmEFEGcYcpB6EopjuTifT8WDr4rqqub7ffkAnxYj7dMu3lGKWjz3Nok+cc8atF3SmaLWGcgA9/1vINot0EHTqdCwvWmNeZO5mqKsBw4MwMTMLExEzMTFknginn8TkxAw43Mjd2UDsBgaTihs53C27mCOTE4GIWFwcCCgURkRs2jvCwQNAAhWeKyOYRy2CAOtj4N1NQXBmIbhZYme3HtrKNt1nyotkJydFIl/17ynYbPH3QoEBcByruZfRpdX/S7htwT1oCdjNTflzSKGX10/y/9OPOvHnaTUSWGQsuMMdlvRe1gE41YYTHXoP1/tZf97LNnbPCsXiyPPk4OaoLrQLgSNcJ+PJYFA55gw75xyY3GDsKBAQYlUI88b2ADnnafvcE9vfvDE7UnVOt/bzgfvtcdslyjPjGFOSr70yfe2N6a/Vu+c246NXN5+4tnV+Z3DxKg2qmHJmmVo2dW6afDjtbu41r1w/ePm13Zv7LbtcujAaDofXrp6/dLG6dD6e39l
yE+Zl/t0zRnxtdtM9aIb3hx7OPeZM5nf6z3d2z/tBDjq1qnt5fVm4ba4h9CZcP7EuQGfoA+t3W51La8jEPKsaQZ3Y6yK0XfeDP/aTP/GjP/L8L/x/efzl5LvuYC5iNaJYUmo7VQkE9SCAWW47S2MQPKcAGIxBbkROIQ7cpCjLWZMDAwQmTt4nVGGaF9YyIgKLGYgp5xzL2OVMIuwoSmsSnLUogpOQolGmRkOQkMnVCcQgy8Ygdzc3EV7tZv0+tWTqbzFgd5ojZw3WUifDSg9b4D6rDl9q4MeVAz92k/u/Yj6Q9O4rnRg7Mu/GLJItqZEjkqc+n6w7OZwBQ59WtldCiedh4QSQQXsMk3sXOgfg0ic6DPO0K4CZuwEQMaNZwrhNiWzIUgeBZZgDRMRE5J6BoJ4Hw6FI/NCzT8eyr1p6TLX9QOtb0QPoiBOa95lPoONs4YRsc0I9OHnZAry709pe/baWf2Y9veWJc5c8cu6uuHBZIXJQLsqSKBBx78/CvSLAMs8qtB4Pu0hKRwyHmzkReJ4EFACDnahPSkgrQcXd2R190iBjEBAYwfvM/LAlrELz/YmcnInccw91m1MEmJlidFWHGtp0cIso+1ybXllC7jxqa663a1G06yZiWju6G2Oltf9pfSb4YgvvlQIsP9cGd90LaNnyu9HpE5Ztfkux+j1TAM7Y3o61765vub5a8KAQ3IXm3MsIpEcHt3Zv7w7I8nhqhKIoqC81akbmzERCzq5dBhBDrIcVEbrcDjbLutDHL1au9PpRmsySgyTELtOksZwduR1shoZCSjhI+tpu9/wr4xhfu3ppdPXS5vaokhABb2fddJb3D5tb+03TakG+UctGxcMtunQem0N69GJx4eLg/E413BgQi581Lde6bjHnHEvF81toH1iuwHtQFe+VTnTAQ9Efi0acie5g5Si4YPN+8uT5TY7Tnd9rbU8ghi8CxojhHkRkY5Tb6ad/5Ief/29ez+lAPCTXwikghII1zRgE7Vj6PMxJk4JdrBNxUmIQiQQzTgncIY+jG3IiuLMwB5ABWkhhqs6u2dQ0pdzMurbVEEomEhJzY0IU1EXMFJmIFNoaQU2VhcTIzRgOcwIRi3vvZbHYL6n3o1qkPbpvdHy7Xxu+OfP2+Sj5W8/dBzH77nbP93+2f6vRKnE3Ae4CE52hL2XXKZEAieFGLjyHHZnYQc5mbg7q6366u8Po2MLuA0jgUAJJHxXQO0MY4KSKqeWZp1lSZVz1CszZFMbRHAY1MiM4pZSDUyH11uVrdVX1Jt8VJPze99rDTMtgn/fn4ScPTv96km3Mm3o3uZDWjuj4LU4FFSy0hP5o4T6yPImJYyyZaY7YMy/8cHo8mxayvBGBe50B6mbEjN5BCABs7ivU50twNiciNzKHEiNrArlZvyqCkClngzuZWl4kUcMi9pmEARjxvHQwgUKQQASwWw4gAYlnV4fg+MZ+l27ztXNO8PEzrr77lFlX6pZ5IRYFEhadvewRLD2O1hV1p3uYlyeEltOje/emvmcKwJmYNHBCFT7e3uVPfvy09cvXf33XMq2vtcqmR0ddl3IziUGq4ZCIifv8IMw8N3YRLEggEjOPoYAnDeKiO1t1nqVPf+rSk21ZfOnV20fT/aM87SyIECR1uZllKOoqSoxmZhwmnR292nzztSmbJWWKEgsODiaYyzBW50bx/CZtbMbRTnnu/HA0HF65srF9rh4NAgeo5iB22qt41UvrAaOLSJb+jHuaaA8NrU+Ad9ns05e/nzrA2rPvFqS9Om0JUCyQ5TsCR0taLbJjsNMitrj3UV54EVL/pKKqvG2UiuwoCGUsQyw6tRDJOAg5iXieW38DGdxcNWsKzO5OkMg03d+N9bQZH4ib5+yeXamvQNBDQixkBidSs5y1azNRgDuLsJmAxDmwMpGw99iPOuBGEOpxqR5AFSIhIXbw4u2Wrwcsjx7UMC9gpCXnWuvkZXP6P/0Yq3tAdLdXfSg03m8tWnEfNyfKXW6mmQsHqWWARAhEcOq3CCILfU4JFjd36zPYgvpQcAL37NhdqA9nBxOCoPf7LwUFexU4EKtRY7qbsytdQmRjcgeUHOwOU0LhcCKOxdAlGMg8qC5n/gd0Np2A3u/7iriLyeXd4FknxKH1Z2ENV14hEWc3YQl0H0djF7gSAzEUTNJD+Szz+BW4Ewstdh0Gg8BL93Pq7Qjz0ieYWwds7sPqWKi1CsCtLxNADALYFIZMRAZXMw4eetyG+sK1/RbmDjD1VWPQB9Q4hAmmnuFSFkURVlvAqsPuQrSmH52Ms367E2MuyZ4ce5/H3GG5Kldy6ypXxGqLWAF9d3r8mRNgNQ3uKhX7g1MA1ltwp3l+4tszx4eO/3T6Ze6uAN3ptDuc0t/BGSCIZkJWIoSy2Nze6pqOyImlLzrRJwZld4YISfaOCM5cDmKaTAcjDkOJlQwSP/toPZ7xS69PXrs5TZRboBihKKWKpSd0kyaKw3IIEOaSmAxlzVxFJq6YSsqDGLaHo0vbcediUQ7CxtZwc3NzMBgONwquIBtUbZFIolV47xk9vMh77qvDxUkPWgh4yOWMExr++9Ta9bFbcILTE33NxRxLXrHOuHD2QvL1S5bww/LnNbGmL7HeF9dapEozqsoLVy6kV78eICBS1aIojTQUQ88TUMjaEBv6Er3qwtR2nYQIZ7AJwW3SjfeiZXYiCVlJGSwmpH3NL3ciYoKZO1xcpagLMwtRTJ1BrEwsAFLqUqddRmpBREXB1Ga1LFL1jPLo6Oi896sYwDFvSOCY8nufqWfVfocVeOLc9UzS78vyWKmMDx3dS5e8P9223LsJSk6z/e7gRjbqkgRxpC4ECUFSygDgNk8pu8T9mMmsX2I0j2QEQXzhBidEgRCECuZCUHAuGLGACJMyQDFgEHi7LEoOADmUWEAgpwAPZpHYJcDdHE1qT2Fod5ElPqAHq4ivH/jxL+/lwtXfd96y6dTBamNZ5kE7yZaWKMVSBFp6qTn6rFQi1te+6GN2SXp7AJZOLfPIYMD7xSHE0HkCfvZ5cQETdnV3F3N3OMHcjYjdySwIFTC4zjg6HJYzMYjYnMhNQOgd6gDPyoHMrdczer+MzIbckbZuSJa9mc0Vi3vp5zU96RhrWf5Pq36iO6RvWF9jc7h/7ZyFw8+Jkafjj1n7z1dX4Q55/U+814mbnrr7GS8dHgQjPXXHtYl11/bdaZxOiPh30W/upPHc+Q1PntuPnSIU9XZRlFXBDulMldQVElz6qUfam3IJnlyzZiZ2U3Fk7aaT/c1ycHhr783dwwEVw0EsrtUXtqQcbtw6mu4ezDRrySkUkUPBsFa7NueiKIIZco6sw1pCKcOqHJX1dl2cP7+1tRHjgEMZRhvD0WhYVdWg5jCMoYRUG8wCMBa1vU5v76tsL6df+IHpAHcBP9493a/b3uNqeWC0TBK2HhW6ZDPHZ/v6kNLxz7syudPr8WzOSJiXrIKjL4EOJw/Qpm0bZgQJ5m5uSK1UEUXMFiABwuZK5gRjIc29QMRM4jAHYmA2VnNzdVNnyplV+whGZXIDMQVCDs
KZrB6MJEo7awB3hrtn06zqDmEuInVqzM5OcJg6EVs2Dsg5vfC1rz/5kY/2egVOddh7Q33frnxKT3X1kk35KtLsfjdwMaOOBZud3LgeLmnw3jnGqb36vaDFOC28l8dv2HSvbZpx02Xtk3eBbO76T0zk5uA+9boQAcQh9MyYAe4LXhAAZyJ2L5gCUxEokAt7JC8jF4zAJuJD94ux3Ai4PALrVDsuKiIgq4kYWGfJQjUSeNdMNoYR7cHscHe4cX7VZ996+Z8fMK1BvQ/sCauD+/CUNSAHbzX5l1jR3AV+af4/1o6z29UrC0wQYXMPHPtyWmpKSlIwiOdRW8s2EalmJjJiV5cQLGeDucA8EzRnNxLTTCGY5iKqqZt6O5nWGyNi5ixJKcZo2YwQi9DMmrIaZFe4W24kFGqMbEwUI7mBmYuisJSFJTBnyxQ5kBEUPi9GvHjDO/bWIlXE3USAuctOL0EdyxJ0St3qhaxjTH1N/jpjCNYG7Nh08RN41Z2uW+0md3rDtSdgjXmGB8E/1+44d4Jd1wHWm/K29qLT2u3p8+80gMefta4mnPFMh6vRxvmLdVUiTXfObzmxc5lycoORrj2P4MSOwDWRE7E2uQghbl2eHk6mqR0WcaPYToZ2ervaLi4+tnN5Ut16cxydNqqQpmlvf+yRPQ4UVlXlQAqddcMa2xeGsZSNjUFVxkFdVGVZlhKqEAoaDgflIA4GpVBukTRL57FAyX0e8vkcXSmzWM6MM3n/fZX+jytjvaD5AHecB7H9vw8I2aKA7zH1deFJQos2ncTufAHpLAyvb++Zi8/TOsD8PwLc5kZeA3EMAcREzqnrQnBrLVaDshg0zRFxCO7ZNEjIKQmLeWDqw2mYAFNVNZYiWcNCrh4CQV0dveMyi2c1OGsGIU7G01hGJrgZ3F2diIrAZsGNxDmqh9yRQ5MRPAg3XQ5VEUL54jdfNjPmY8k/5322/or3nY7vzet02iZwrFDDfZhzJ1CRVXtOtu+OFz4U9Fas+63Pf+A0V6vYEaJOGTnGEGLM2hibMZhEnLJmAgQCJ+9dJhwAWGAO6nMTms09hVwDcWAKsEAogEgoAkcWAQpH4VoEVAXKMhaU6xLO4u6a4CzERWAhdxWfpPGoDHUVAlJoDsRmAFaRPg/RUP8GorsDt++AzkQ5zzxt2QI6+eC5KWGVQ2JpIljkCXKgR+hFBOYSWLPGGERC0izMTOQgNUev7hJJCJ6NIbEI03YC0kLKLuc+yUMHN1WG151d6wABAABJREFUA8pBjsYdcxGKONjYmE7HRQnPBNje4bgeDMtRpW27ORyOZ8r10D3pLBEMHEM0Ek9Zq1jEwA6LRfScKSAUgcFCWtTliQ6/W8/fi2FsmbcXK8z09GXz3qS1rG9Oc+iOliDfQixaYnHr+/EJ0W3tISd+PP1Sb2NKAPRALQB30XBO78p01ueZ1979y7e8CYCzRIKTN2VAXIeDwto4m06CVCSMnnWD3UHMfWk7AA5jIbjDJYSiCIVxt1FXUcpyIwyL0e39/brk5EBqt0oKW1IH5pyqYfHU1StZTNmlDuKwcWetXbm6PbqwGQSxjLEsUZaxKEIgKSQISYyjzVE2I2KO7GSWzRUuRljWITo5o98/zv8AJYwHcd/3sqNOJmbw1f8rO+PSQkBLoGLVUF8kL1jeZsnB1889+dxTfGRJC10Ra+6aIImxGEoo3Fp1EAXNyS0HiQCSkTuDRSSatk6AswPufa4G7pGYwKKaA0fPiQWe5kGS7soSTBXo5STKKeWUHF6WcR7LS3PLlpuai8KUDAQmMlN2CFMQKguZdfnXf/WrbscT/K+93YOmfuB87fgYre1F/SpdsP97lgoW3G1ehAF3fqulw9iJYgXH23uvz30P6QQQ9rbOf5B0bPCcgO4wk2UmBYUYKHXZLOsilyfgBrATnH0hEbgth4IZDBcGOwlByIUQmIQR2AMjMAIoihXBCvZIHEmZzPscKUzEZO6m1oduWiYP0aRqsnHkrp0dHR5uz9fwsfTjD7SPHsYpdQ/04Jr9VgLHu73nmXRK6/BFeZn5BCQsI34X0b9rmab7c7gPpmLRbOQWQ5zHtjhgcFmuCeqZtKkRkDw1s6aKVVXGw4OjUHI7PuQYuNrObQKNCUkoDEebalWTU9scMhIzZ4TJ5Oj8tUs3d98suzgMHDlKKH79xets3SM7g2pzcPtWQ+300uUdNTM3NZipkMcgIgyoMAUYxdINkOXr37HnT2NrZ3YmrQLkjkn/x0DPBT/21Y3X8P5FEPTaKXTsySfuSHfk2meO8jtYeg/QAnBc8Tr2Kide6y4/nb7t6S/v9NpvxezO/sX7hIFd9+pLX261hRkTnJSE4Ox9tlphB3pLgMF9XnaLmINzlhBYJGlXF2WIxbRLs6bd2h6q2fmtKlbFtJI0abJ2W5tFtVErsrttbA8DqDs6yp1deHQ4unTekYN4UVYIBQgwIIgF4jJKGd09pRkLCZhTR57cI9EylexcB1i913ti+10sDO/TdfV/fKtuCA+elhGpayx38XF6wOYmgeX4OrAc71WxweVVd3HwXl96J9R1P/Ys6u9uDhYQuaqRVCws5tPmwAKHWElRpdac4EEZlNUA8bkM3jsVgUnM3U37vxxgIRY3UgURs6uCiGAchTs3B8zm+eaQmUBEZq6e1azLlNSZIMwMhEhCFBiac1WNHn/6WijimQHxD34mrow5wFsYZtZ0PL+3Bq6P1XKYcSpJ9frhXTWEh5Xu0t4z++i9er/luptHeli7l7q2bdrUdvDevY3IQctTqPf158XCJcztUs5EvEqv6EQg9/6YiUiImIjB7MwgYiZA2BwxMvqweBYBsc/LBTo8EITNoCIhg1TTdDZxZ+K1oqZrSMG3HIDyrbiZ3AUPfTf37Onu8s/xP3s1cB5DNhdNFy2b25tXlbAWIiwIzH2Brz5LFTOjz+WJ9XJj/fwlJyJizzkWRdPlNitJ3NvbL4JI3PjZX3rl1s3dL3z3M5d2Bu1sRsL/5Mvf2D8a/9j3f3568Hrm8KWvvVrFotzUW2+Om9nkez77CVN8/fWX/unz3/zJH/hebQ5Uwo2bN/ZvvL61/cmqLkWYGSKsKTuglosgYrmX1Z2XG9nZ3T9njseDIu98Jk6cdkxYXaJ1K/F/Hrq8VjxkDXk7/Uw/fsdTLfK1z9OnvIM55u9lHYAHt3Tvctu7wkgnW7TQyJxg44NXX/jyL4/CPqd2MKqJKEok8ZzznKs7ORnIuC9mBzc4ubF7EOJCcs6mPG1nbWdwGxQxZd8cxFgIzdBWlsjKuiuCl2U5GA1GWxXnFC+M3rhxwL63tXkJoQqRirIEokGyZgNzGRACSQ7OagisnlwkiJCt/PtPoYrH/IwfIBf15aJa0rccw34vaYHRLsEX3MkzZCEpLsZ3+S2O8aV1xnBvO8+JK043EXCSPhc/QE4CYULyIOQ+NXeORRSwUm46AExkMAPIAIaTuylBmCJTdlcjQyaQmdutvYPIsQx9Agkz9EmEcuoaNtGq6NvmTMwkzEGoCJyMcs5ScJdImNlB7jFwM2m2BptPP
PEIE/UKyLuY6/d+6XEM6EQ66TP7f+HZRYvzfGnLOUNPX98ZVoEii0pmZ1j3j82be1IqPqB7pWViEwO0OWIqPUfhmFJLElNK1q+TBbvt/eB6OYSwwBHBDBLqIwH6sJsFaLiWKpDIpU9K4X2SFEMvgcEckcDMzEx9OpQ+q1AIwUH9cimrEDgxrwGLp1jF/e+f92Q+vTfWjAdB682+L1rBPWnLtNpiaNWCOd/oDxfQ/yqrhPe2zKVT8Rz8diZ2YsNc3u8jgPu7mDqT9dcdHI7bLk8ms0uXNxy6ezD9yktvuPmvfPnr/8wPfXtZD2/dGo9nbZNmTW5HmztHbXrpxmuf/+Sn6lHZdRnCTW6KWDWpe+Ty+eGwNCp1lra2Ro9efC5GSamdtVZXsYqxKMJc0CaYZZfsbjzvA1/U1jsl8q1yatzbVDpTNF+/9CSitm5yeasRu8tt70AnTnm7a4EeqAJwvHEPMNXkvQzdvZyzJlp10/1XJns3pUhlkFdeur61WW9e3K6GNfFcDGKwg0h4Lra5gSBZWZO5wX3z3Oa0yUwUCqoKIc9FyVXFuWsjWzEqhhdGzWxaRGZoWVAsvChDPazf3MPs4BY1j8TNkZuAiYWJxZ2ZJBZRzQElZ+RoRM7FDFuFViEszbzHpcSTcsUD5Jnrt15nc99ybPo9opUwtxqvpfS4Sh/cf9/HEB1n4Qu+dlJbOHlwBzrNQXx5hMV+AHN3ZjaOIgELGJPJyRKpZB04xxA848jNidisT2E+bx0R9zWRaJ7xh0DGSjBcv3lrox5eu3g+K6kaGIEgoa+MZKZwODFBHQ4RKqLAQ6cQcTYWeBnD1DRymE6b4CHN0t6N3a5pirJcvskdIJW709uds2vjt4arna2CLbbbxSU+twOcvROtg0ZLncYXcNNSK1g+an3ZnVQkP6B3Sb247mQK5qMbOXcKItYQ3Dq4w0A215sXQ9QLJgtvv2M52MixqJ8EUL9q+pJLRGCCANSbyxjCJBx6mcLdnNzIncgJqpqyBXImEJC6NrKQ5/HedW2zlO/E1/edTZv3Rq94Ryv6vaYzO/COLPe+PmV58+VJS5fIXhM4xipwvBHHGsRQQHtkvy+wCHY4zDIgRDBehFuZmZm1SZ1QlSVY+lydTeMU+eK5c09e3tnbO7qwuZGn7Sx1u9dvnquqK1ujZrfZOL/ZzZpPPvfsqKzQ+VOPPp7Ep50b/LHLVz/86FAPZrlrti5thsv1RjmYNA3HYhAocm9tBhHclFiYnMnVzc1729ed6ETZxndPK1Pu6gHLxNAn4gbe6qGnfjypXJySr97ua7wXFoB1SPhBQND3hnKe+bjVdavaawB6j03HuXPbAxo348n0aLy7e+Px+Pjl8ipx6Gs8GtxgkYJDCc4gIgviHDx3rp5iXZXCg7Lw7M30yI3KojB3Yy8GpQhvbAzr4cApm3exEGJSUo7l5auPXH/p1enheDQsqY6dG8MYmRgkfZFT7VLLznCREJKHWNUQObbcjylf7ylYcmIJ9A8+wW3ucu3dm3rip/uIA70Pe8k6DLPIlrxsjK9pBXNR7sxXXcvuesY6eKu1cZe3XpQugTs853Y6kaxBiqQOArMIMWk3nR5aVYeiym0Cc+9CR/1RnzaOiMBqZp7d3RaVxhwWJVy6eEkWNWKYCQZ1xCCj0ajLORahS8bERgaQmcON2JjBZAoSoigibD065aYOz526nVgB73ienOCuJ3jYyYU2Z3YnNLJltPY6819esdgjluO7lvph7bf1vWqlpTmWONaxRh172ltKIR/Q2yAiwIkQkGc3XmiacWcK74hg5hDSDBADOl+zi6vmYCCtljPRvAJTXziAQEwkAhafY/gLDQ8LS0LvhmwAMcFhWR0OZyKBGgJZTpSzBDa31M1233zNTfvCle9fl70rOhNgve/0QHvnTB584st3rwncrQFLIWydTyxsiFgqq+sN6b9xV03Us/E5/uBqmowM7g6WuZTv7qp91h0vi8py3hwNrPaL5zfNkpCFlL7zY082quc3RLVJqfm2b/vUl3/1hUcef4TNctYo/IlnPtI102lzeOX8pVffvFkNRmR+ceNcpHLWHmqohXgYi5SaMrLBVZXcg0BNhYOBhIOECDdHWBU4u8OkeYdz6c6Xnf0QWgzAW5575wee+DyzIe+Azz9YBeDkrjSHre61cfdOpxXaM9tw9nXHe9ThjlBeeMzqTc9p6/KQBxvGurmxxVLAqddzHSAOhGBQCJHN91xiZ3YLpGJUMUd2ViSUXFRVZWDiIpYSAhA5cFSXGAspBA4uJLNWG4Od7XM+SzTxclQosZEZAIgwgcBBTJlJi0IK8WhUkAVTp9Db7Po80+sc5cFxljvR8uF92Yv50K+rg4uGnR6du0qld/vz3dD7IBLRGUd9rrEeEVxkAV0A/2fWb17afHped1JDukcFGThxUl+dCH0OTg1ihCSBUk7uZMZuTFLnrs2kO+c3qnPnD15t9ndn20UB1z7jYfZs8IBAIIM6SDi4OsjU3ayD48rF82rJu6TOFMlbzzlnQ1EGMAkLEXMQAkgBJgYFA5mKuy98JMTcPUuQYV1RJcOdQTmozJRZcFeOeQ9Edzi4AzvBQj5f8KNVWbBFdbX1MVlsv2tagS+8av24n9CJM9b3al+M2IrFHtvrTzucfisKg+89EzuDVlo4Czi3jbqAWIi0y96SCDFTNgMczgbr+3/uVu3L4QTNy/M6G4iZHYGJYNSnUwGRMcjQOwCZm5KKERARAgcYyBikpp47DSU5sRppStnHg+GQwM1kNnvzuuYk1LvSve3p/1Aoiscb8f63563I73CMUyLQOmteV/Df2bPO/PWE6LUCOheJgU/UBFpxKu+jv6RNyQDVHCUKSw+PzkO8qHBng4sTHMREzIVIWZeT8USYhqPSVFU9Z6WomwNcrEfjo/0u5IPJ/u2jW9eeuJytm+4dbJzbIM7TtMsSUIRb45vTyWHTTa9dvdJOxx5T683m1sZsNhkNB8IF3A2qlhgIEgMFAhGMYMQsFCSWvVbdL70znSuPf3HPXX+mFPL/Z+/fmiTJkvRA7PtUj5l7RGZWdVV1T3fPDTMYXBcLLnYhy5tAhLLkCx8ofOYDH/jjSCH5SKEsKaAIRXZFiBt3F8RFMBhgMD2DmelbdVdXVWZGhLvZUf34cMzczW8RkVmZWdndpV0d6W5uduxc9ajqUf10vriz0OioX8X7/P5fnU71gQdrevrru4sBwL0L483SqTT5YEcAO+kfoo28fvrRx3z5Yjtsvv0bnwwRLCUijC15I01MYeAApctFCZmSagBardYtuCty3NzdkVhfX18/ufZ+PdRUCuNQa806wHx9tbaeVZt+VbjC6sm1f8bhy+fmn0RUW1/NFqEpXgwwYzHidntTwTFt9eLHH2EjXTc/vYMTjSYo8O12+FFnHjK1wwjXthqxj7zfzePX5uxvZEv4GvaVmVXse2HWj/ZDt/Mi38f2Hs1o7svC7qB3J3Vin/3l4LXnK7TADZrA4SbIAwaQKTlpdCOr2FlnVp6u/ItPf/wJYrx9ftUVN46imaOpLALJ
REpAMgGRNBcjIUUoVcdtRjbF1UydF5qnD7e3m2qjhBaA1ipu5mlwF5iZWSNDkCIi0zDmuPn85g//1b/fbjar9WoHj7Tccb8aLXv7fHkHXO4cy+N8nfOWfDxAXIDITel4JOyPEXb6xPI5ziLmtIHvQtIeFeT2S0DvWwuq0K+vYN2qK32/Sk2OEaKZFTBAMWcIngOVcEILn7CtJGuBluCk97dgYCRnry83FKeRzuLeS1unMWXFEmE0RqVcyRg38BVSOW6JNG26/og7PJq+SRrwurS0SC5Nk0vee35HvmytvJ/OPsSTv8c1WB5RLUTKFtLbsgppvBlvX67W1+OYqzSYZWo3fUGwJbYzUjA3CFHr1VVXa4x1i5QizW0Yt+vVVTE8++CDm+3d97/3B+2gNtE9+fiTF5vnV+sn69XTMUI5qsaTqyt1T37++a3X248+skB88fz5k1URQjC4GRwUVCOz885owTEUhgl1l2xCNy75QBzwzzfHYHbpVxZWuXn8d3DfXM6L16TH2vYu1vPdKgCvO7cfKPONFdGmQEKgjfG0MDV2vRX3yj4UzKH546cgVLI5HhBojp/eGa1cbe7uNkN1eimFic661XW/XvVeCt2cIui+QnKbY1m596beVldPS5cZqVKvnq1KvVtf29DR+g4qDSiF5qBR5qtV3W7Xq9WYAxxD3ICDeM1pyu1iCqdO1zuXAM7OS87yzP0T4eyxwPKnX2raN2HREnEh9e0wG7n8dqHpOy5zUuJsdTgeiksdOPe5sJcYBRgEhYBMRSo6n3J7iarDrWJDrL/44e325pauEGU2ZhhtKiMltHwCRoJBZAPPMiEyht58myKQKcIkRR0Q6kuvTNMuNloGRCITZkYSMCFFmVODrJh3hYO6vnSlZKaZnWvgVyQdMt7d6Jz7fsqhF89N6+BQQePxgztNYC/bL37dL6f9s+3b7gBhubUdt39SRn7Z19S7pHldZEQgK8e763J1fU0WC0XSqhnAWjnN3GUHTxHBTWSaLxlIanJpE83NoBxhcKJjMzohIty9EBbRuQ1125cOYg/HOARVo+vL1VV3dVs1jlUi+07Ddru57VdPdyr019FnX5negLD08Bu+YvGXNqzlr6f6wBl6Lel/+RYtyufi777sOZ7o+N62Qy/ikzKhqG5KSOYvaxS36/WVRe26fhS6rtAZEZBKV4xGImI0o8PHcc4LlgRK0isUCUS3vTPr3Ff9pz/9zL378osv/+C3Pvrs5+MPP/vJhx+sPrnurz7s/8Uf/cW/+sM///3vP/0H/9P/tPO1ahSwbgd0XdaEIjQS1URnFitSuDsq6hDjZpw45nH3HHX2mzSNCkddv8P9PNl+dMiWT284oaOhxOHX15jAnMCKX5d0+OGoI8/WZlndN9LxZxt/WvID71rUSQQU44tPsXnpWeuooYasZbmYQPZJgUkm0w0FAmXOQhaR9M59DXMr7v2KpZTSQSZlZhhQx+1muE2qdN122Gy3tznmytfOtaFkjSdP1h9/5+PSr4RuTMkEo8wEpCDS3MzYdatSrvr+g65/Buva2TGmcZ2Ex2Pj4uXWvw06z+ZmX8LlPWdJJ39Pf31TdCynScKx7fyN0IV10RS3ex44+E0Xu0/QQvp/xQYcPrBQVQgD3A0EHDTQmSbvvFx119fdB89WH36w7nuDJRsuybzFqHksWwtfQYo1K21KWdccHMwbo0QiQZF0d6dJCWaiJQMDjdaZuxfvihdzWjECZgYzgjHGZhi6dd/wUfbuNzj48JXp1JDHgx93ly7N3Xl17hS35bkO2wjOD+q4hN1N2O0wO53xYMnsdAedlLGo6y+nSPi10S7HQ1d83N5YF8RNDrfEtpS2hSiVjQXvgD/nJxM4uO4TwIoIUHMeJskII0TBBIK0JvzXOg6xjczeVq5isCE2KFiV9dPV02Ict7dFeNI/WZVutVo/ueoK40iXfIW2vgdE/tLP0V0LjpgF7/36qnQkHR4VyMuWouN3zlYFCoAFFYBAJUrpu66DsXHsFlgyZbMhlRkZkQGo1hqZgEUScLPOvdtGrWabGj/44Y/+m//PP91W9c+e/NGf/ODP/vKnZfXko+99589++Jf/6B//85RVqHTlar26frL+je99u+t7N/vggw8A3g3bDFEGsZQOLILTTIR5AR1q+oaRppnVXjrLmgPSXqe3z/w9MwCz88+BfWa5M83qwau/ESdL9P6vp/SaJwDnUEz3rzwV+073wVeWTV6FTmt2r1o1OWOCAAxK5vbP//0///TnP/2NZ33f9Vx53VYvcDhT0eQhkvRgEk46cwrKUiaN/fUqM7uuSymV9GKljFGN43rVG62a9e5IrktZu1sCUZOVUt0GtoFR43bQNUmaO6blVhqGOhrAoikzZShXT7PtLgRl2fB998rtA+b/t8FdeVnHmxTZWcI54omPmRU7a8obpK9ri3m9914e0d0vPHX+eQRNAsgchzDbMUxC0Nj1RZluHS2lNPeEDTWG7UvUCoU8jchozjdU272lnApEWtJpMjNmIiePd7p5WpWZu9M5VNEG1QQNaJhCMrNaYxwVSUCilAn55FVknmN0nX3w9NqMmUka5hCsN8dtjtjbyUxf8vnlLfMH7QX4+Yed9a19W7oAHZSwKOie9szQ09Npwd5GcmwkOoQK/qr0GiaoXz6aOJeQhDMittuatQxhEa4IECYRFgnNcWFoYQB73WEqqrFAM86BH1PZzYJDTcnEDCDT6EYASSIVNbENdWC/6lt48TiGjDlUZ0YODvVc9TFsX/x8/ex7k544j9KjBut9cgB6n+ry+sTDz2ftA19lQV4y8B0P/Dz83P+zjBRqrGjCFOzXnVGWcue60CfbCnfxLDPO7WTDMlrW9M5rAimjGx1ArfX58y9/+id/8nf+079Vty8+eOrGzebm5ccfPPvOtz++vXv+k8/+o/nNh0/7jDv3Jxnjb3/3w+9++8MPn1xL6Fblbnt71dn16llD2kVAAic8Crq3CBpjcY5M5ezTfTH4ZcGqX3l68eTvdJ3HrHrx89LMgz04425EvsIkv+fRe5r3mgrAPRLlPfXgYVXeyIr+yoXMNdovCSDHv/jBHz7/4uWHXamb4Yo96dM+LxK0CWA2GyYcUWQZQBHdTG7JhAgzp1mEyDEjM9e+9q4vxYvXohi1gVQHp+jBUmxVOhOef/kLjJsP7COawzqjqwFBt8RtMnBMIjLT3Pxq9fQTWEMIgjAnl91190OH/G985+bJ11NOd0mAukdzeJfE17IKvFe0kyov/7r8dul+JZJ1rHVbJCBBb3w+hL5bx1hlDI2mwYtHECYza5pwOwkQk8aQJIGikWlOyxBhmamE0YxmSDNLM2JUJtDQJ8ikuXXGcYzUuB0jklE9MglrAZHelzqOXSmf/uiHqhVmB1vdvBG8oa49+3lBJx25W4g7BWKJ5rN3Vd2Ny1JMPFAmDgpd5ByYZMj2Zbqm88/NVT8DTvHa9CshoT2aDMKoGFAzakSoRtKomMIN91v95Paz08j3O01TCHYQ5bPEoIYYSsPeVCLNQ2mUIwPKcax9uSI8VGM
F5nuLdFcx91ouF+Hzqslw9w5JPePS3DmYF/xObUiZ87jTq/b9kqAsBTTz29srDU3xmho1EdXnh+sHPzU+eurD3+2DlD1XKvGDXN8oXzXZ9dufJkWcXucHxja3h3u9zY3TNguu1u1/vRqCLETo8udehnft9HL64V/V2CxljUelQnFsyMioYUc5cDaJOStURonLcQkiNj8lbNYggjCxKpqCQIDRsWAB5XsbKqKwvOG4XYyrtxPN7Y2t0L5rXX7zG0x7f673v/8v/6f/8fdEx9YfUcMBQtn1QSAKqawCGNxtsbr7zy9f727cXcPvr447bTevVe+eLru3/8+z8oJjto21moh/leOdUuwsl0ytwb+9jfvmA56KsjNuy8Ot5/dLy0M+kknXRinU+jb/8SoMN+0Kl0ghs1u4Uw4WadD7eyL++JiACurK29/PXnQKnBOs+p1VvY2y0/98KdL79w69FzS48/cvmTn3jf04+tX720bkCtMz6HJtaELEnqhmuOiMoKrVZBse501+K4QQqDcVnkPZe3l1znmafo5ddfdc02krDQndvX8lZvNAjbW8OL55fBGFRnnGepTSszpJibuqoNo6pFAANGiJAyAhPFMhnKepRhIkholdpkC3I5eQvGgW+X6sRn1qU6jBBX/+VnXvrP/9t/eu3Wtu20UwqgrCI2NwsLHWvtxFLAM2XKaW1+3+47sSvfDt1vO9TUDJr7Ce+wxIdKB2w8Hc6zI8URDuLTHk45M2y+BR9xEn40tajU2A5nz1y7+6mrFx5D3ozEzmqeOYnIjTZlNJacyZWizZHrqKnKrRNU8mjVDWJtHApKNQrSJGVNCUMiMdQ0nILhBmPwg2EYNzoqbRlTWaWoUCeOzCwxCqQEAYAtJiZAHYfIsSEypmtFVTgJswKQIZEIZMnnSESAqohoQVEBRJABY8NkfVmlGnQck6AxxoIkJJsV0F0q/uM/92fy3M/CTd7HhNbDnHZI3iruz6fhXPqD5SVzBvoDAzqHuGMO/4f91aSTIEEPyzR774yg02ja8nrgDNxf1OjhLjnJ7YbprI/OkBEiBONgeXmp13awE1jMuNIWCLoqqiRIw37zch9+5/Zo9bndJxblY48vXl6253q27V3myLW87/SCMnBKQNY55xCNVvU4z7Pucs9SsTscCFpHlvewbHB5aaVr2KI4TlXVGIJEAMNGfV55ly/45VXXWewxKZOLaom8tQYoVbGsm2R8rpAlgiCxHtXOO7I4HoRmXMakSpZjqsfN5m7JTNvjuDVu7m4PNzbH47JONhuNGwAk4w2Ejg1L+QwSwFPQ5IdBOivioef8baT92M8Hd+Dk63nrbz7BEWTzCJ/r4ddPfBFPnojYVzHTxRE62fsF6oxTwx//0Eeuv/5GXZapTmsrvdsbw9vbo9t3ht0cr1690NvbQyLXNgjj3f69Ry5c/mM/+clh2bzw6u2dcSDAzTu3s6LbW+l+9GPv//DjS9vbN6N2lGV3c7i+st5d69Zl5awZj8arqz1nnTBT5BBKUSnrChXyjNpZ4Z1pYlIRYY1VNMY4m1kHLGyRhLk/aAwOCbKyQY3euNV6t+8pS4E/9uGr73/y0qOX1totk+emqSsPdbe9IMY0IW0Pt1569ZXh3t5CZi889oSLyr0r1/r1P/qlL5dm+X93+QkHCCCHOPLM8MiHUAXYx2hQQY/tQT3S23rs0UmJH8SQOpIlHn3y3nIAjphuJyZ421nvO7r7sW8Pll8rEGKn1/uBH/7BT//WZ+pxnZQjU1XvoqJFWzK98PrOa9cHX372zUtr/iPvO/ddH3n/xUtrvcVOVhgiFKdqDWgxHg9AU9U0wkJEvYXFcuTzleWtQZ9JFzv5eer+8Z/50Y17O+O6cUU71jUKuMTtxSwKiViRZE1hixZbI6hAjjqaGADBexIBFjQuQ7XgLBhDReF8zoYgCmDuswUgTxaBlbwDoQhh3C93tke//K+/8Ff+x399Y+gY1WrUmFyWgUlZ7tEgEu030Xx3nOiKzpMek1xn8NYD4Rxn0ZkWzRx2cYrHe3Kdvz00Z/MdtNnsjLDDBuHku3RfiLztMu/34UcF0NQ8BQRgUGwtPPmPf+nFR670/uCPPtODyFpCaojFGCpHA+fyopUZD5qMy7OMuxY4S4QAKFQUrahBQmJmEqiGlSo6X4Q6WZOVdYPJiJAqpMRlHSNYBhtZQkpkjQKoYoQEzgMAiyiapAKIkcFmmcZE1rJGMmTQiqoAKJIisTAAKqAgiKgQMSuTrUKqQmpIoqrzLjRJ0Rpni6L4mZ/6Ez/5Uz/d6rRgFqVh0nBHgJpjnXmoHafX80Cyzm3wmC5DhPlo/Q8i0o8VN+GYiSWG82N2ptLfHsfMQerviQHzYDRnkN6/ymd04UFex3WtGoN37mxwE1cW87v9YL2PseaoNIkSIphUh0qDPtwZ6Dfv9ZczXGvhes+e7+mVK359eZTnuNgxWbtrc09kyJPzkESG5Vg1sEUDEJqq26bv/sj5R59xKMGpjstRfxzGDVeNNqNojK01Li62nnj6kkYe7vZ3NmNrCQJAPymACpCykA1osphEY7LAEkglNTGWpeyN4lbJwwp2+83d7XKn34xqHtepiTFFQWCGpCC5d8wJAIxxdTUCjYB2iqm9C4wxp4LeW/ROxsFpuu8IwHvUUj/z+rS6HcnwRCsST7hxCKWeKSI1ZH7kR3/kF/7ZPw0hCsvdjR2r5Fr+jTvbXuX6rd0o8crV9ccuri90nIDZHZRNCFmefd93P4Uu7+Yd0CY0kLU6We612a51fbtfEsrauUuZz27evGmtW+j5xXOrQFqO6xij91m7t1SOq0F/15DWdT+2oiUrQYwzHHjYL4031tClK+cIzN7uYFwOLeL2vY1ub1mMv3fvRuHbqz4+9kznqauPPv7U5VarGO8NoSb0RbvTzoTHZbk1uHvt9deGu/eW19YuXjnXLbrQQErVV1++8zuv3d5gM6pCaMbWdUBoeu4X7jca3J8j9md/DyI0n0gPYhkdtsXO6NeTmOk0i+6+XP0tcgDu6/Qf93dP/IbjFzBNM9vUOrO5YIKRISoLkMEsHzVBEJWQkwKAQ0whIekYJRht5zKyen1vb/tzv5Mb6Lb9+lJ3bW1xdXWxu9ghY1qtLoJJMXZbPSStq7LTWulX46LlSJJyvVhAz9rH1s+RcxGKAEAu44odSZblwkQ0iTku3joAsM4JCxpymY91hcYiEGYOENVaZYxIUSBr5RYiKZA6BVEJJjZQNlJW/Z17Zdm8eGPwd//hp+7sxKDJGogNE3pRZBBAt7m51zSNz1ow2xc95xwd/D3Npj/+83jKEzvl7dD9h9tZ8vG9ZsnMcP9pKAA4WBc0qeo+MDOX/u3SAwiY/cynmmA6RhRQUTh1Lz6iVz75c1/49c8/f/f7P/DIT/+hD7U7taQhSQ0ckOK4rIkFhY23UBkBsK0MiMO4UU2ompoogSEhAAGRz7OkkliRbAQYh2ZQN7WiazthQ8ogYp0xxgKoIEBCImJBIFAEawyrWkMIaJAISY2KgNBkdQKpakqMEx8EkFWTYhBp
giSEmJRVYmSbubKurfU2s088/b6/8bf/1iOXr7Y7BRFN22E+rOeZ4MuxRt5PO1mAeORYrgPb/QH7VU/i8PuKvrdFB1ruOwuHfTCawUXTn9PLyQFrICkmm9fkpYkeiQyIoAEjSQCRQFDBSAAIgrAjdqN0L9eoW9I1sPRGWGqXPddc6vD6Sn9lobtcUCfDxS5S4TPHucNxnQAJWURlsYB2i4lTXXHHuU5HY8wacZykGcemDr3c9e9sW5sB5Dv90aAcUlapMQImiKSUBI2CSQx1w03kkCAEHVZxPA7b/Xpj2AyrWDcxCQsziEy+F+DgMKI6RARwHtXh17/xza2Ne6vnLr17whTP1DjfRnoI9Tk9i+Oq88Trfd/+OFw8//NEQXHC/QmuoXAg8/fH/VQ1IRq68uhjtsjHmzuYtNNrAwtZbxCaGJqgqNlXn7/94vWtwKHdyguXPXrx/Pm1xWI3rK0vNKOxyrjV7lBZGymWe84odBYXiswOdneXVhdW1t5/5/Zt0UAWQhOz3OQtb8mU4xJQL16+MOzvJWVbOEILTtu5x1Hp2r671ClHo/5o0MmK3kKxvN4txyU6ZOVuxy8880isy8WivbLkukUoy+vDYb7QyZXjvTvmjY2qHsTbN183FK4sdz545QJ6Lz7nhE3Fd2/tfun69hv9ZjQM1udWGVWBSEFpdpLayS175Ne+FgeAfS/gZDrFcjnQPUes3TlQ6kTP70jWc8jI4c6/P1d/C5cAzZ+NNKOzwbb9b7ivOJqdzDN/Ps/0FUHe2dz86pe/KYCimkJ03qHBGJMhW1c1khrTahd5t+icWz63tNButwxgBIp7dbP1+rXETVPVRdFx3nXb3XPrF/K81Wt1vfOdTqbjgNggpMwV1ueOGIyJ4MoESNZ1ckJIqkVeiAIRGUcGUBGRjAUCVWONeHTesygaEzmRWiK0aJpmCCFYSHE8ZoHYhKYccV2a1DT9QT3cY+o+/9ytNzfrBGAsGURFNNYAqPF2PKzzvC2sE+U2XQV4uKFP4asHpeMC/W1m9QCWyLzTC8eGxtuW4O+e5zBFgnG260phuhrzcLKTTP+3VqnTXKMz0k5xBlQFMsY3qflzf+Ev/PgPfu+Xbt97887481995clHuz/w/U8/cq7Va9mUgGPlSbMsU0kCRhIjK3NSlKyVBQ4auZ17jGrJBmEVlphCSImxiRCTGEeZs8jEAdEmo4hqkGyQJCJkSRAjME9CMjqjAGSNiCqiTI5qQUgsScQ4AjLCqgoKGEQCSxRtmAOrOkwASVIMYpgQbR3in/3f/On/4v/635CxrXahrFMdOOmkuWX8Rxr9RPQBZu/NG/vvfEnO2bb+w6IZvjQTl9Nzzb7jaV6B46Em1oNlu4DqyPzYz/zRf/Xbn/rNX/3VFZcNYxwl9V7JQkisAIqQFADAK5CwAY2JRWA34l4wr+9GEumgFC72srKbc89jt2WLTray0LHUAEm7XSx0W4iIKZKj3IvPMrFWQVWMtxkIi60H1e5uieO9puhacbYurTIIhqhaJymTlKEJAauGm6B1lKrWURUkQUrMIolj4ghwFHWf2C37axWmtqboYLf69V/+/Bc+85U/+Ecvv6uAyr85Pudb+tD93pm/M69bcQb5H7GLFA5NH+Ps5cMm0cFe/4MAKaAKfO/OTh241ekOdvbKqrYGjeYLi4u3bt6uq+CcbXWLKkRy+bBMm+VoZ6cPTSgKm2d2bXlxYSEr2sVCr726vNguHAJ5EzudPGstbA2CMQ0VGSm1ukXRgjz3RGgMVaVjhlbeGRSgAoCkANZhRrjUcavrPZ+5wU6LnLTbufctn3lLPnEITWBW73NvF1MaBZCRZqOdsQi9cWN3e3P7+Ve37+015851L3Xlh77vk1fPLd5586VSIKbadou7ZRhSFmyWRMmQcT40je8SHsRInYfkTnaz5g9ZnjT71NxUPEUPn3jzYEXnMS446KqjoDgcTXvELTx+fTZ9+/cAnEgn1v70rzrRcZi8AUTm1Vfe+OynP49gDQlYIyrcRGddTGKMFxVWJid5TnXTMHVst4UGrQHnTQEodYMRYqjrarwxHG1tv7q9t1WORkVeQIJ2q7XY6bZ7rQsXlovMLS90CBLZ5PMs99aBGWzvKqrvxLxwMbAIxBCLdpGCWGebJllCVY2JmMUSoqqIMgcCtBSqfo0IoSwxKjcxNQ3HoKRlv68sNfELN7bLBIyAhCpgjCHEJsTMuwblD/30H5yEh5u1xwny94Ftx1PpYaCSJ/TinI6ayrM5BP3UTN6q5nr39NHEOJxtwJpM1B3ChE9HG/CUp2eX9SCvTJe9zwzXCW4tmfXra+d//ud//v/+X/7X4wRLy4u/9eUvfu65b6y15bs/svrUpbVnnjzXWcRqNNQoBi2nUlJAUZ85qRtI4gwQKgKFFFSiqhpQaAKIcd4mwRDFoDrSCOIMiEBSUmPAsAol0QScRERRcRKgFDkpkhIZAVAgJEQGS8iAkUUMNcxNTE1iFmAVVogxQcQkbIxTBCBqL7V+6N/50b/w3/7lOoZWu5hOyEwPC5/Z/Q8ce22+racIzjFI5vhbb68f3w0bbD7DyRzpwy7hPU+Hm2BftEziJ1rSK48+/t/8xf/n/+0//Y8/++u/XlfRAGgSUTRIiNNY4aIaFSAqAiMAAVhQkpQwiUJpoBLZrhgq8tbKvYjAhWtEAqNmdizMRJJbm2XOYyILxjoAkxksCpN7HVc8Lhuf5SEy4GBcJ1FA8uMqCGCILKBJlBCbOimAqiBiiAEMTZB+QpwEuJITjEvFg3+AhDHpysri6tpq0WudqiceHr3XZmvfU3QaTjoP8B9NgEcTz35MpD1OXIcZ6IP77oECGuMwmaaqstwj6aBsIhBa7Pa6xoxJNY7Ha4tLdQxRpd1rpxCp1VFHw8TDu0N8c9xpZVFvZ4Ula7wvHIA30GrlSwsL1miegbewvNwFkW4vK1o+b2XVqG7lvo5p2Kg3llmsTRk6QMxzn4Gy6tqF9bu3rkOROW/qUKOpDUhIjYS0vTlaWV+oqtDfGw3H8eatnY2tOoQUUxpH+9GPPPqBp4pnLhQL6/kb9+7tbg8aKDbGuzXcuN33hvwO0zAqtgtnMwGm6Sz9oWO6jjT4YbfgoKGnG5Om/sPxjrpvVx+5PjbXcOze4Y4/6gYcz/ds+pY6APdtkiPeznH/aP/OSXMyeljQTawoFdbzFy+E2FjrxsMhGktIiigiCEpEohpiunVnG0JoFZ1hGfwodnqFM94iOkLIXSNV0V7wrm1AAblTdJpQllW1vdu/vTX42vOv7Y3HgGAU263CYXr00uKHnnlkocjWl3rNeMzAeWayIut0OqKSQqxyx0kWVxaqcUCUWAfvHZLJM1+VZZ7nMdSxrjOHoWlaHY9lMAJGSDWhMjcMCqGG3cDXbo/C9KORrI0xxiRZlnlnfvZP/fs/8eO/RxWIpt4qHKxKfqCzaM+W1Mdb/G3Tqe8eAlnvT1NU972EM+HM0poYi7P2nz6aJ4XDUTfela1yB/PABw1LONkS88GPfuJv/71/VJl8tZ3/yr/4jc/9+i/
/5j//J5/+4r1Pf/r18yv24rJ95pnLT1y92MoWtCZNjRE1EEkNm0wkaRIEbKAIbESZMiPBcSMxYQCpRERJwKgFEJyuSlBQ40lRTIgiogaMATSqmiKDkiEDaJOKGmSWyMKsUbVqUtkEdTaqJFVAQjQcoyKlKHmrLSpZq7Bk/8yf+bP/p//LfxKFO+2OTDZ8TuJpzkx4PUDJTsUSDon7Gbvr1HycWkwP2gMnnWh1ItDz0Pt+BnPDnPfzHpsAmBclx0Gtt285niKrDtp6pvIRQdK58xf/h7/29/67/+z/+I9//hfyUO41XLOiIWUFhclRuQQAANabEFgAAgApiAIASAQABmAAiKEBAGuIrC+rWgAAGwAERIAIWk3MMNAGAFDFIAGIEICIcVUM0RrDvM+uE3seCUBgMiEBlkwUnu1SUFYgwMnMxsn24uw/JFDRiXGYF+7Rpx59+gPve6fSU++fwXuJ4R4azbPnO//AB9R3U9J5zTIn0fbR6inudHT8ENDlq5e6vc64GZXDEQJYQ3VVbTUhyzyAsIIzblzWIjIcVegaUjQenPiyajLvl3rdUIWq4ZCAXIpcxSosdjpE1WtvbMcYOr2WsJIB1ZhnJqVA3kgTV5c7LrPjcbnQacfEBGwsiiZk8Zk3ls6tLG/fubuw0PJZNhiOkAgBvC36g9FwHC9cXd/ZGSpRM+LBKBibiWgT6iRact3xS9fu7D77xnhjq/rQ01cHw/K1N7fv3t17fU8uP3ZVfO5brRg5KflWi6cG0rRx5i3PfUk5bWQ4yR6dSz37A3PscFo3noHtn4l1P1QN8a47APNfj6f8PIPR8YRGPjHV8Zdm2tng+QsX/p0f+qF/+Sv/whU+hhRFnTXKaqxBBIdkjB0OYEPixQuSO2l76GbW5z53yBx824smRUVv9oYjh6SZU8y8pTXnl0voudZWf7i1M3Im392tRCSX0Sefyn1IBWg1HprMEEJdD1daNnKAOmQuG+yNNGPP6r3dHu3abocMdboLzWAIXC91229ubmcLrcwSpqgxqBplrss6hMQx1QFGNd7thzvjxAgWQURDCGjB566J8Y/80Z/6q3/trzZN7HQy0bnBfwjGPGjPI97rEU47g1tP75eHQXPA/9lsfyB8587eexB6l+CoaRsebt9pQXhUNsyudQbRHD/S+v5lvbXa7U9FTGPhgAEELVpF7ghF4Ht/6Lt+6Mc/OKjv/vK//LXf+xM/KaF89tVXf+e37plfvbXQNd0Cz/XaTz92fmlhGTlKqCA0LjM+zxLUDY8AQJjGsXLeJRDrTe5jYFVFowBNxCaisxFI+inWnNmMQ2pqViQgiknE+yiJATmkEDmpNDE2TQQiNCaJJkICVDQpJjIgiZkVDZncZe3WT/7MT/30T//Md3/iu9vtDhFlWX54tcvk02fNB3O9c3J7HUBp+zw5b+WchhgdBzUmyviY3X2Ct/fQOXPGfvve51HY6ttOOt/AB2uzZuv055vtFA/qFDoiPE6SbQoAqogEpshMzenP//d/6frduz//i7/SK1qcqqhGSVBAAFXVGmIBFkVEi8DzBzQczh0AmGVU1nO39JCQUtl/kqYvAABIEABIzLNk+/w6fWHSNlEYAGQuzcQ3gPtKZlECmHgV463huYX1dtF6h9xwRKH8W3rb9CBm3lHX7pD/MZEzR4Xe3HsIhI9cvXL1kUdu3bkJqk0drTWIwKANs4qCailRCawh9BYQQ0pOIjJ22nmIzbjes8a1FjJCYma1sHLp3HgwJAssqdXLooTQRCRD3g36wRgjFRRZ984OhBDKijs9GQ+HREZEmxCsM0XLcKxbLR2NarKNRmznLjYhpLB+4dKdzX7u/NjprTcHSyuLuzuDMoWLly+U4zQqYXe33/lG+OGPX7w3oF/+/PU37o0+fLO5tNJxxert8XAQYVSxS+IUF3tdgjYL7c/kzmkDPTKOZ/P3c32Bh9LpQQ7H7f6TRuFR4wAerMOPuphHNM5bHXHfzhmAeXf5vg1wliA7JGkOI0gTLAThL/8//vL1P/rap3/rs73FliWbEqNFY0iYY2JrDSqWIX3zpTeunl8HQt/OfDuHhMqYxg0SsTCzkLWs4myeWDiyI2cMF9avdjv9e8PFXms8aBJLL8tMqHqLWVNud3tuVFeGrC8MQvRWq6ZsKGqoJJTtVntnb9sbUa6GdVMsOFdgVY/IQnvBB2lIURiBSBVCnRQosogSIKGjzcFwWCcBSAIsKXOZb5m6bP7G3/zrf/JP/Mmqanq93gHSd8iLmpxEe9Bsx9dAH1cER9yDd4/mOeG4rj7NCYF92+atjIN3SUvNeSOK89b//NPDd/bjkx/20R60rLf1BuxbpDDhCQeZAoAu9joA7T/+J/9DcIs/9vt/ZHmt85//R/+FWqM2jsaVDuJXXr7xP3/61VaG3kGRa07UamXtTmFAjKTM5y7zhAgcQx3JmigSEgMaZk4piYBQLJsUIkQx4yrWUUJUsMQKTUxA1HBSBCQXYmIABWJjBTBGQWMUIVRBRQmNRaxD440VhZ/6yZ/58//1n7/62OMAqCiGDCLpdCXWBBhFmBlP85L0RJolwSNddkZTz9uYePgCFPRg28H0zvxe4eN/3w3Cg3L13S7rLdGBYDnwnA48oxmfHm7Z/UcPRPu9MV/m9MHMzQBUbbczwPZ/9l/9pRj+zwtd6fVaf+P/+4uRgBMIT3wFEQVOAHOaf74YOP3ng9CDyNj7wgTHn88x8HSKwJLNLD32xBO///f/vk6n+w4nHvfxl/cIRz04vRc8liODUQ8LkPlkxyHROcW+/zGnuWNTIUhAhPSzf+p/+fkvfNY7nxfFeDwiNEiYQlQAa4wqKEuS2T4ZVWEJxNEbTjIaNQSQ5c6oSZLKmPb6w1ZeZORTkrIcWO9UFBUoQN00CtBut4yzdVkq4uLygnE0HIkh4xyhsarIiRWASQfjst1pI2kQHYdQx6a8dctmxreLN9+8Ox6VO8NhE1gRx/UNZiXrItp+hddu7L3w6t1vXN/ZrnT87PWnzi9dunj5+r1xa2khqZrUUNYKAT/+0Q/lrQL3wbkD0TNtziPS/mi/nGwJHbFqT6HjECDMLco4+63DN+Ad8O277gDM68LjdLYpeaKuPdwTOpVjB+XMp5xuvix8ph395V/511/72lcZ0s/9g3/4d/7W32l3W6Dq2y1oa101qYnkXTkKd+7tGLLkrHVeuTBEIKKaDKIIQMNESCCeEIhY2RSu3AnYdqbnpM1rV5eagVlcscWiL5vSIRRFjmSRqClD00qZN8Z6A0Tgw4jbuZUGWz6TiIVvYaJUSWZyqRMmQ0D///a+POiyo7rvnO67vPe+/ZvRrJrRSKMZaaQBCaGFKDgBJxIi7JIIe6rYdxdVSSoYB8rg5E9cGFc5FIUd27ErkBiCsOwKxggDBmRQkEDrjCSENNs38+3LW+7SffLH7b63+9773ve+73vfNvNOzbzvvXv79u3ldJ/fOX36NDKQsZAxCZIJ0zncDWS4sNhckLUnT002m5IIJMD4+IgU7DWvvetzn/3s3r2XS4Th4QGShAwtTt
Xo0jIQGauDxS9g/ywihh4qA51ZeV3ByjoJgATcdci80DnpVLve3hlkDTJ9QBKBlECMOAK85NZbX3b77UGr6Verr3vNP/3oxz946rFf3nzrDSDx6SeempqbkDU/aIWiHotQSmgihnEUOQykXPAqTJCUIbg+MgAhKYxAAgACSEBEQERk5KCQUkghpGqIWAAiSAFA4HhcUNQKpePzMBIuIufcq3phKCjGSsUHZLVabX5mac++vUuL9Q999AO/86nPIDLmMAQGwImSTfDanExG5QtKb5GKYrSohZaOi9Ivejt48qlzVS2/ISfCpWSfJbAV0A8AqEgFOWBLYE7yBNpTrkOJ29aneFV1qtEQiRh2gMR1Nx7/71/9amvubBTO/t39P5qenF2Usg4AAIEEAGCgje29ppXmubzCgOn8knySg9xxmETcd/DgG9/yb99w9z3Jjsw1ckOXJ5luKVpdade7juaUkjOE5b7kLqa+jajDT+cKm8KmatV/7evf8Mdf+eOHf/EIxUII6TgOAjJAAorj2OGcgEshVVEYkgQpZSsgIuKMAbJWGEsZAxLjKCQs1JtyaYkhAyIIAikoibpGkoio0WhOwrTrOIDSc30pJRBJVwRLIefMr1YI4nqzPjM/U/ErQdByHLYYtOI4Job1etOJkDFHhFEg41YyCIFEI3JdxiR56ExOLs003VOzcn6m4Xq15lJzYpoHbHqu1arJ0bAeVcf86sDgWHXne979Xu54mC7t2i1vwqVMn0JlqFNYPT/kcsKhXa6mxoFmQiOGfVfUjZmg0+MrczVYLZnaaq55TF22+BQW0thB0/NJlO3BNu5JSYwhgIxjEUkRNpdecfu/uDA1OTA4AARSCiRqBgEReC73GNu7d8eunWN7d41ftnNkcMAFGXmcOUgkpRQhgPRczmQsZBy2gigWi4tLc/XF82cma0NVv1JbmJ4+srd224sOVrmAIBqsDQ6PVUk2gYHju0hChsgBJidmLr9yP/crM9PTY4O1ZhjwasV3vVZzkXPuuk6z3mQOE5GUFIsY45hRTHGATRE3Q35mYumRF+bv+/HTT05FvFqRzeDAgcs///nfv/OOO5nnDtR8BCalRMZSRVVHAVJcp04ESRvbaLVivyyraK7HVJib8vJz2PYhUmcN5eBdZkcAgNxYzjaorn/pilYFSpwIkvA7RJzxIAxACkT5xGNPVSp0/9f/+st/9OVGo1GpuPVmSAyiWEgi7rhxGDEgh7OIBCGBIMaoUqkQQRAE6ohRBCAWS+E4jhQicbCJ48j1XJAQC8EdhshlLLnLATgBAGcEEDVDEce79u6s+ZWnn3t+746dl+3Z+9q7X/PAd75/9uyZq684/Jdf+8tde3YjMoIkwo3egZ2RYU/ump86Jyy92+Zi4sudbgrPipEaLLpUAtY6FjT3UW663FKUDQ5MfxLA2vYul9c1HY1J3onUICGRsYWlhThq/fkXv/g/v/KVX505j563SFIwAEkiEtQGnG0YtQMBLNF5GSAkcbQQ9AHViDgyNhI2whfdcN3I8I4PfvD9d9x5p1/xHce9+E7p6oY6C7h20qedjWzthekaDmWUnRlunE2VSPaiqm/o0yIMopNPnbj3zXefOzfRbLRikolhhGnXeJKUabrGfIXIJEkEBJCSEBmSFCrCMgDnTAoJDKUQiOg6ThTFAMAYEknH8YSIhZCIIAkYAymBAXCHJ7NkLKTDmRCS88TogrGQTKlEKKUhKxEZAiIwdAjIRXn08J5zFxbnFuvA0Ods7+7dAqHeaI4PDw8PDu3ac9me3Zd/4t//pyuPHHFdztBwhSgM5XS3FBV6xQbrRR4pQbuJLmbKIz3tW0IpR5gqdTnovGZQtHEHgQEkrJi/Tm3qbFKahrR0tGNepZaNNOKi8RgAU3oDcs44RxH7/+e+b33zW1//2lf/lyDJHR4GodNYkiIeGPQxIuK4FDYvzM7GLB5suQ6nqudwJE5AUjCQbggeoziOCEQQtjyHDbocBj0pIlGf94KFfQNje4arAzXZXGx4Feny2K84EgmANesBI859Tjz0h5iQcRwvMcaFqLvEgAhlxAAiGTMOrUbdIRGHMeeuyythzEPGG7Ly2KnJR06eefDxc6emI0CXubhjePdn/8t/fdOb75GRcF0HAEhqTwPFKWhxHaXbZY3+SWQEZDcAConad1DPCQvfe/sWXd2NkHaZ1koAxoSsbQCUNXOiyerhvc5UNvKSE26ZWhJAzgHA9VyGfhRH1994o+s4w0O7rzpy7fe//71HH3548sIUoZg4fX5kuFobrMzOhLWhatAIWq1oaHg4WGxUfTcOozCOXdeVQI7PyWVLs63KYIUi6TDGGGPAhcuiSLiO4/quEESEwDASot5q7duz+1Wvvuuqq4+Ezdb87Ny1x649fOTwzOTsDTe8eGBg+LJ9u973ngvzC3OHDh0cHhxG02NE19JaB+tgdWhD5QmNWbjDIyaHJfOTRt0qBKVOl0LxNRSpe8KsGD3IbZ0oLzDQLPOqM+18lQBAEjIAAOQMgAaHhmNZffeHPhDW579+331zC/PxfBAybAYBA4jTpzaJcq9O4YLUk4tQaIkQkTGGDI8cPfrJ3/7k8WPHx8fHxsbHa9UqADqOUyKkLw3qJN30cS56ZSO/TtdzEdI5t1L0D2A782mclAgWc8hYmIkIgLseXXv99X/xP776nz/9yed+/eu5ubnJqZnkSWRMxAKVuTANnQZAgBwTMcU5I0KQElUCvWNHKtBKBASUbJIhIuVBF8Vp6BwAdUYFABKBJJkUWEgJAEIQY0xKCamF1zAIICCqBQ8QFBNRTPDMrycjIQmRhCDuMMclQcMDw261ynz3plv++Uc+9lvDwyOMMwP9JxWz8bVuxbTpzAgOZF5Skt3cgKi7CFNDhgo1lu9qoy/JfL/+zApoKgO9AEUbtAKQUlEEwworkMCnkgGobqQDNJ9KCokcQQIgCREj8MWlxQcf/PF3vvt3jz7+SBi0HMA4ihCoWnEZw4rnDA3Xqr7LQHgu44yRFIgkojhsNkG2gAmPcxmHO0aHIYzOPfWCX/WYx8Kl+VuOH3rx9Zfv2DkQBc1QRE7FHam5XMBSvVXxKkKIWIZTs9O79+2SElr1hYrDYkkgnOqgNzc5Mz6+Y35xoTY04nM+c+58pToYxhQKf2Imem6y8YvnJn/yy1PPL4RLIUjmterhdcePfeT9H/1373lnrVbjmV6asmiqYlqoJ1WbLKXeaLUVYqQ+tSdSQ03HnylHh5pWZgleB9IO80QEhCreCQARMEJgJIUkioUQrRaCaDZb9ebSmefPDAzWuIMLi4vcZc+ceHYpCiYmz5KhhasAACEASURBVP7g2w/MX5icn5qamZ5HgGuuu/qa644evObQD7/7U+bA4z9/wq/4jutOnp++8upDkxcmJ2YXDuzf84Y33XPFVQe++fW/qQxUbrn5lrvvvvfGl74EAFqtoFLxk1aTkQBGjDnJOnUQhZ7nQ24SBkhHgTX19KJp2wcQavuC4iOUPQKw8Z2+zcy966uum1ETk/+SAJEYMkmAUiJjjz/00
He+c/93vvcPz58+dfqFMwMD1fm5Ra9WqdebQkhBwJJ5lPTG3vUnpttE2piQMy6k4MgESdfhUaw2Ex8/fvzP/+wvrj9+veMwEQvHdQDUeM/iA68DbZCt5eKldtAf0uupSSI51lbPMWTrAFaeagKQYRgvLSxOzU7/zqc+9Y2vf0MSOdyRCVoHBJKgl1J1wHFGWVA1kFIqPcFwbkFEIpnYIhnDxN6fqAEMmSTJkUuShhUEEREBGGOxFKADYjDGJEmgdHpMrABIJAGTVQFpzu2SoFb1CDEKo6rvXX5wfxyLgcHB3Zftfdc73/3Ge97k+p7HGQBIQchyaNS0rhvnuxv4W6lAdkd06hfVewip6gKqt5SzVtJulJ3qVVAj8hn2ZChttAKQUE4NWEVN2j+V3lFRgBR/S5A66DWRTFapJEkRx8jwzNlff/v/fuvnP//55IXzfsWPWyEJ4Tm8UvU95kgpKhUPSQZBAAhRGM5MzpEMg7ghYxG0GjuGhmRTYDNmjEkmUEaX7xs9duzA4cP7RoYrDoAzMDRSwwoFrVaDIzWX6g7KkIKR0UHuAEVR2Ii9qgthPDU9tX//3vm5+dpQtdWKmoux54/OLEZLIT3x3OT3f/bcw8/Mz0vZAk/yyoH9e+64647bbrr95bfeOjI+Uh2oJuMFlUqsxiBo/xMymzzPVmbMDUVm7/QUOF2qZDVvGi2v2MbpRL8JjZ1YWE2JoQcSqE3jyWRlwCOSBEAITOoKJRZH5vDFxUWf0dTEhSgMZqang2Z4/Q3HXc/lVSdohBNnTn/7r799/sL5mempiYnzd/2bO08+/avBoZH3vv99V1x9ddIOJGUcC8d1JQFQcmI7cIZkgR1KxIYk85wF0rJJm22KkRxW30p6e3dh1HT1uOHBaO/8Wk1ua6JeWNTXm2yjxHoqAHqNzhyc6cBMWCuKAYFajcb5M6fPnz9bbzTOTZyenJq+//6/fea5Zy9MTBFQCrV7S6n3DhGU6ReMIUNUdlMAAJAAbGx89NAVV7zqVXcKIZ9++ukDBw68461vv+Vlt0ohueOgrrX2H1l/briIpEiuKhtWs1LcabsA6XnE6OC2mEltEyAhpRQ0deHCK3/zFecnJ1uNRisIkSFJ4pyTlNrUnuBWSk/QUxarBItn7gWI5jIjUqIJqFjMjKlN6EiJr2ZSC844AUAiUxDVlJ6IGQ3BU3uOVGsSxFRKBCLHd0Wykuy5cRh5njs6PrZjx/jLbnnZ737ud0fGL3M5InJE/XiJF2QmlNP9AQSGELE1gbbYP99DGmcZIkplbEN/63M9uWprKQDF71Ro4azhEjCSLgqnAeMMn6pUbKTKAADojtczKQoRC4YkKX7yqcf/8fsPnDx5or40T0Iqy4okhiBEFEdR4rW2uFCHOJqdm5ufqzvcGRz0nz9xetfI4O79O6XDTj37AgLGssV9VvXQY2x8555rrxrfO+bu3bvrxS8+OuD7Nc8hkhXflTKQccR9X8bBwoWp2ZnZo9cfPXf61OBQrdViFy4snW/wv//hiUefOzsxs9AkV/Cxm2696dWved2tt/yzw1dcXq35koC54DBH6HZQn2lzq0GZ+htkJgKtqINxiF1bfFTEqn1aIemJWg86NBauzPbVY9Lk4Q0kYz5W+0YyeACZkDE0GjSKnVSOqZDlBCpcSmLaYYmJlBgAQBREnCEAcM4RKBQx4+CgJ0lwxpS0kJJxnsiwPNeR+S2bOdBu0UwT6KEWS+mQUvMKAmaC1NqnVHwuk8iQlVPFeTeKt2FYYhsN5XUsbbq/ACDTyyzNjLRKQIRAUhKR5K4r4zjZH/bDf/zhF77wBz/80Y/mZueTPDlHIYgzFIbL8ioIARljEpSynV3OvrPkJ+OeFGJ8547rrj/6zIlnd47veMe73vGhD39oaHg4iiIRhZ5XISDX4UYuWT7rDf/TNZaLg0rxSQ6o9GTiWRZklpVphQf96Um9vtR4/InH7r3n7gtTU0Er5IkjPkMCRiRT25COXYaUTPSJPgCQ2GgAQEVdUydXkF5S1ncIGEMppblHCwEYsmQgpsHaCAi0upGSBRQV7NEqBOdSku+73HXiIDpwYB8ifuiDH3zfez/iDVQ916F0VySU9kqqN0HJlxKEXuweUi2Ty93+WuzNdoi3CIA7dOCK2GxzFICE2oH7dimhWDclgcFUlsBaWy857gp1JD4yMyaQFMex4FwuLc4//PDPHnrowdOnnxdRgCSjoBU2gjgOCahRrzOGUT2Ynp5rNKP5pVaMsDC7uHdsYHx8pBkF3HXm662lVhM55wKiSLRC5jAZtUIUMOACl7Bz2D925OBNN14zNlK98qpdtaq/a8dlD//skR07d9TGBp89+dzZmfqvTs08ceLs01MLSzEePHTolptf8oY33n3bbbePDQ4SMu4yEcfccYQkxpg5hAA04FErdACgl5sMI0EBP+lPe84oMmKuU/q0MkrNJnnGNJMAAmyGbXa5XlXwB9GoRyksV1VLuYeyW8lttcFAGXLUWXWAIJPr6SQKoEVBfkrO5mn707ShG98VZu8J16brJCU91PEVpgJQvNNBc1gv2ogFhw4tsqxQ6z6rNRDpHsGMjShd/bdnPTJ+6zkWAUhIGbaCpXr9F488/PAjDwdBwBly7jz4Tw/+9Kc/PT9xAQEQ+cDwYBiEYRhzxpiLYRgBMCEkdzkiIFEcS89141gigVdxSMowCIWMAQAZBwLkiMAAUUbkuI7DeRCE4ztHR0dHWvWW6/u79ux76U03vP1db6OYrr7qqtHREUCs+J4QMrGSJvssTWMmYs+GRqdm1irWxaoAdFOtLhN3kgvLIM+MhXFFEwml7voEDL/xv7/x0MM/a9ab1xy79vc+93tLS/VWqxXHEUDiAkQAwBhnnMVRjAyZ2mhOycBJ1IAkP7VaYLhmpm52VHJoj6GOAiIqwSBJ5jBzCugQwdzUiACMsWq1wjgHxiq+9+nPfOqjH/54GErPd6U0Jm0yjS5FxQ3si2YNSg5zTGqUrZ9bjk1WprkXLIt7V0TtmKSUttwKQPJnRROEnqkt2amNcnbOasq2+iP5qdcEAICkjKUUJKPZ2ckTT/7y8Sd++dxzTzeWFqUU9aXGwvxCHARLs/X6YiNoBPP1RiA4iXj/jtqA5/u1oYnJ6aVGEzzkDq8v1ZshcN93KpUwjl3uecgoEhXGfMdxOTYbjeEhxoG5FXdhtnnFoX0LC/W5+cUA/fG9u3dctv837rjjN//Vvz60//KR4WFkTGLkOi6kyiUqPwyQhAxTBs14y5oe0PpKZa1cpq+WMmtfAVg1ZSpApxZEa+7ZPMqB6kxttuF4ll6tE6TCp8Apet04DbCQEGISSyLnFQPpUjZ0nNdKmkulWAdErfYFtlcDik+Uldy+mA7IDXQEygWkWB94Xci1XL5uNVpWfKqoCVIJjkQACSGkJM65lNJ1nDAKf/7/HvpvX/rSo48+VqtVRsfHfL/SaDafO/nCngN7ojiam1lwfX7mzMTUxLTjOo7n+K63VG+IKPKrvozjKGqOjI3VKjW/6sdBPDw6dODAwWPHrzn7wlkp
4/MTkxW/+rrXv+ZfvvI3gkZYr9evue7Y6PCo53ucsziOGOcMGGXjMtnWYx5dYAywjWnTLURrKlAHJFPk724w3yremKFXC+Xo95bi20I51c/UK52ISAJi0Aqr1cof/sEffvozn55fmPcrHkM+Oj4yNztPAlphC4gQmeM6URilQyBZIWAJBiYF3xlToXsSdx2HcyEEGlvVs/Ko+S9ZJUhc/DHRgVNKcXuyDzg5GJsAGIDr8iASN9zwolfddacU+La3vfWaY8cqnsu5S9rnRz/fritW0D8IdkjQnEJm+1SXNv5KFYN2ZALaLa0AmNROinevW2uPzZT9VOo0rF47rs+UATJaQqeVJCXFJEUUB62g8cILvz5x8olfPfvsyRNPzUxPnnn+XLC4GM8tjI8Mzi42KRa7R90Br9oKJDLwHQ4iqPp+DDAzHzZiPHbjS69/6ct2XX6wWqvEIl6cn2OMgmYopAiDRqXihkKGgdy957I4iC6/4uDBg1cfvvqw7/sMOXLkDidE7RKHBABSWrAjjXOiTCyk1sSygZJ8ZC4LxrNl6qxNWOiaVdPap7+LhQoTdmq6Mfh2K9SX7K+Zjm3PXZihilT0FKcXvTaV7q5SlwuCKl3fy3uwtjOgr5I/VjPV5o0OoIfYqjk0xwxr6nZjc95yWbWJULvOtD0UgGXIEDwEAETIGOgJloCkkASSMSZiIYQIw3BxcXFpaXFxcfHC5IUnHjsRhC0hxbkzExLiubn5E08+yxkODtWa9aaQYmF+AQCA8euOH7vn3nuOHrnGcdnM1OzOnTsvu2zXzl07pienXZ83m82RoeFKpVqtVREgCiNg6DqpnwOYvp9aMIK1vHhxzaqbRTkYBzYUxDYXV/qKXG7WzzIol9k6jTGXTqgpiyS5ZNHJFEeDiOMoDhlz7vvmfc+fev7aY8de+pKbYxH91V997T/+h9/2K04cxtx1ScgojhhjQiiYzrgjhUzOmuNcHcaSqAVCyLT8nDMpiNQGm7zAQAYMWeL2RpAZN02bRWYmAqhUfGDs6DVHP/CBD7z9bW8fHRmNw1CSdD0vjRsp8/tsi9Cm/XhArfQbs36GuY1s9M+NXsTPimCi3K2sAPSKDK0OU5iSzXspu2d7nQDaDMWU1RIPZgmCiKQQyAikDFvB1OTkyRMnp86fnZ04NTt5/rETzzz7xEkezg/5njtQ8313pOL5HOIgkg6/bO/hl9/x6rve+FZvdAc4bvIGSZKETLa3A6KMBSAy5BIkEgoZM+4wROQJgyGpopJ9fFdacjV6rMGjlQC1aG3rmNpdGfLKgJmDDclyF1dHOsONPPFoi1OJzr/crq1NJS009EaBUjHXmcw5N81UT6va/KNZ3hjQ6ml78cpsvA3Dk9m24h74Tlj1IVOkGEK623LpNWhY3tNC2T00Ltxwfks1yK3I5l1Suh0cDDt6yh2k/YwRUZJEYAAKCnHGAEDEsSSanZ6pNxtSEhIhssXFRUEiaAVzc/P79u89dOjK4eGhSMQud+IodlxHxFJKSUSu5yJCHEecOVm8B2UJAgKZ2P5TzVzJxMRGZGKUNejP25wyDLxmoVYuMXNUlKcrfZH5M99xViHse5gNNstWnQJWuyGkpOQECbAMChBGEQFErWhmbub+v/2bB/7+u5NTkxMTExcmJysVf3ZmNmg2qwMDQNBqBWSa7ZNQ/7raiMneYpY4HqUuGIlDA0MmpDS8l1XtGGAu1BUCVmq+63mc8d94+cs/9vGP3nzzrZ5X8XzH5S5AFqoiiSWqZ8V0y4DhNpKGtzDzx1TVzzViof/0lWwJe+NDrBmvzvFWaUE2TQHIqctrbyV9miYUhlgS2EFrAW08XwDsDQS68xJuI5LJoSpAgMBFLIhAoOCMSQlRTNPnLoj63OzkxNkLZyYnT7eWZhH52O69+w4ePnb8xtFdu5FxDqyIUvRQJUBggNLmGEufs0AzGfgeMqxg7KbXvGovBhjZroh6qAZcilJmm5NpPAKA5Jfh49il7EueTKRMGw0nx1ipm7JpdElTdlPi7UCWSU4NZ71nZ6UihIw/Xa4AaAC4KQoAAGynrrIp43sCQJCSmPZHTlG1OlAsnedNvaysd9NekASJ94PQEVGUr75uNg1N1JpaKjtMI262tFO0M+i9cGUmiD6tgDpPfx0MJMu2ertb1rs6YKlUE8jgrob72l6qUK8Ne7LtgwQEpBWBzF0HEWIpg2YADISQURDWm43Tp08/9uijTz751Nz87MmTTz/66KNxHDWbzVq12moFfsVr1huMcylEpVYVIgZJUSwQABlDRBHr1QPtP08EDk80ASaT/QJqYlPAdXRsCJnzxte//sO/9bEjV17tMFatVZPcMDkHGbS6k1SfmfAIsni/2ZW8egFpDtlvyCXJvM8N0+amDKbU9T2nALSTBFt3BWB1zae7WnVBpqmRGaXD0ubaUTq7grGDBRGlSEdT4pUDUkIUS4ehFLEECoKliouxEJz7rl8jxhylRhckXlq69CgJWyIgmBxquVjo4ilkBGojPJplNVJ2Ac26IzO3lXaQKWigL2u2NeVGWnZ1WXbTcsfyF9IhV5QimxqjoExTuNhwihWpwGi8Ve0IXlFbJbPLJu7J3PYzQRqIEDWylyRVJJMMP5Ba7NVYwlw9ypQ1ldjIPQmCDiiJGEMikFIyhqn2rSOuaM9mE/0bbl2bYIncNtSDyWQF9g/bAmjuVDQxSZfi1USvudRty5MLQ0kKQABabzR8gTLIhKAiNCSvTUyiQsrEwCglgSQJkjEOgGEUzs8tTM5M/uKRXzz7q2eeePyJ8xfOPf7Lx4dGBq+55tq3vPUtP/nJT/70K39aGajUF+oAxD1HhEoBcD0PAKMwQIYIICUdPnLVtceO/sMD32s0guR0i5e/4vbJM1PDY2Nf+OLn9+86uP/A5cl5HUmVbNuUqSgbTYZpbGsEANMIU9Jbuf5p00Orw0U9JHO1pLR4+StbVgFYHbWXfkXF20qTawXLrgMJwM5C62q5CWAHkMUsEjmQRCOwSRKnVj2ZDvlEchhPq+Jk4sH6JDROrUt8e9IeJ5P3MFUMsjbolQJgtc9qH7/YENylSWTNriW321CGS8zBl4VgsTbzW5x7kTqO6Upa1esRaFt2qKUiflNG5TZWAEhrTya3qvhyqaCgHLJXim7W6GUdbeLC9ErmdKye0aYfbRA1VxbaNWmWc38K7h2tSAFICFNNr9BpKzarGfZ9M/9cCbP3Gl9K3pWtQdp30bqVGisYgjoewGDjBAUlgfvjOA7DyHWdWMRPPfmUV/EOHbiiOlCdm5v7xMc/8d0fPBA2Wo7nXX/82JlTZ5utYGx05Lbbbt27f//C3Pzc3Mzs7NzRo0fuuffNL3rR8T/7kz/5oy9/aWZy8vBVh195xyviAK++8shb3nmv71Rcz4N0SNpCKR2PlOrqGY5KFYDURaSkC22Lle3uX5Jy8xUAK957xyJZCsAGFH0DTXk6b/VXGl2GYMbDMcjWENOMiKm9I5hN4sUnjUFmRliw8y3G/9Yv0YA+bRK0U2eqnfaFyBLo/C3/4VTXz7VEd5N
U6YxWOsFtXznSF4K9IdIxfyxPST3/AoC5IpXTdrNEl25/ZBEnoTBvbdh7N6H5c92/zcgy4RuWwhSIr0jY5XohpxVkFllQMMcE/dvhMLetSkbbrZr/cwrA8mpADnMZb10F+i99uBT2aQ4thwe5lKoWBSY2l60AgWTGmTldxrTFJ3tgpBCMMyJgjIVhSESnTp+WUo6NjlWHqnMzc5VKxXW9gVoNGICO7M84E7GMo8hzvZmZGWSIyIhEpVodrNUQGSjVGyw/CFXArCUI8rVKzkvKkKHpvlMg0/vOBNmZa2pvdoStifKT0lZTADaPEgecJOwsA2CaQQEK8q+oAIDh4pVcZIAKtCfXMQ0lDZq38tA5LQfkjJvtP82NzdkKgGZihJJQi10K1c7zlDmSrdw7Jl4FXdQsd8mRcm/IptrUBqPu6mUrAMNQo7n9EuUF89BlgI0fEhnU3IwOaGNJ2w5krb3a83bRp2I93r6J3XYRUQ8asivQ3yGp/Xpc4aig4oJRe5We7PzbVTlFPACGI4FpsEw5P4eMrfcAAiTea7qokDgLERBjjCRIEupUMVBnECBT2CwRGpLUBOlwJoRMjoZU+2GY8nKjQqsWvSdAb4PMu9ip0qaNVlhMKVD5CkBJJ6w7UaETicoUlfYFu9hcgDqSeXY6WzY1Wd9TnSpbXVFLuclFLOsNyLOFZeaxecja9afTZZkZC8BkZmtm0n5/81ooG1mF2aKzctk9bZYIW4OXRWlnX+pk6qv6Culp1ZZr/ZYzKLNCbmyz9ML6eanTRk4E/U7qMdl62tqbN9uC2fmlObJ3bXZfBrKls+kIVMAgVnq0ny3iE/PhUrM4QrYht03W+qSUVE1QZxbpbCHzntMuDQiZ1Yg0yDGCmzO1pR70m/PoVasl6XerWpj1uFm/fIvlQk204Y9cGxaTbxgVWt1msQ6aCa1WASjVYDuXbwsQGV9wpYXS4UOzXyawyfaYKJ7v6K2c5WXo1abfFgAY/J1j6GUjRNpKgTV6i/YAsJm4dFLoTFujc/u0BShnWdKnTqQ7fNs9dZH69m9xUnPDlpmfL37KaQvLtnzptJzNz1s6WvC2oR62X1Getk1R+lPTSnFkYd5tm0OphtAh23xdilzYps4mWgEAAiLKwvuozJQBDu2vyS3Sv9M358KZ599rWf0VtCLtMI2p3alQWLJ216hr9jbf9tC/qFltLpVoIx3FK4N8x3XKl/Q/ANA+NSV3yciTjM+tQJQpjSsrVNqGCJAEsVVKRMp/kHrmA0LHhSQVlxQUXyomQkAE1ByuPxN9Tnmq6VypTfY56J9eNPm+2CPF70W9Np/uIqKLtFobRFbraWa2+HN57iFcJkGf1oNModBv/NXQSlvNnNqhC7iAhqUK7evJH4QuculTJ8rkYU/GQNGClr+9XH+telhS4bP4s5i4A0JLGS/jQ4158pM62WyN5h1S5n0G6Z51ldDIJXXGSZYBEI1QESlUSi9i21FkLaSQBmNmTpRlkIIsVRZVkGXEUVGJKo7QjadiqVKs2IGQ0gMDjcdyjxQT2H/y6lPJazrd3FiyKrNCna1EIy7YOzu+0FQZu6fOOv2yhe3wYGdFNpdDh4ltC/XvJUlbwfZQQqpYnQSima5PfepTN7R1LI4XAfWwGZcX6x2Atn1hRV1cCkw6vCfFbubxACt7S7uX2QpQ7kis3Lb11GVUWfY7Q89EQShCLKOlDNt/x7azcZg+/QtzSbIaFQtThpk3dzx27v12eilS5paVJe2cu8Vqph9Y17TpjbV6KjGMr/jpUiUhRynmzuKOGs8uC+jbUW5mKXYrlr2rG9qWvQkA25cV10brWmsy/tpv6cC2fQLYhLbIwoFs6Gu3OW1wexUn7X5v9YT0DlpKDdxryq3zvRyUXI6HikmW5bpS4V6KGToAgA4vylew1OysszCjDBmBcZMGT1Nlr+rGUpl7m5XGqDx2HCSFWidB7BTgsmpRRl0qF+tBBYWoDaJTbaza3wypnStetgKQyw4K+eYuZkUA/WR76sxVfSqZEHKR0FeVW0LLjqtiAYpd31mp2Ka0FRT3i49shkl1gH5Lby1SEY2x70Pep0uZzChAPduL1FZcFmFWWZrs7IiVj83Ob1idyMvjzi6AnwUn7OZQmxvbWIbaYVywwQnk0uirqcrRuZIm1EmFlLnvIIXOuRqV1rio3K0HtdMG8whN3ygtf1ZOStIYLkCdEX+pvqhSQLeV7kuahEqZuDN1Zrvi9a61s66os8Fgm9J2L/9WJnNi1dTBaNI5QZ96STo0k4qZvdnF6VOfNokIAMk2Vfcgy7VnkS7JrdpE1Q0oW2W2bUDAus4j3RZ1VVXK7fg1c8r15lp6ZP1oWRMtlFmEkyfZshpdsZ6G5rSasvbJpFLsg/anqVx2A+tzqmrPORULX7YprYWT+9SRSG9zty6WptyQ8vQJAFRjJ5vpUB8/0O+APl2ilAbVgNTyu/Ys82J3FdkWcWfp9W7yocKVYrZdlkeBCtSVXCd4UUbrhf4BQPtZFJcd0levuv3Xm6jwpbQB1L5ngiRyDejN0IjAUvjffd1KUmIZr3X/+KVHJkY3RxcY39PP0jFc7HvzSwF7rZ7aTUYbRsX26VnOl9AhGBtIObs/5CSGlZSs/u13xzqTWnxXx5eQlNRv8z5dspSdFwvQ09mn8/J+V8iZSr52L3yxUIaVOGq0zdMql12xrgFgPnGX8n11CdrpP/nEbTA+2k23sVrP8mTZiI3QTOq6DlQJZvRI0j+TM3EJcnuA0wpbsrmTOrvCJtkibbfptNJJp1RDKH4pTb8WynXv5uKF3r4djbDEK5q/+tSBCElv7cL85GKkAgDIpqw+rT8ldiBMJQQhY/bNPvXpUiJ7/u8J5QRxKWok+0o7cNXZ8NdlSSA31a6Kyt0TMKtdOwNPhwxztV722eUTlKVo58/SNjd7FwjZvbalJ0m7bgjZBoCM37S3EOrIS0zdtwU1qroS5qxzueFS+tmxkZb1VboUiIx/3VOXrFzsk97Sxutv6/hGo4HW/hYyTsa4lMkI19yhVbME+l8PV636ZJOx2kjZ+gwmZ6NvKZtWn/q0cSQhMYdS7/i/iPXbmefare1rkKSkiYm7VgoYej60c3bA9YMZq6YVrSeYOlLpv5S2/vSYL6H29iGwYyJpMJ/cZfosKntbtl42UK2A9lIZ2JxlrkMsJ8RNNWEVIPjioM4js7PJtPiztAHN/HvYwumK2EaOh2L511gjSv8aB69BTyqFy5y7cSmQaS4gtZDYrscIzO5YW8degjNJt2SyOJKy6xAB2waCrU996i1lEw5TNlHtE9GDbDvL5XYP5l/dcWmgnfJQihA6VGotE2ZawK2mAxSR1bLN0hnPrLpb15tyRcoxAJk8bc3/6sjZ5NAEJJJABMjsxY5MZJsxVXNfAPRBEmRfLaVCG/dlTzsqNnh6vR2mRzvBxUdFFlt9TdX6FwkhOOdrzCzLlVZ6HsbFRh2mgWLDkP0Xy1P1qXdkTB+a/yXniRdQv+X7dMmQFqtCCM5ZEf+s32uxzZdc0bJ7GdIyrvQqWGmfClTaNV
saVhHkobnNNqmxP/8QghDCDAOqAoOqFComqk7bqUGWBf7dNuDWbWUAaFON1WmEJo4vDvM2L4fumzHHFaugLcX0ZH/FlYRss1OqJiFjaKS3yFB3c41njqNczDjKEi9Lpd2bBaHTtQN7rIHxqg3rk/y7DOmz6nwSWwHadWunzFrP5saLeS+1VRh9l3+2/ZjC0veV5gNA7Rkv6UFCO5OkKzXP5GpXLIk1HxS0qeKRgCvqDbLrgwASgNkpyGzD9lmXzv+r4o5Ll9baVmXyeDV5bhGgUwa8yGasnhStZAqx8E/PTgJYIxEAKgyWH5BtB1quH0sn1na6R7uu72DTWTkV8Y5ZtC6etlO1Q+qlVzpk1F0FzST5xzeXuuujVEqCZnuJwP4/rV48d/O1QTYAAAAASUVORK5CYII=",
- "text/plain": [
- ""
- ]
- },
- "execution_count": 8,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "image_grid(stage1_imgs, rows=2, cols=4)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 13,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "CUDA_VISIBLE_DEVICES=0 python exp_runner_generic_blender_val.py --specific_dataset_name /haian-fast-vol/code_debug/code_release/One-2-3-45/exp/01_wild_hydrant --mode export_mesh --conf confs/one2345_lod0_val_demo.conf --resolution 256\n",
- "detected \u001b[1;36m1\u001b[0m GPUs\n",
- "base_exp_dir: exp/lod0\n",
- "Store in: \u001b[35m/haian-fast-vol/code_debug/code_release/One-2-3-45/exp/\u001b[0m\u001b[95m01_wild_hydrant\u001b[0m\n",
- "depth_loss_weight: 1.0\n"
- ]
- },
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "[exp_runner_generic_blender_val.py:148 - __init__() ] Find checkpoint: ckpt_215000.pth\n",
- "[exp_runner_generic_blender_val.py:483 - load_checkpoint() ] End\n",
- "[exp_runner_generic_blender_val.py:555 - export_mesh() ] Validate begin\n"
- ]
- },
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "iter_step: \u001b[1;36m215000\u001b[0m\n",
- "export mesh time: 4.015656232833862\n",
- "Mesh saved to: /haian-fast-vol/code_debug/code_release/One-2-3-45/exp/01_wild_hydrant/mesh.glb\n"
- ]
- }
- ],
- "source": [
- "# utilize cost volume-based 3D reconstruction to generate textured 3D mesh\n",
- "mesh_path = reconstruct(example_dir, output_format=\".glb\", device_idx=_GPU_INDEX)\n",
- "print(\"Mesh saved to:\", mesh_path)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 14,
- "metadata": {},
- "outputs": [
- {
- "data": {
- "application/javascript": "(function(root) {\n function now() {\n return new Date();\n }\n\n var force = true;\n var py_version = '3.2.2'.replace('rc', '-rc.').replace('.dev', '-dev.');\n var is_dev = py_version.indexOf(\"+\") !== -1 || py_version.indexOf(\"-\") !== -1;\n var reloading = false;\n var Bokeh = root.Bokeh;\n var bokeh_loaded = Bokeh != null && (Bokeh.version === py_version || (Bokeh.versions !== undefined && Bokeh.versions.has(py_version)));\n\n if (typeof (root._bokeh_timeout) === \"undefined\" || force) {\n root._bokeh_timeout = Date.now() + 5000;\n root._bokeh_failed_load = false;\n }\n\n function run_callbacks() {\n try {\n root._bokeh_onload_callbacks.forEach(function(callback) {\n if (callback != null)\n callback();\n });\n } finally {\n delete root._bokeh_onload_callbacks;\n }\n console.debug(\"Bokeh: all callbacks have finished\");\n }\n\n function load_libs(css_urls, js_urls, js_modules, js_exports, callback) {\n if (css_urls == null) css_urls = [];\n if (js_urls == null) js_urls = [];\n if (js_modules == null) js_modules = [];\n if (js_exports == null) js_exports = {};\n\n root._bokeh_onload_callbacks.push(callback);\n\n if (root._bokeh_is_loading > 0) {\n console.debug(\"Bokeh: BokehJS is being loaded, scheduling callback at\", now());\n return null;\n }\n if (js_urls.length === 0 && js_modules.length === 0 && Object.keys(js_exports).length === 0) {\n run_callbacks();\n return null;\n }\n if (!reloading) {\n console.debug(\"Bokeh: BokehJS not loaded, scheduling load and callback at\", now());\n }\n\n function on_load() {\n root._bokeh_is_loading--;\n if (root._bokeh_is_loading === 0) {\n console.debug(\"Bokeh: all BokehJS libraries/stylesheets loaded\");\n run_callbacks()\n }\n }\n window._bokeh_on_load = on_load\n\n function on_error() {\n console.error(\"failed to load \" + url);\n }\n\n var skip = [];\n if (window.requirejs) {\n window.requirejs.config({'packages': {}, 'paths': {'jspanel': 'https://cdn.jsdelivr.net/npm/jspanel4@4.12.0/dist/jspanel', 'jspanel-modal': 'https://cdn.jsdelivr.net/npm/jspanel4@4.12.0/dist/extensions/modal/jspanel.modal', 'jspanel-tooltip': 'https://cdn.jsdelivr.net/npm/jspanel4@4.12.0/dist/extensions/tooltip/jspanel.tooltip', 'jspanel-hint': 'https://cdn.jsdelivr.net/npm/jspanel4@4.12.0/dist/extensions/hint/jspanel.hint', 'jspanel-layout': 'https://cdn.jsdelivr.net/npm/jspanel4@4.12.0/dist/extensions/layout/jspanel.layout', 'jspanel-contextmenu': 'https://cdn.jsdelivr.net/npm/jspanel4@4.12.0/dist/extensions/contextmenu/jspanel.contextmenu', 'jspanel-dock': 'https://cdn.jsdelivr.net/npm/jspanel4@4.12.0/dist/extensions/dock/jspanel.dock', 'gridstack': 'https://cdn.jsdelivr.net/npm/gridstack@7.2.3/dist/gridstack-all', 'notyf': 'https://cdn.jsdelivr.net/npm/notyf@3/notyf.min'}, 'shim': {'jspanel': {'exports': 'jsPanel'}, 'gridstack': {'exports': 'GridStack'}}});\n require([\"jspanel\"], function(jsPanel) {\n\twindow.jsPanel = jsPanel\n\ton_load()\n })\n require([\"jspanel-modal\"], function() {\n\ton_load()\n })\n require([\"jspanel-tooltip\"], function() {\n\ton_load()\n })\n require([\"jspanel-hint\"], function() {\n\ton_load()\n })\n require([\"jspanel-layout\"], function() {\n\ton_load()\n })\n require([\"jspanel-contextmenu\"], function() {\n\ton_load()\n })\n require([\"jspanel-dock\"], function() {\n\ton_load()\n })\n require([\"gridstack\"], function(GridStack) {\n\twindow.GridStack = GridStack\n\ton_load()\n })\n require([\"notyf\"], function() {\n\ton_load()\n })\n root._bokeh_is_loading = css_urls.length + 9;\n } else {\n 
root._bokeh_is_loading = css_urls.length + js_urls.length + js_modules.length + Object.keys(js_exports).length;\n }\n\n var existing_stylesheets = []\n var links = document.getElementsByTagName('link')\n for (var i = 0; i < links.length; i++) {\n var link = links[i]\n if (link.href != null) {\n\texisting_stylesheets.push(link.href)\n }\n }\n for (var i = 0; i < css_urls.length; i++) {\n var url = css_urls[i];\n if (existing_stylesheets.indexOf(url) !== -1) {\n\ton_load()\n\tcontinue;\n }\n const element = document.createElement(\"link\");\n element.onload = on_load;\n element.onerror = on_error;\n element.rel = \"stylesheet\";\n element.type = \"text/css\";\n element.href = url;\n console.debug(\"Bokeh: injecting link tag for BokehJS stylesheet: \", url);\n document.body.appendChild(element);\n } if (((window['jsPanel'] !== undefined) && (!(window['jsPanel'] instanceof HTMLElement))) || window.requirejs) {\n var urls = ['https://cdn.holoviz.org/panel/1.2.1/dist/bundled/floatpanel/jspanel4@4.12.0/dist/jspanel.js', 'https://cdn.holoviz.org/panel/1.2.1/dist/bundled/floatpanel/jspanel4@4.12.0/dist/extensions/modal/jspanel.modal.js', 'https://cdn.holoviz.org/panel/1.2.1/dist/bundled/floatpanel/jspanel4@4.12.0/dist/extensions/tooltip/jspanel.tooltip.js', 'https://cdn.holoviz.org/panel/1.2.1/dist/bundled/floatpanel/jspanel4@4.12.0/dist/extensions/hint/jspanel.hint.js', 'https://cdn.holoviz.org/panel/1.2.1/dist/bundled/floatpanel/jspanel4@4.12.0/dist/extensions/layout/jspanel.layout.js', 'https://cdn.holoviz.org/panel/1.2.1/dist/bundled/floatpanel/jspanel4@4.12.0/dist/extensions/contextmenu/jspanel.contextmenu.js', 'https://cdn.holoviz.org/panel/1.2.1/dist/bundled/floatpanel/jspanel4@4.12.0/dist/extensions/dock/jspanel.dock.js'];\n for (var i = 0; i < urls.length; i++) {\n skip.push(urls[i])\n }\n } if (((window['GridStack'] !== undefined) && (!(window['GridStack'] instanceof HTMLElement))) || window.requirejs) {\n var urls = ['https://cdn.holoviz.org/panel/1.2.1/dist/bundled/gridstack/gridstack@7.2.3/dist/gridstack-all.js'];\n for (var i = 0; i < urls.length; i++) {\n skip.push(urls[i])\n }\n } if (((window['Notyf'] !== undefined) && (!(window['Notyf'] instanceof HTMLElement))) || window.requirejs) {\n var urls = ['https://cdn.holoviz.org/panel/1.2.1/dist/bundled/notificationarea/notyf@3/notyf.min.js'];\n for (var i = 0; i < urls.length; i++) {\n skip.push(urls[i])\n }\n } var existing_scripts = []\n var scripts = document.getElementsByTagName('script')\n for (var i = 0; i < scripts.length; i++) {\n var script = scripts[i]\n if (script.src != null) {\n\texisting_scripts.push(script.src)\n }\n }\n for (var i = 0; i < js_urls.length; i++) {\n var url = js_urls[i];\n if (skip.indexOf(url) !== -1 || existing_scripts.indexOf(url) !== -1) {\n\tif (!window.requirejs) {\n\t on_load();\n\t}\n\tcontinue;\n }\n var element = document.createElement('script');\n element.onload = on_load;\n element.onerror = on_error;\n element.async = false;\n element.src = url;\n console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n document.head.appendChild(element);\n }\n for (var i = 0; i < js_modules.length; i++) {\n var url = js_modules[i];\n if (skip.indexOf(url) !== -1 || existing_scripts.indexOf(url) !== -1) {\n\tif (!window.requirejs) {\n\t on_load();\n\t}\n\tcontinue;\n }\n var element = document.createElement('script');\n element.onload = on_load;\n element.onerror = on_error;\n element.async = false;\n element.src = url;\n element.type = \"module\";\n console.debug(\"Bokeh: injecting script 
tag for BokehJS library: \", url);\n document.head.appendChild(element);\n }\n for (const name in js_exports) {\n var url = js_exports[name];\n if (skip.indexOf(url) >= 0 || root[name] != null) {\n\tif (!window.requirejs) {\n\t on_load();\n\t}\n\tcontinue;\n }\n var element = document.createElement('script');\n element.onerror = on_error;\n element.async = false;\n element.type = \"module\";\n console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n element.textContent = `\n import ${name} from \"${url}\"\n window.${name} = ${name}\n window._bokeh_on_load()\n `\n document.head.appendChild(element);\n }\n if (!js_urls.length && !js_modules.length) {\n on_load()\n }\n };\n\n function inject_raw_css(css) {\n const element = document.createElement(\"style\");\n element.appendChild(document.createTextNode(css));\n document.body.appendChild(element);\n }\n\n var js_urls = [\"https://cdn.bokeh.org/bokeh/release/bokeh-3.2.2.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-gl-3.2.2.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-widgets-3.2.2.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-tables-3.2.2.min.js\", \"https://cdn.holoviz.org/panel/1.2.1/dist/panel.min.js\"];\n var js_modules = [];\n var js_exports = {};\n var css_urls = [];\n var inline_js = [ function(Bokeh) {\n Bokeh.set_log_level(\"info\");\n },\nfunction(Bokeh) {} // ensure no trailing comma for IE\n ];\n\n function run_inline_js() {\n if ((root.Bokeh !== undefined) || (force === true)) {\n for (var i = 0; i < inline_js.length; i++) {\n inline_js[i].call(root, root.Bokeh);\n }\n // Cache old bokeh versions\n if (Bokeh != undefined && !reloading) {\n\tvar NewBokeh = root.Bokeh;\n\tif (Bokeh.versions === undefined) {\n\t Bokeh.versions = new Map();\n\t}\n\tif (NewBokeh.version !== Bokeh.version) {\n\t Bokeh.versions.set(NewBokeh.version, NewBokeh)\n\t}\n\troot.Bokeh = Bokeh;\n }} else if (Date.now() < root._bokeh_timeout) {\n setTimeout(run_inline_js, 100);\n } else if (!root._bokeh_failed_load) {\n console.log(\"Bokeh: BokehJS failed to load within specified timeout.\");\n root._bokeh_failed_load = true;\n }\n root._bokeh_is_initializing = false\n }\n\n function load_or_wait() {\n // Implement a backoff loop that tries to ensure we do not load multiple\n // versions of Bokeh and its dependencies at the same time.\n // In recent versions we use the root._bokeh_is_initializing flag\n // to determine whether there is an ongoing attempt to initialize\n // bokeh, however for backward compatibility we also try to ensure\n // that we do not start loading a newer (Panel>=1.0 and Bokeh>3) version\n // before older versions are fully initialized.\n if (root._bokeh_is_initializing && Date.now() > root._bokeh_timeout) {\n root._bokeh_is_initializing = false;\n root._bokeh_onload_callbacks = undefined;\n console.log(\"Bokeh: BokehJS was loaded multiple times but one version failed to initialize.\");\n load_or_wait();\n } else if (root._bokeh_is_initializing || (typeof root._bokeh_is_initializing === \"undefined\" && root._bokeh_onload_callbacks !== undefined)) {\n setTimeout(load_or_wait, 100);\n } else {\n Bokeh = root.Bokeh;\n bokeh_loaded = Bokeh != null && (Bokeh.version === py_version || (Bokeh.versions !== undefined && Bokeh.versions.has(py_version)));\n root._bokeh_is_initializing = true\n root._bokeh_onload_callbacks = []\n if (!reloading && (!bokeh_loaded || is_dev)) {\n\troot.Bokeh = undefined;\n }\n load_libs(css_urls, js_urls, js_modules, js_exports, function() {\n\tconsole.debug(\"Bokeh: BokehJS 
plotting callback run at\", now());\n\trun_inline_js();\n });\n }\n }\n // Give older versions of the autoload script a head-start to ensure\n // they initialize before we start loading newer version.\n setTimeout(load_or_wait, 100)\n}(window));",
- "application/vnd.holoviews_load.v0+json": ""
- },
- "metadata": {},
- "output_type": "display_data"
- },
- {
- "data": {
- "application/javascript": "\nif ((window.PyViz === undefined) || (window.PyViz instanceof HTMLElement)) {\n window.PyViz = {comms: {}, comm_status:{}, kernels:{}, receivers: {}, plot_index: []}\n}\n\n\n function JupyterCommManager() {\n }\n\n JupyterCommManager.prototype.register_target = function(plot_id, comm_id, msg_handler) {\n if (window.comm_manager || ((window.Jupyter !== undefined) && (Jupyter.notebook.kernel != null))) {\n var comm_manager = window.comm_manager || Jupyter.notebook.kernel.comm_manager;\n comm_manager.register_target(comm_id, function(comm) {\n comm.on_msg(msg_handler);\n });\n } else if ((plot_id in window.PyViz.kernels) && (window.PyViz.kernels[plot_id])) {\n window.PyViz.kernels[plot_id].registerCommTarget(comm_id, function(comm) {\n comm.onMsg = msg_handler;\n });\n } else if (typeof google != 'undefined' && google.colab.kernel != null) {\n google.colab.kernel.comms.registerTarget(comm_id, (comm) => {\n var messages = comm.messages[Symbol.asyncIterator]();\n function processIteratorResult(result) {\n var message = result.value;\n console.log(message)\n var content = {data: message.data, comm_id};\n var buffers = []\n for (var buffer of message.buffers || []) {\n buffers.push(new DataView(buffer))\n }\n var metadata = message.metadata || {};\n var msg = {content, buffers, metadata}\n msg_handler(msg);\n return messages.next().then(processIteratorResult);\n }\n return messages.next().then(processIteratorResult);\n })\n }\n }\n\n JupyterCommManager.prototype.get_client_comm = function(plot_id, comm_id, msg_handler) {\n if (comm_id in window.PyViz.comms) {\n return window.PyViz.comms[comm_id];\n } else if (window.comm_manager || ((window.Jupyter !== undefined) && (Jupyter.notebook.kernel != null))) {\n var comm_manager = window.comm_manager || Jupyter.notebook.kernel.comm_manager;\n var comm = comm_manager.new_comm(comm_id, {}, {}, {}, comm_id);\n if (msg_handler) {\n comm.on_msg(msg_handler);\n }\n } else if ((plot_id in window.PyViz.kernels) && (window.PyViz.kernels[plot_id])) {\n var comm = window.PyViz.kernels[plot_id].connectToComm(comm_id);\n comm.open();\n if (msg_handler) {\n comm.onMsg = msg_handler;\n }\n } else if (typeof google != 'undefined' && google.colab.kernel != null) {\n var comm_promise = google.colab.kernel.comms.open(comm_id)\n comm_promise.then((comm) => {\n window.PyViz.comms[comm_id] = comm;\n if (msg_handler) {\n var messages = comm.messages[Symbol.asyncIterator]();\n function processIteratorResult(result) {\n var message = result.value;\n var content = {data: message.data};\n var metadata = message.metadata || {comm_id};\n var msg = {content, metadata}\n msg_handler(msg);\n return messages.next().then(processIteratorResult);\n }\n return messages.next().then(processIteratorResult);\n }\n }) \n var sendClosure = (data, metadata, buffers, disposeOnDone) => {\n return comm_promise.then((comm) => {\n comm.send(data, metadata, buffers, disposeOnDone);\n });\n };\n var comm = {\n send: sendClosure\n };\n }\n window.PyViz.comms[comm_id] = comm;\n return comm;\n }\n window.PyViz.comm_manager = new JupyterCommManager();\n \n\n\nvar JS_MIME_TYPE = 'application/javascript';\nvar HTML_MIME_TYPE = 'text/html';\nvar EXEC_MIME_TYPE = 'application/vnd.holoviews_exec.v0+json';\nvar CLASS_NAME = 'output';\n\n/**\n * Render data to the DOM node\n */\nfunction render(props, node) {\n var div = document.createElement(\"div\");\n var script = document.createElement(\"script\");\n node.appendChild(div);\n node.appendChild(script);\n}\n\n/**\n * Handle when a 
new output is added\n */\nfunction handle_add_output(event, handle) {\n var output_area = handle.output_area;\n var output = handle.output;\n if ((output.data == undefined) || (!output.data.hasOwnProperty(EXEC_MIME_TYPE))) {\n return\n }\n var id = output.metadata[EXEC_MIME_TYPE][\"id\"];\n var toinsert = output_area.element.find(\".\" + CLASS_NAME.split(' ')[0]);\n if (id !== undefined) {\n var nchildren = toinsert.length;\n var html_node = toinsert[nchildren-1].children[0];\n html_node.innerHTML = output.data[HTML_MIME_TYPE];\n var scripts = [];\n var nodelist = html_node.querySelectorAll(\"script\");\n for (var i in nodelist) {\n if (nodelist.hasOwnProperty(i)) {\n scripts.push(nodelist[i])\n }\n }\n\n scripts.forEach( function (oldScript) {\n var newScript = document.createElement(\"script\");\n var attrs = [];\n var nodemap = oldScript.attributes;\n for (var j in nodemap) {\n if (nodemap.hasOwnProperty(j)) {\n attrs.push(nodemap[j])\n }\n }\n attrs.forEach(function(attr) { newScript.setAttribute(attr.name, attr.value) });\n newScript.appendChild(document.createTextNode(oldScript.innerHTML));\n oldScript.parentNode.replaceChild(newScript, oldScript);\n });\n if (JS_MIME_TYPE in output.data) {\n toinsert[nchildren-1].children[1].textContent = output.data[JS_MIME_TYPE];\n }\n output_area._hv_plot_id = id;\n if ((window.Bokeh !== undefined) && (id in Bokeh.index)) {\n window.PyViz.plot_index[id] = Bokeh.index[id];\n } else {\n window.PyViz.plot_index[id] = null;\n }\n } else if (output.metadata[EXEC_MIME_TYPE][\"server_id\"] !== undefined) {\n var bk_div = document.createElement(\"div\");\n bk_div.innerHTML = output.data[HTML_MIME_TYPE];\n var script_attrs = bk_div.children[0].attributes;\n for (var i = 0; i < script_attrs.length; i++) {\n toinsert[toinsert.length - 1].childNodes[1].setAttribute(script_attrs[i].name, script_attrs[i].value);\n }\n // store reference to server id on output_area\n output_area._bokeh_server_id = output.metadata[EXEC_MIME_TYPE][\"server_id\"];\n }\n}\n\n/**\n * Handle when an output is cleared or removed\n */\nfunction handle_clear_output(event, handle) {\n var id = handle.cell.output_area._hv_plot_id;\n var server_id = handle.cell.output_area._bokeh_server_id;\n if (((id === undefined) || !(id in PyViz.plot_index)) && (server_id !== undefined)) { return; }\n var comm = window.PyViz.comm_manager.get_client_comm(\"hv-extension-comm\", \"hv-extension-comm\", function () {});\n if (server_id !== null) {\n comm.send({event_type: 'server_delete', 'id': server_id});\n return;\n } else if (comm !== null) {\n comm.send({event_type: 'delete', 'id': id});\n }\n delete PyViz.plot_index[id];\n if ((window.Bokeh !== undefined) & (id in window.Bokeh.index)) {\n var doc = window.Bokeh.index[id].model.document\n doc.clear();\n const i = window.Bokeh.documents.indexOf(doc);\n if (i > -1) {\n window.Bokeh.documents.splice(i, 1);\n }\n }\n}\n\n/**\n * Handle kernel restart event\n */\nfunction handle_kernel_cleanup(event, handle) {\n delete PyViz.comms[\"hv-extension-comm\"];\n window.PyViz.plot_index = {}\n}\n\n/**\n * Handle update_display_data messages\n */\nfunction handle_update_output(event, handle) {\n handle_clear_output(event, {cell: {output_area: handle.output_area}})\n handle_add_output(event, handle)\n}\n\nfunction register_renderer(events, OutputArea) {\n function append_mime(data, metadata, element) {\n // create a DOM node to render to\n var toinsert = this.create_output_subarea(\n metadata,\n CLASS_NAME,\n EXEC_MIME_TYPE\n );\n 
this.keyboard_manager.register_events(toinsert);\n // Render to node\n var props = {data: data, metadata: metadata[EXEC_MIME_TYPE]};\n render(props, toinsert[0]);\n element.append(toinsert);\n return toinsert\n }\n\n events.on('output_added.OutputArea', handle_add_output);\n events.on('output_updated.OutputArea', handle_update_output);\n events.on('clear_output.CodeCell', handle_clear_output);\n events.on('delete.Cell', handle_clear_output);\n events.on('kernel_ready.Kernel', handle_kernel_cleanup);\n\n OutputArea.prototype.register_mime_type(EXEC_MIME_TYPE, append_mime, {\n safe: true,\n index: 0\n });\n}\n\nif (window.Jupyter !== undefined) {\n try {\n var events = require('base/js/events');\n var OutputArea = require('notebook/js/outputarea').OutputArea;\n if (OutputArea.prototype.mime_types().indexOf(EXEC_MIME_TYPE) == -1) {\n register_renderer(events, OutputArea);\n }\n } catch(err) {\n }\n}\n",
- "application/vnd.holoviews_load.v0+json": ""
- },
- "metadata": {},
- "output_type": "display_data"
- },
- {
- "data": {
- "text/html": [
- ""
- ]
- },
- "metadata": {},
- "output_type": "display_data"
- },
- {
- "data": {
- "application/vnd.holoviews_exec.v0+json": "",
- "text/html": [
- "\n",
- ""
- ]
- },
- "metadata": {
- "application/vnd.holoviews_exec.v0+json": {
- "id": "0e47f56a-dfe3-40c0-ba14-c0213a1181f6"
- }
- },
- "output_type": "display_data"
- },
- {
- "data": {
- "application/vnd.jupyter.widget-view+json": {
- "model_id": "064c673f5bc04fd096b014526ad8b0cf",
- "version_major": 2,
- "version_minor": 0
- },
- "text/plain": [
- "BokehModel(combine_events=True, render_bundle={'docs_json': {'1f64402e-b820-4e34-9ad8-c743fa6bb32a': {'version…"
- ]
- },
- "execution_count": 14,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "# show the textured mesh\n",
- "# better viewed in MeshLab\n",
- "# credit: https://github.com/google/model-viewer/issues/1088#issuecomment-612320218\n",
- "import panel as pn\n",
- "pn.extension()\n",
- "\n",
- "js = \"\"\"\n",
- " \n",
- " \n",
- " \n",
- "\"\"\"\n",
- "js_pane = pn.pane.HTML(js)\n",
- "\n",
- "# only .glb is supported\n",
- "html=f\"\"\"\n",
- " \n",
- " \n",
- "\"\"\"\n",
- "\n",
- "model_viewer_pane = pn.pane.HTML(html, height=800, width=500)\n",
- "\n",
- "app = pn.Column(js_pane, model_viewer_pane, styles={'background': 'grey'})\n",
- "\n",
- "app.servable()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "gradio",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.10.12"
- },
- "orig_nbformat": 4
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/One-2-3-45-master 2/ldm/data/__init__.py b/One-2-3-45-master 2/ldm/data/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/One-2-3-45-master 2/ldm/data/base.py b/One-2-3-45-master 2/ldm/data/base.py
deleted file mode 100644
index 742794e631081bbfa7c44f3df6f83373ca5c15c1..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/data/base.py
+++ /dev/null
@@ -1,40 +0,0 @@
-import os
-import numpy as np
-from abc import abstractmethod
-from torch.utils.data import Dataset, ConcatDataset, ChainDataset, IterableDataset
-
-
-class Txt2ImgIterableBaseDataset(IterableDataset):
- '''
- Define an interface to make the IterableDatasets for text2img data chainable
- '''
- def __init__(self, num_records=0, valid_ids=None, size=256):
- super().__init__()
- self.num_records = num_records
- self.valid_ids = valid_ids
- self.sample_ids = valid_ids
- self.size = size
-
- print(f'{self.__class__.__name__} dataset contains {self.__len__()} examples.')
-
- def __len__(self):
- return self.num_records
-
- @abstractmethod
- def __iter__(self):
- pass
-
-
-class PRNGMixin(object):
- """
- Adds a prng property which is a numpy RandomState which gets
- reinitialized whenever the pid changes to avoid synchronized sampling
- behavior when used in conjunction with multiprocessing.
- """
- @property
- def prng(self):
- currentpid = os.getpid()
- if getattr(self, "_initpid", None) != currentpid:
- self._initpid = currentpid
- self._prng = np.random.RandomState()
- return self._prng
diff --git a/One-2-3-45-master 2/ldm/data/coco.py b/One-2-3-45-master 2/ldm/data/coco.py
deleted file mode 100644
index 5e5e27e6ec6a51932f67b83dd88533cb39631e26..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/data/coco.py
+++ /dev/null
@@ -1,253 +0,0 @@
-import os
-import json
-import albumentations
-import numpy as np
-from PIL import Image
-from tqdm import tqdm
-from torch.utils.data import Dataset
-from abc import abstractmethod
-
-
-class CocoBase(Dataset):
- """needed for (image, caption, segmentation) pairs"""
- def __init__(self, size=None, dataroot="", datajson="", onehot_segmentation=False, use_stuffthing=False,
- crop_size=None, force_no_crop=False, given_files=None, use_segmentation=True,crop_type=None):
- self.split = self.get_split()
- self.size = size
- if crop_size is None:
- self.crop_size = size
- else:
- self.crop_size = crop_size
-
- assert crop_type in [None, 'random', 'center']
- self.crop_type = crop_type
- self.use_segmentation = use_segmentation
- self.onehot = onehot_segmentation # return segmentation as rgb or one hot
- self.stuffthing = use_stuffthing # include thing in segmentation
- if self.onehot and not self.stuffthing:
- raise NotImplementedError("One hot mode is only supported for the "
- "stuffthings version because labels are stored "
- "a bit differently.")
-
- data_json = datajson
- with open(data_json) as json_file:
- self.json_data = json.load(json_file)
- self.img_id_to_captions = dict()
- self.img_id_to_filepath = dict()
- self.img_id_to_segmentation_filepath = dict()
-
- assert data_json.split("/")[-1] in [f"captions_train{self.year()}.json",
- f"captions_val{self.year()}.json"]
- # TODO currently hardcoded paths, would be better to follow logic in
- # cocostuff pixelmaps
- if self.use_segmentation:
- if self.stuffthing:
- self.segmentation_prefix = (
- f"data/cocostuffthings/val{self.year()}" if
- data_json.endswith(f"captions_val{self.year()}.json") else
- f"data/cocostuffthings/train{self.year()}")
- else:
- self.segmentation_prefix = (
- f"data/coco/annotations/stuff_val{self.year()}_pixelmaps" if
- data_json.endswith(f"captions_val{self.year()}.json") else
- f"data/coco/annotations/stuff_train{self.year()}_pixelmaps")
-
- imagedirs = self.json_data["images"]
- self.labels = {"image_ids": list()}
- for imgdir in tqdm(imagedirs, desc="ImgToPath"):
- self.img_id_to_filepath[imgdir["id"]] = os.path.join(dataroot, imgdir["file_name"])
- self.img_id_to_captions[imgdir["id"]] = list()
- pngfilename = imgdir["file_name"].replace("jpg", "png")
- if self.use_segmentation:
- self.img_id_to_segmentation_filepath[imgdir["id"]] = os.path.join(
- self.segmentation_prefix, pngfilename)
- if given_files is not None:
- if pngfilename in given_files:
- self.labels["image_ids"].append(imgdir["id"])
- else:
- self.labels["image_ids"].append(imgdir["id"])
-
- capdirs = self.json_data["annotations"]
- for capdir in tqdm(capdirs, desc="ImgToCaptions"):
- # there are on average 5 captions per image
- #self.img_id_to_captions[capdir["image_id"]].append(np.array([capdir["caption"]]))
- self.img_id_to_captions[capdir["image_id"]].append(capdir["caption"])
-
- self.rescaler = albumentations.SmallestMaxSize(max_size=self.size)
- if self.split=="validation":
- self.cropper = albumentations.CenterCrop(height=self.crop_size, width=self.crop_size)
- else:
- # default option for train is random crop
- if self.crop_type in [None, 'random']:
- self.cropper = albumentations.RandomCrop(height=self.crop_size, width=self.crop_size)
- else:
- self.cropper = albumentations.CenterCrop(height=self.crop_size, width=self.crop_size)
- self.preprocessor = albumentations.Compose(
- [self.rescaler, self.cropper],
- additional_targets={"segmentation": "image"})
- if force_no_crop:
- self.rescaler = albumentations.Resize(height=self.size, width=self.size)
- self.preprocessor = albumentations.Compose(
- [self.rescaler],
- additional_targets={"segmentation": "image"})
-
- @abstractmethod
- def year(self):
- raise NotImplementedError()
-
- def __len__(self):
- return len(self.labels["image_ids"])
-
- def preprocess_image(self, image_path, segmentation_path=None):
- image = Image.open(image_path)
- if not image.mode == "RGB":
- image = image.convert("RGB")
- image = np.array(image).astype(np.uint8)
- if segmentation_path:
- segmentation = Image.open(segmentation_path)
- if not self.onehot and not segmentation.mode == "RGB":
- segmentation = segmentation.convert("RGB")
- segmentation = np.array(segmentation).astype(np.uint8)
- if self.onehot:
- assert self.stuffthing
- # stored in caffe format: unlabeled==255. stuff and thing from
- # 0-181. to be compatible with the labels in
- # https://github.com/nightrome/cocostuff/blob/master/labels.txt
- # we shift stuffthing one to the right and put unlabeled in zero
- # as long as segmentation is uint8 shifting to right handles the
- # latter too
- assert segmentation.dtype == np.uint8
- segmentation = segmentation + 1
-
- processed = self.preprocessor(image=image, segmentation=segmentation)
-
- image, segmentation = processed["image"], processed["segmentation"]
- else:
- image = self.preprocessor(image=image,)['image']
-
- image = (image / 127.5 - 1.0).astype(np.float32)
- if segmentation_path:
- if self.onehot:
- assert segmentation.dtype == np.uint8
- # make it one hot
- n_labels = 183
- flatseg = np.ravel(segmentation)
- onehot = np.zeros((flatseg.size, n_labels), dtype=bool)
- onehot[np.arange(flatseg.size), flatseg] = True
- onehot = onehot.reshape(segmentation.shape + (n_labels,)).astype(int)
- segmentation = onehot
- else:
- segmentation = (segmentation / 127.5 - 1.0).astype(np.float32)
- return image, segmentation
- else:
- return image
-
- def __getitem__(self, i):
- img_path = self.img_id_to_filepath[self.labels["image_ids"][i]]
- if self.use_segmentation:
- seg_path = self.img_id_to_segmentation_filepath[self.labels["image_ids"][i]]
- image, segmentation = self.preprocess_image(img_path, seg_path)
- else:
- image = self.preprocess_image(img_path)
- captions = self.img_id_to_captions[self.labels["image_ids"][i]]
- # randomly draw one of all available captions per image
- caption = captions[np.random.randint(0, len(captions))]
- example = {"image": image,
- #"caption": [str(caption[0])],
- "caption": caption,
- "img_path": img_path,
- "filename_": img_path.split(os.sep)[-1]
- }
- if self.use_segmentation:
- example.update({"seg_path": seg_path, 'segmentation': segmentation})
- return example
-
-
-class CocoImagesAndCaptionsTrain2017(CocoBase):
- """returns a pair of (image, caption)"""
- def __init__(self, size, onehot_segmentation=False, use_stuffthing=False, crop_size=None, force_no_crop=False,):
- super().__init__(size=size,
- dataroot="data/coco/train2017",
- datajson="data/coco/annotations/captions_train2017.json",
- onehot_segmentation=onehot_segmentation,
- use_stuffthing=use_stuffthing, crop_size=crop_size, force_no_crop=force_no_crop)
-
- def get_split(self):
- return "train"
-
- def year(self):
- return '2017'
-
-
-class CocoImagesAndCaptionsValidation2017(CocoBase):
- """returns a pair of (image, caption)"""
- def __init__(self, size, onehot_segmentation=False, use_stuffthing=False, crop_size=None, force_no_crop=False,
- given_files=None):
- super().__init__(size=size,
- dataroot="data/coco/val2017",
- datajson="data/coco/annotations/captions_val2017.json",
- onehot_segmentation=onehot_segmentation,
- use_stuffthing=use_stuffthing, crop_size=crop_size, force_no_crop=force_no_crop,
- given_files=given_files)
-
- def get_split(self):
- return "validation"
-
- def year(self):
- return '2017'
-
-
-
-class CocoImagesAndCaptionsTrain2014(CocoBase):
- """returns a pair of (image, caption)"""
- def __init__(self, size, onehot_segmentation=False, use_stuffthing=False, crop_size=None, force_no_crop=False,crop_type='random'):
- super().__init__(size=size,
- dataroot="data/coco/train2014",
- datajson="data/coco/annotations2014/annotations/captions_train2014.json",
- onehot_segmentation=onehot_segmentation,
- use_stuffthing=use_stuffthing, crop_size=crop_size, force_no_crop=force_no_crop,
- use_segmentation=False,
- crop_type=crop_type)
-
- def get_split(self):
- return "train"
-
- def year(self):
- return '2014'
-
-class CocoImagesAndCaptionsValidation2014(CocoBase):
- """returns a pair of (image, caption)"""
- def __init__(self, size, onehot_segmentation=False, use_stuffthing=False, crop_size=None, force_no_crop=False,
- given_files=None,crop_type='center',**kwargs):
- super().__init__(size=size,
- dataroot="data/coco/val2014",
- datajson="data/coco/annotations2014/annotations/captions_val2014.json",
- onehot_segmentation=onehot_segmentation,
- use_stuffthing=use_stuffthing, crop_size=crop_size, force_no_crop=force_no_crop,
- given_files=given_files,
- use_segmentation=False,
- crop_type=crop_type)
-
- def get_split(self):
- return "validation"
-
- def year(self):
- return '2014'
-
-if __name__ == '__main__':
- with open("data/coco/annotations2014/annotations/captions_val2014.json", "r") as json_file:
- json_data = json.load(json_file)
- capdirs = json_data["annotations"]
- import pudb; pudb.set_trace()
- #d2 = CocoImagesAndCaptionsTrain2014(size=256)
- d2 = CocoImagesAndCaptionsValidation2014(size=256)
- print("constructed dataset.")
- print(f"length of {d2.__class__.__name__}: {len(d2)}")
-
- ex2 = d2[0]
- # ex3 = d3[0]
- # print(ex1["image"].shape)
- print(ex2["image"].shape)
- # print(ex3["image"].shape)
- # print(ex1["segmentation"].shape)
- print(ex2["caption"].__class__.__name__)
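For reference, the label-shift and one-hot encoding step explained in CocoBase.preprocess_image above can be reproduced with a small standalone sketch (a synthetic 2x2 segmentation map stands in for a real pixelmap; 255 marks "unlabeled" in the stored files):

import numpy as np

seg = np.array([[0, 181], [255, 3]], dtype=np.uint8)
seg = seg + 1                      # uint8 wrap-around: 255 -> 0, so "unlabeled" becomes class 0
n_labels = 183
onehot = np.zeros((seg.size, n_labels), dtype=bool)
onehot[np.arange(seg.size), seg.ravel()] = True
onehot = onehot.reshape(seg.shape + (n_labels,)).astype(int)
print(onehot.shape)                # (2, 2, 183)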
diff --git a/One-2-3-45-master 2/ldm/data/dummy.py b/One-2-3-45-master 2/ldm/data/dummy.py
deleted file mode 100644
index 3b74a77fe8954686e480d28aaed19e52d3e3c9b7..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/data/dummy.py
+++ /dev/null
@@ -1,34 +0,0 @@
-import numpy as np
-import random
-import string
-from torch.utils.data import Dataset, Subset
-
-class DummyData(Dataset):
- def __init__(self, length, size):
- self.length = length
- self.size = size
-
- def __len__(self):
- return self.length
-
- def __getitem__(self, i):
- x = np.random.randn(*self.size)
- letters = string.ascii_lowercase
- y = ''.join(random.choice(letters) for _ in range(10))
- return {"jpg": x, "txt": y}
-
-
-class DummyDataWithEmbeddings(Dataset):
- def __init__(self, length, size, emb_size):
- self.length = length
- self.size = size
- self.emb_size = emb_size
-
- def __len__(self):
- return self.length
-
- def __getitem__(self, i):
- x = np.random.randn(*self.size)
- y = np.random.randn(*self.emb_size).astype(np.float32)
- return {"jpg": x, "txt": y}
-
diff --git a/One-2-3-45-master 2/ldm/data/imagenet.py b/One-2-3-45-master 2/ldm/data/imagenet.py
deleted file mode 100644
index 66231964a685cc875243018461a6aaa63a96dbf0..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/data/imagenet.py
+++ /dev/null
@@ -1,394 +0,0 @@
-import os, yaml, pickle, shutil, tarfile, glob
-import cv2
-import albumentations
-import PIL
-import numpy as np
-import torchvision.transforms.functional as TF
-from omegaconf import OmegaConf
-from functools import partial
-from PIL import Image
-from tqdm import tqdm
-from torch.utils.data import Dataset, Subset
-
-import taming.data.utils as tdu
-from taming.data.imagenet import str_to_indices, give_synsets_from_indices, download, retrieve
-from taming.data.imagenet import ImagePaths
-
-from ldm.modules.image_degradation import degradation_fn_bsr, degradation_fn_bsr_light
-
-
-def synset2idx(path_to_yaml="data/index_synset.yaml"):
- with open(path_to_yaml) as f:
- di2s = yaml.safe_load(f)
- return dict((v,k) for k,v in di2s.items())
-
-
-class ImageNetBase(Dataset):
- def __init__(self, config=None):
- self.config = config or OmegaConf.create()
- if not isinstance(self.config, dict):
- self.config = OmegaConf.to_container(self.config)
- self.keep_orig_class_label = self.config.get("keep_orig_class_label", False)
- self.process_images = True # if False we skip loading & processing images and self.data contains filepaths
- self._prepare()
- self._prepare_synset_to_human()
- self._prepare_idx_to_synset()
- self._prepare_human_to_integer_label()
- self._load()
-
- def __len__(self):
- return len(self.data)
-
- def __getitem__(self, i):
- return self.data[i]
-
- def _prepare(self):
- raise NotImplementedError()
-
- def _filter_relpaths(self, relpaths):
- ignore = set([
- "n06596364_9591.JPEG",
- ])
- relpaths = [rpath for rpath in relpaths if rpath.split("/")[-1] not in ignore]
- if "sub_indices" in self.config:
- indices = str_to_indices(self.config["sub_indices"])
- synsets = give_synsets_from_indices(indices, path_to_yaml=self.idx2syn) # returns a list of strings
- self.synset2idx = synset2idx(path_to_yaml=self.idx2syn)
- files = []
- for rpath in relpaths:
- syn = rpath.split("/")[0]
- if syn in synsets:
- files.append(rpath)
- return files
- else:
- return relpaths
-
- def _prepare_synset_to_human(self):
- SIZE = 2655750
- URL = "https://heibox.uni-heidelberg.de/f/9f28e956cd304264bb82/?dl=1"
- self.human_dict = os.path.join(self.root, "synset_human.txt")
- if (not os.path.exists(self.human_dict) or
- not os.path.getsize(self.human_dict)==SIZE):
- download(URL, self.human_dict)
-
- def _prepare_idx_to_synset(self):
- URL = "https://heibox.uni-heidelberg.de/f/d835d5b6ceda4d3aa910/?dl=1"
- self.idx2syn = os.path.join(self.root, "index_synset.yaml")
- if (not os.path.exists(self.idx2syn)):
- download(URL, self.idx2syn)
-
- def _prepare_human_to_integer_label(self):
- URL = "https://heibox.uni-heidelberg.de/f/2362b797d5be43b883f6/?dl=1"
- self.human2integer = os.path.join(self.root, "imagenet1000_clsidx_to_labels.txt")
- if (not os.path.exists(self.human2integer)):
- download(URL, self.human2integer)
- with open(self.human2integer, "r") as f:
- lines = f.read().splitlines()
- assert len(lines) == 1000
- self.human2integer_dict = dict()
- for line in lines:
- value, key = line.split(":")
- self.human2integer_dict[key] = int(value)
-
- def _load(self):
- with open(self.txt_filelist, "r") as f:
- self.relpaths = f.read().splitlines()
- l1 = len(self.relpaths)
- self.relpaths = self._filter_relpaths(self.relpaths)
- print("Removed {} files from filelist during filtering.".format(l1 - len(self.relpaths)))
-
- self.synsets = [p.split("/")[0] for p in self.relpaths]
- self.abspaths = [os.path.join(self.datadir, p) for p in self.relpaths]
-
- unique_synsets = np.unique(self.synsets)
- class_dict = dict((synset, i) for i, synset in enumerate(unique_synsets))
- if not self.keep_orig_class_label:
- self.class_labels = [class_dict[s] for s in self.synsets]
- else:
- self.class_labels = [self.synset2idx[s] for s in self.synsets]
-
- with open(self.human_dict, "r") as f:
- human_dict = f.read().splitlines()
- human_dict = dict(line.split(maxsplit=1) for line in human_dict)
-
- self.human_labels = [human_dict[s] for s in self.synsets]
-
- labels = {
- "relpath": np.array(self.relpaths),
- "synsets": np.array(self.synsets),
- "class_label": np.array(self.class_labels),
- "human_label": np.array(self.human_labels),
- }
-
- if self.process_images:
- self.size = retrieve(self.config, "size", default=256)
- self.data = ImagePaths(self.abspaths,
- labels=labels,
- size=self.size,
- random_crop=self.random_crop,
- )
- else:
- self.data = self.abspaths
-
-
-class ImageNetTrain(ImageNetBase):
- NAME = "ILSVRC2012_train"
- URL = "http://www.image-net.org/challenges/LSVRC/2012/"
- AT_HASH = "a306397ccf9c2ead27155983c254227c0fd938e2"
- FILES = [
- "ILSVRC2012_img_train.tar",
- ]
- SIZES = [
- 147897477120,
- ]
-
- def __init__(self, process_images=True, data_root=None, **kwargs):
- self.process_images = process_images
- self.data_root = data_root
- super().__init__(**kwargs)
-
- def _prepare(self):
- if self.data_root:
- self.root = os.path.join(self.data_root, self.NAME)
- else:
- cachedir = os.environ.get("XDG_CACHE_HOME", os.path.expanduser("~/.cache"))
- self.root = os.path.join(cachedir, "autoencoders/data", self.NAME)
-
- self.datadir = os.path.join(self.root, "data")
- self.txt_filelist = os.path.join(self.root, "filelist.txt")
- self.expected_length = 1281167
- self.random_crop = retrieve(self.config, "ImageNetTrain/random_crop",
- default=True)
- if not tdu.is_prepared(self.root):
- # prep
- print("Preparing dataset {} in {}".format(self.NAME, self.root))
-
- datadir = self.datadir
- if not os.path.exists(datadir):
- path = os.path.join(self.root, self.FILES[0])
- if not os.path.exists(path) or not os.path.getsize(path)==self.SIZES[0]:
- import academictorrents as at
- atpath = at.get(self.AT_HASH, datastore=self.root)
- assert atpath == path
-
- print("Extracting {} to {}".format(path, datadir))
- os.makedirs(datadir, exist_ok=True)
- with tarfile.open(path, "r:") as tar:
- tar.extractall(path=datadir)
-
- print("Extracting sub-tars.")
- subpaths = sorted(glob.glob(os.path.join(datadir, "*.tar")))
- for subpath in tqdm(subpaths):
- subdir = subpath[:-len(".tar")]
- os.makedirs(subdir, exist_ok=True)
- with tarfile.open(subpath, "r:") as tar:
- tar.extractall(path=subdir)
-
- filelist = glob.glob(os.path.join(datadir, "**", "*.JPEG"))
- filelist = [os.path.relpath(p, start=datadir) for p in filelist]
- filelist = sorted(filelist)
- filelist = "\n".join(filelist)+"\n"
- with open(self.txt_filelist, "w") as f:
- f.write(filelist)
-
- tdu.mark_prepared(self.root)
-
-
-class ImageNetValidation(ImageNetBase):
- NAME = "ILSVRC2012_validation"
- URL = "http://www.image-net.org/challenges/LSVRC/2012/"
- AT_HASH = "5d6d0df7ed81efd49ca99ea4737e0ae5e3a5f2e5"
- VS_URL = "https://heibox.uni-heidelberg.de/f/3e0f6e9c624e45f2bd73/?dl=1"
- FILES = [
- "ILSVRC2012_img_val.tar",
- "validation_synset.txt",
- ]
- SIZES = [
- 6744924160,
- 1950000,
- ]
-
- def __init__(self, process_images=True, data_root=None, **kwargs):
- self.data_root = data_root
- self.process_images = process_images
- super().__init__(**kwargs)
-
- def _prepare(self):
- if self.data_root:
- self.root = os.path.join(self.data_root, self.NAME)
- else:
- cachedir = os.environ.get("XDG_CACHE_HOME", os.path.expanduser("~/.cache"))
- self.root = os.path.join(cachedir, "autoencoders/data", self.NAME)
- self.datadir = os.path.join(self.root, "data")
- self.txt_filelist = os.path.join(self.root, "filelist.txt")
- self.expected_length = 50000
- self.random_crop = retrieve(self.config, "ImageNetValidation/random_crop",
- default=False)
- if not tdu.is_prepared(self.root):
- # prep
- print("Preparing dataset {} in {}".format(self.NAME, self.root))
-
- datadir = self.datadir
- if not os.path.exists(datadir):
- path = os.path.join(self.root, self.FILES[0])
- if not os.path.exists(path) or not os.path.getsize(path)==self.SIZES[0]:
- import academictorrents as at
- atpath = at.get(self.AT_HASH, datastore=self.root)
- assert atpath == path
-
- print("Extracting {} to {}".format(path, datadir))
- os.makedirs(datadir, exist_ok=True)
- with tarfile.open(path, "r:") as tar:
- tar.extractall(path=datadir)
-
- vspath = os.path.join(self.root, self.FILES[1])
- if not os.path.exists(vspath) or not os.path.getsize(vspath)==self.SIZES[1]:
- download(self.VS_URL, vspath)
-
- with open(vspath, "r") as f:
- synset_dict = f.read().splitlines()
- synset_dict = dict(line.split() for line in synset_dict)
-
- print("Reorganizing into synset folders")
- synsets = np.unique(list(synset_dict.values()))
- for s in synsets:
- os.makedirs(os.path.join(datadir, s), exist_ok=True)
- for k, v in synset_dict.items():
- src = os.path.join(datadir, k)
- dst = os.path.join(datadir, v)
- shutil.move(src, dst)
-
- filelist = glob.glob(os.path.join(datadir, "**", "*.JPEG"))
- filelist = [os.path.relpath(p, start=datadir) for p in filelist]
- filelist = sorted(filelist)
- filelist = "\n".join(filelist)+"\n"
- with open(self.txt_filelist, "w") as f:
- f.write(filelist)
-
- tdu.mark_prepared(self.root)
-
-
-
-class ImageNetSR(Dataset):
- def __init__(self, size=None,
- degradation=None, downscale_f=4, min_crop_f=0.5, max_crop_f=1.,
- random_crop=True):
- """
- Imagenet Superresolution Dataloader
- Performs the following ops in order:
- 1. crops a patch of size s from the image, either as a random or a center crop
- 2. resizes the crop to `size` with cv2 area interpolation
- 3. degrades the resized crop with degradation_fn
-
- :param size: resizing to size after cropping
- :param degradation: degradation_fn, e.g. cv_bicubic or bsrgan_light
- :param downscale_f: Low Resolution Downsample factor
- :param min_crop_f: determines crop size s,
- where s = c * min_img_side_len with c sampled from interval (min_crop_f, max_crop_f)
- :param max_crop_f: upper bound of the interval from which c is sampled (see min_crop_f)
- :param random_crop: if True, use a random crop; otherwise use a center crop
- """
- self.base = self.get_base()
- assert size
- assert (size / downscale_f).is_integer()
- self.size = size
- self.LR_size = int(size / downscale_f)
- self.min_crop_f = min_crop_f
- self.max_crop_f = max_crop_f
- assert(max_crop_f <= 1.)
- self.center_crop = not random_crop
-
- self.image_rescaler = albumentations.SmallestMaxSize(max_size=size, interpolation=cv2.INTER_AREA)
-
- self.pil_interpolation = False # gets reset later in case interp_op is from pillow
-
- if degradation == "bsrgan":
- self.degradation_process = partial(degradation_fn_bsr, sf=downscale_f)
-
- elif degradation == "bsrgan_light":
- self.degradation_process = partial(degradation_fn_bsr_light, sf=downscale_f)
-
- else:
- interpolation_fn = {
- "cv_nearest": cv2.INTER_NEAREST,
- "cv_bilinear": cv2.INTER_LINEAR,
- "cv_bicubic": cv2.INTER_CUBIC,
- "cv_area": cv2.INTER_AREA,
- "cv_lanczos": cv2.INTER_LANCZOS4,
- "pil_nearest": PIL.Image.NEAREST,
- "pil_bilinear": PIL.Image.BILINEAR,
- "pil_bicubic": PIL.Image.BICUBIC,
- "pil_box": PIL.Image.BOX,
- "pil_hamming": PIL.Image.HAMMING,
- "pil_lanczos": PIL.Image.LANCZOS,
- }[degradation]
-
- self.pil_interpolation = degradation.startswith("pil_")
-
- if self.pil_interpolation:
- self.degradation_process = partial(TF.resize, size=self.LR_size, interpolation=interpolation_fn)
-
- else:
- self.degradation_process = albumentations.SmallestMaxSize(max_size=self.LR_size,
- interpolation=interpolation_fn)
-
- def __len__(self):
- return len(self.base)
-
- def __getitem__(self, i):
- example = self.base[i]
- image = Image.open(example["file_path_"])
-
- if not image.mode == "RGB":
- image = image.convert("RGB")
-
- image = np.array(image).astype(np.uint8)
-
- min_side_len = min(image.shape[:2])
- crop_side_len = min_side_len * np.random.uniform(self.min_crop_f, self.max_crop_f, size=None)
- crop_side_len = int(crop_side_len)
-
- if self.center_crop:
- self.cropper = albumentations.CenterCrop(height=crop_side_len, width=crop_side_len)
-
- else:
- self.cropper = albumentations.RandomCrop(height=crop_side_len, width=crop_side_len)
-
- image = self.cropper(image=image)["image"]
- image = self.image_rescaler(image=image)["image"]
-
- if self.pil_interpolation:
- image_pil = PIL.Image.fromarray(image)
- LR_image = self.degradation_process(image_pil)
- LR_image = np.array(LR_image).astype(np.uint8)
-
- else:
- LR_image = self.degradation_process(image=image)["image"]
-
- example["image"] = (image/127.5 - 1.0).astype(np.float32)
- example["LR_image"] = (LR_image/127.5 - 1.0).astype(np.float32)
- example["caption"] = example["human_label"] # dummy caption
- return example
-
-
-class ImageNetSRTrain(ImageNetSR):
- def __init__(self, **kwargs):
- super().__init__(**kwargs)
-
- def get_base(self):
- with open("data/imagenet_train_hr_indices.p", "rb") as f:
- indices = pickle.load(f)
- dset = ImageNetTrain(process_images=False,)
- return Subset(dset, indices)
-
-
-class ImageNetSRValidation(ImageNetSR):
- def __init__(self, **kwargs):
- super().__init__(**kwargs)
-
- def get_base(self):
- with open("data/imagenet_val_hr_indices.p", "rb") as f:
- indices = pickle.load(f)
- dset = ImageNetValidation(process_images=False,)
- return Subset(dset, indices)
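The crop -> resize -> degrade pipeline described in the ImageNetSR docstring above can be sketched roughly as follows (illustrative only: a random array stands in for an ImageNet image, and plain area downsampling stands in for degradation_fn_bsr / degradation_fn_bsr_light):

import cv2
import numpy as np

size, downscale_f = 256, 4
image = np.random.randint(0, 256, (384, 512, 3), dtype=np.uint8)  # stand-in HR image

# 1. crop of side length s = c * min_side, with c ~ U(min_crop_f, max_crop_f)
min_side = min(image.shape[:2])
crop = int(min_side * np.random.uniform(0.5, 1.0))
y0 = np.random.randint(0, image.shape[0] - crop + 1)
x0 = np.random.randint(0, image.shape[1] - crop + 1)
patch = image[y0:y0 + crop, x0:x0 + crop]

# 2. resize the crop to the target size with area interpolation
hr = cv2.resize(patch, (size, size), interpolation=cv2.INTER_AREA)

# 3. degrade to the low-resolution counterpart (plain area downsampling here)
lr = cv2.resize(hr, (size // downscale_f, size // downscale_f), interpolation=cv2.INTER_AREA)

example = {"image": hr / 127.5 - 1.0, "LR_image": lr / 127.5 - 1.0}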
diff --git a/One-2-3-45-master 2/ldm/data/inpainting/__init__.py b/One-2-3-45-master 2/ldm/data/inpainting/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/One-2-3-45-master 2/ldm/data/inpainting/synthetic_mask.py b/One-2-3-45-master 2/ldm/data/inpainting/synthetic_mask.py
deleted file mode 100644
index bb4c38f3a79b8eb40553469d6f0656ad2f54609a..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/data/inpainting/synthetic_mask.py
+++ /dev/null
@@ -1,166 +0,0 @@
-from PIL import Image, ImageDraw
-import numpy as np
-
-settings = {
- "256narrow": {
- "p_irr": 1,
- "min_n_irr": 4,
- "max_n_irr": 50,
- "max_l_irr": 40,
- "max_w_irr": 10,
- "min_n_box": None,
- "max_n_box": None,
- "min_s_box": None,
- "max_s_box": None,
- "marg": None,
- },
- "256train": {
- "p_irr": 0.5,
- "min_n_irr": 1,
- "max_n_irr": 5,
- "max_l_irr": 200,
- "max_w_irr": 100,
- "min_n_box": 1,
- "max_n_box": 4,
- "min_s_box": 30,
- "max_s_box": 150,
- "marg": 10,
- },
- "512train": { # TODO: experimental
- "p_irr": 0.5,
- "min_n_irr": 1,
- "max_n_irr": 5,
- "max_l_irr": 450,
- "max_w_irr": 250,
- "min_n_box": 1,
- "max_n_box": 4,
- "min_s_box": 30,
- "max_s_box": 300,
- "marg": 10,
- },
- "512train-large": { # TODO: experimental
- "p_irr": 0.5,
- "min_n_irr": 1,
- "max_n_irr": 5,
- "max_l_irr": 450,
- "max_w_irr": 400,
- "min_n_box": 1,
- "max_n_box": 4,
- "min_s_box": 75,
- "max_s_box": 450,
- "marg": 10,
- },
-}
-
-
-def gen_segment_mask(mask, start, end, brush_width):
- mask = mask > 0
- mask = (255 * mask).astype(np.uint8)
- mask = Image.fromarray(mask)
- draw = ImageDraw.Draw(mask)
- draw.line([start, end], fill=255, width=brush_width, joint="curve")
- mask = np.array(mask) / 255
- return mask
-
-
-def gen_box_mask(mask, masked):
- x_0, y_0, w, h = masked
- mask[y_0:y_0 + h, x_0:x_0 + w] = 1
- return mask
-
-
-def gen_round_mask(mask, masked, radius):
- x_0, y_0, w, h = masked
- xy = [(x_0, y_0), (x_0 + w, y_0 + w)]
-
- mask = mask > 0
- mask = (255 * mask).astype(np.uint8)
- mask = Image.fromarray(mask)
- draw = ImageDraw.Draw(mask)
- draw.rounded_rectangle(xy, radius=radius, fill=255)
- mask = np.array(mask) / 255
- return mask
-
-
-def gen_large_mask(prng, img_h, img_w,
- marg, p_irr, min_n_irr, max_n_irr, max_l_irr, max_w_irr,
- min_n_box, max_n_box, min_s_box, max_s_box):
- """
- img_h: int, an image height
- img_w: int, an image width
- marg: int, a margin for a box starting coordinate
- p_irr: float, 0 <= p_irr <= 1, a probability of a polygonal chain mask
-
- min_n_irr: int, min number of segments
- max_n_irr: int, max number of segments
- max_l_irr: max length of a segment in polygonal chain
- max_w_irr: max width of a segment in polygonal chain
-
- min_n_box: int, min bound for the number of box primitives
- max_n_box: int, max bound for the number of box primitives
- min_s_box: int, min length of a box side
- max_s_box: int, max length of a box side
- """
-
- mask = np.zeros((img_h, img_w))
- uniform = prng.randint
-
- if prng.uniform(0, 1) < p_irr: # generate polygonal chain
- n = uniform(min_n_irr, max_n_irr) # sample number of segments
-
- for _ in range(n):
- y = uniform(0, img_h) # sample a starting point
- x = uniform(0, img_w)
-
- a = np.radians(uniform(0, 360)) # sample angle in degrees, converted to radians for sin/cos below
- l = uniform(10, max_l_irr) # sample segment length
- w = uniform(5, max_w_irr) # sample a segment width
-
- # draw segment starting from (x,y) to (x_,y_) using brush of width w
- x_ = x + l * np.sin(a)
- y_ = y + l * np.cos(a)
-
- mask = gen_segment_mask(mask, start=(x, y), end=(x_, y_), brush_width=w)
- x, y = x_, y_
- else: # generate Box masks
- n = uniform(min_n_box, max_n_box) # sample number of rectangles
-
- for _ in range(n):
- h = uniform(min_s_box, max_s_box) # sample box shape
- w = uniform(min_s_box, max_s_box)
-
- x_0 = uniform(marg, img_w - marg - w) # sample upper-left coordinates of box
- y_0 = uniform(marg, img_h - marg - h)
-
- if prng.uniform(0, 1) < 0.5:
- mask = gen_box_mask(mask, masked=(x_0, y_0, w, h))
- else:
- r = uniform(0, 60) # sample radius
- mask = gen_round_mask(mask, masked=(x_0, y_0, w, h), radius=r)
- return mask
-
-
-make_lama_mask = lambda prng, h, w: gen_large_mask(prng, h, w, **settings["256train"])
-make_narrow_lama_mask = lambda prng, h, w: gen_large_mask(prng, h, w, **settings["256narrow"])
-make_512_lama_mask = lambda prng, h, w: gen_large_mask(prng, h, w, **settings["512train"])
-make_512_lama_mask_large = lambda prng, h, w: gen_large_mask(prng, h, w, **settings["512train-large"])
-
-
-MASK_MODES = {
- "256train": make_lama_mask,
- "256narrow": make_narrow_lama_mask,
- "512train": make_512_lama_mask,
- "512train-large": make_512_lama_mask_large
-}
-
-if __name__ == "__main__":
- import sys
-
- out = sys.argv[1]
-
- prng = np.random.RandomState(1)
- kwargs = settings["256train"]
- mask = gen_large_mask(prng, 256, 256, **kwargs)
- mask = (255 * mask).astype(np.uint8)
- mask = Image.fromarray(mask)
- mask.save(out)
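Beyond the __main__ demo above, the presets were typically consumed through the MASK_MODES table; a minimal sketch (assuming the module is importable as ldm.data.inpainting.synthetic_mask, as it is in ldm/data/laion.py):

import numpy as np
from ldm.data.inpainting.synthetic_mask import MASK_MODES

prng = np.random.RandomState(0)
make_mask = MASK_MODES["512train"]   # pick a preset by name
mask = make_mask(prng, 512, 512)     # (H, W) float array, mostly 0/1
print("masked fraction:", mask.mean())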
diff --git a/One-2-3-45-master 2/ldm/data/laion.py b/One-2-3-45-master 2/ldm/data/laion.py
deleted file mode 100644
index 2eb608c1a4cf2b7c0215bdd7c1c81841e3a39b0c..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/data/laion.py
+++ /dev/null
@@ -1,537 +0,0 @@
-import webdataset as wds
-import kornia
-from PIL import Image
-import io
-import os
-import torchvision
-from PIL import Image
-import glob
-import random
-import numpy as np
-import pytorch_lightning as pl
-from tqdm import tqdm
-from omegaconf import OmegaConf
-from einops import rearrange
-import torch
-import torch.nn as nn  # needed for the nn.Identity() defaults in DataWithWings below
-import mmh3  # used by DataWithWings._compute_hash
-from webdataset.handlers import warn_and_continue
-# NOTE: DataWithWings additionally relies on an OnDiskKV key-value store that is not imported here.
-
-
-from ldm.util import instantiate_from_config
-from ldm.data.inpainting.synthetic_mask import gen_large_mask, MASK_MODES
-from ldm.data.base import PRNGMixin
-
-
-class DataWithWings(torch.utils.data.IterableDataset):
- def __init__(self, min_size, transform=None, target_transform=None):
- self.min_size = min_size
- self.transform = transform if transform is not None else nn.Identity()
- self.target_transform = target_transform if target_transform is not None else nn.Identity()
- self.kv = OnDiskKV(file='/home/ubuntu/laion5B-watermark-safety-ordered', key_format='q', value_format='ee')
- self.kv_aesthetic = OnDiskKV(file='/home/ubuntu/laion5B-aesthetic-tags-kv', key_format='q', value_format='e')
- self.pwatermark_threshold = 0.8
- self.punsafe_threshold = 0.5
- self.aesthetic_threshold = 5.
- self.total_samples = 0
- self.samples = 0
- location = 'pipe:aws s3 cp --quiet s3://s-datasets/laion5b/laion2B-data/{000000..231349}.tar -'
-
- self.inner_dataset = wds.DataPipeline(
- wds.ResampledShards(location),
- wds.tarfile_to_samples(handler=wds.warn_and_continue),
- wds.shuffle(1000, handler=wds.warn_and_continue),
- wds.decode('pilrgb', handler=wds.warn_and_continue),
- wds.map(self._add_tags, handler=wds.ignore_and_continue),
- wds.select(self._filter_predicate),
- wds.map_dict(jpg=self.transform, txt=self.target_transform, punsafe=self._punsafe_to_class, handler=wds.warn_and_continue),
- wds.to_tuple('jpg', 'txt', 'punsafe', handler=wds.warn_and_continue),
- )
-
- @staticmethod
- def _compute_hash(url, text):
- if url is None:
- url = ''
- if text is None:
- text = ''
- total = (url + text).encode('utf-8')
- return mmh3.hash64(total)[0]
-
- def _add_tags(self, x):
- hsh = self._compute_hash(x['json']['url'], x['txt'])
- pwatermark, punsafe = self.kv[hsh]
- aesthetic = self.kv_aesthetic[hsh][0]
- return {**x, 'pwatermark': pwatermark, 'punsafe': punsafe, 'aesthetic': aesthetic}
-
- def _punsafe_to_class(self, punsafe):
- return torch.tensor(punsafe >= self.punsafe_threshold).long()
-
- def _filter_predicate(self, x):
- try:
- return x['pwatermark'] < self.pwatermark_threshold and x['aesthetic'] >= self.aesthetic_threshold and x['json']['original_width'] >= self.min_size and x['json']['original_height'] >= self.min_size
- except Exception:
- return False
-
- def __iter__(self):
- return iter(self.inner_dataset)
-
-
-def dict_collation_fn(samples, combine_tensors=True, combine_scalars=True):
- """Take a list of samples (as dictionary) and create a batch, preserving the keys.
- If `tensors` is True, `ndarray` objects are combined into
- tensor batches.
- :param dict samples: list of samples
- :param bool tensors: whether to turn lists of ndarrays into a single ndarray
- :returns: single sample consisting of a batch
- :rtype: dict
- """
- keys = set.intersection(*[set(sample.keys()) for sample in samples])
- batched = {key: [] for key in keys}
-
- for s in samples:
- for key in batched:
- batched[key].append(s[key])
-
- result = {}
- for key in batched:
- if isinstance(batched[key][0], (int, float)):
- if combine_scalars:
- result[key] = np.array(list(batched[key]))
- elif isinstance(batched[key][0], torch.Tensor):
- if combine_tensors:
- result[key] = torch.stack(list(batched[key]))
- elif isinstance(batched[key][0], np.ndarray):
- if combine_tensors:
- result[key] = np.array(list(batched[key]))
- else:
- result[key] = list(batched[key])
- return result
-
-
-class WebDataModuleFromConfig(pl.LightningDataModule):
- def __init__(self, tar_base, batch_size, train=None, validation=None,
- test=None, num_workers=4, multinode=True, min_size=None,
- max_pwatermark=1.0,
- **kwargs):
- super().__init__()
- print(f'Setting tar base to {tar_base}')
- self.tar_base = tar_base
- self.batch_size = batch_size
- self.num_workers = num_workers
- self.train = train
- self.validation = validation
- self.test = test
- self.multinode = multinode
- self.min_size = min_size # filter out very small images
- self.max_pwatermark = max_pwatermark # filter out watermarked images
-
- def make_loader(self, dataset_config, train=True):
- if 'image_transforms' in dataset_config:
- image_transforms = [instantiate_from_config(tt) for tt in dataset_config.image_transforms]
- else:
- image_transforms = []
-
- image_transforms.extend([torchvision.transforms.ToTensor(),
- torchvision.transforms.Lambda(lambda x: rearrange(x * 2. - 1., 'c h w -> h w c'))])
- image_transforms = torchvision.transforms.Compose(image_transforms)
-
- if 'transforms' in dataset_config:
- transforms_config = OmegaConf.to_container(dataset_config.transforms)
- else:
- transforms_config = dict()
-
- transform_dict = {dkey: load_partial_from_config(transforms_config[dkey])
- if transforms_config[dkey] != 'identity' else identity
- for dkey in transforms_config}
- img_key = dataset_config.get('image_key', 'jpeg')
- transform_dict.update({img_key: image_transforms})
-
- if 'postprocess' in dataset_config:
- postprocess = instantiate_from_config(dataset_config['postprocess'])
- else:
- postprocess = None
-
- shuffle = dataset_config.get('shuffle', 0)
- shardshuffle = shuffle > 0
-
- nodesplitter = wds.shardlists.split_by_node if self.multinode else wds.shardlists.single_node_only
-
- if self.tar_base == "__improvedaesthetic__":
- print("## Warning, loading the same improved aesthetic dataset "
- "for all splits and ignoring shards parameter.")
- tars = "pipe:aws s3 cp s3://s-laion/improved-aesthetics-laion-2B-en-subsets/aesthetics_tars/{000000..060207}.tar -"
- else:
- tars = os.path.join(self.tar_base, dataset_config.shards)
-
- dset = wds.WebDataset(
- tars,
- nodesplitter=nodesplitter,
- shardshuffle=shardshuffle,
- handler=wds.warn_and_continue).repeat().shuffle(shuffle)
- print(f'Loading webdataset with {len(dset.pipeline[0].urls)} shards.')
-
- dset = (dset
- .select(self.filter_keys)
- .decode('pil', handler=wds.warn_and_continue)
- .select(self.filter_size)
- .map_dict(**transform_dict, handler=wds.warn_and_continue)
- )
- if postprocess is not None:
- dset = dset.map(postprocess)
- dset = (dset
- .batched(self.batch_size, partial=False,
- collation_fn=dict_collation_fn)
- )
-
- loader = wds.WebLoader(dset, batch_size=None, shuffle=False,
- num_workers=self.num_workers)
-
- return loader
-
- def filter_size(self, x):
- try:
- valid = True
- if self.min_size is not None and self.min_size > 1:
- try:
- valid = valid and x['json']['original_width'] >= self.min_size and x['json']['original_height'] >= self.min_size
- except Exception:
- valid = False
- if self.max_pwatermark is not None and self.max_pwatermark < 1.0:
- try:
- valid = valid and x['json']['pwatermark'] <= self.max_pwatermark
- except Exception:
- valid = False
- return valid
- except Exception:
- return False
-
- def filter_keys(self, x):
- try:
- return ("jpg" in x) and ("txt" in x)
- except Exception:
- return False
-
- def train_dataloader(self):
- return self.make_loader(self.train)
-
- def val_dataloader(self):
- return self.make_loader(self.validation, train=False)
-
- def test_dataloader(self):
- return self.make_loader(self.test, train=False)
-
-
-from ldm.modules.image_degradation import degradation_fn_bsr_light
-import cv2
-
-class AddLR(object):
- def __init__(self, factor, output_size, initial_size=None, image_key="jpg"):
- self.factor = factor
- self.output_size = output_size
- self.image_key = image_key
- self.initial_size = initial_size
-
- def pt2np(self, x):
- x = ((x+1.0)*127.5).clamp(0, 255).to(dtype=torch.uint8).detach().cpu().numpy()
- return x
-
- def np2pt(self, x):
- x = torch.from_numpy(x)/127.5-1.0
- return x
-
- def __call__(self, sample):
- # sample['jpg'] is tensor hwc in [-1, 1] at this point
- x = self.pt2np(sample[self.image_key])
- if self.initial_size is not None:
- x = cv2.resize(x, (self.initial_size, self.initial_size), interpolation=cv2.INTER_CUBIC)
- x = degradation_fn_bsr_light(x, sf=self.factor)['image']
- x = cv2.resize(x, (self.output_size, self.output_size), interpolation=cv2.INTER_CUBIC)
- x = self.np2pt(x)
- sample['lr'] = x
- return sample
-
-class AddBW(object):
- def __init__(self, image_key="jpg"):
- self.image_key = image_key
-
- def pt2np(self, x):
- x = ((x+1.0)*127.5).clamp(0, 255).to(dtype=torch.uint8).detach().cpu().numpy()
- return x
-
- def np2pt(self, x):
- x = torch.from_numpy(x)/127.5-1.0
- return x
-
- def __call__(self, sample):
- # sample['jpg'] is tensor hwc in [-1, 1] at this point
- x = sample[self.image_key]
- w = torch.rand(3, device=x.device)
- w /= w.sum()
- out = torch.einsum('hwc,c->hw', x, w)
-
- # Keep as 3ch so we can pass to encoder, also we might want to add hints
- sample['lr'] = out.unsqueeze(-1).tile(1,1,3)
- return sample
-
-class AddMask(PRNGMixin):
- def __init__(self, mode="512train", p_drop=0.):
- super().__init__()
- assert mode in list(MASK_MODES.keys()), f'unknown mask generation mode "{mode}"'
- self.make_mask = MASK_MODES[mode]
- self.p_drop = p_drop
-
- def __call__(self, sample):
- # sample['jpg'] is tensor hwc in [-1, 1] at this point
- x = sample['jpg']
- mask = self.make_mask(self.prng, x.shape[0], x.shape[1])
- if self.prng.choice(2, p=[1 - self.p_drop, self.p_drop]):
- mask = np.ones_like(mask)
- mask[mask < 0.5] = 0
- mask[mask > 0.5] = 1
- mask = torch.from_numpy(mask[..., None])
- sample['mask'] = mask
- sample['masked_image'] = x * (mask < 0.5)
- return sample
-
-
-class AddEdge(PRNGMixin):
- def __init__(self, mode="512train", mask_edges=True):
- super().__init__()
- assert mode in list(MASK_MODES.keys()), f'unknown mask generation mode "{mode}"'
- self.make_mask = MASK_MODES[mode]
- self.n_down_choices = [0]
- self.sigma_choices = [1, 2]
- self.mask_edges = mask_edges
-
- @torch.no_grad()
- def __call__(self, sample):
- # sample['jpg'] is tensor hwc in [-1, 1] at this point
- x = sample['jpg']
-
- mask = self.make_mask(self.prng, x.shape[0], x.shape[1])
- mask[mask < 0.5] = 0
- mask[mask > 0.5] = 1
- mask = torch.from_numpy(mask[..., None])
- sample['mask'] = mask
-
- n_down_idx = self.prng.choice(len(self.n_down_choices))
- sigma_idx = self.prng.choice(len(self.sigma_choices))
-
- n_choices = len(self.n_down_choices)*len(self.sigma_choices)
- raveled_idx = np.ravel_multi_index((n_down_idx, sigma_idx),
- (len(self.n_down_choices), len(self.sigma_choices)))
- normalized_idx = raveled_idx/max(1, n_choices-1)
-
- n_down = self.n_down_choices[n_down_idx]
- sigma = self.sigma_choices[sigma_idx]
-
- kernel_size = 4*sigma+1
- kernel_size = (kernel_size, kernel_size)
- sigma = (sigma, sigma)
- canny = kornia.filters.Canny(
- low_threshold=0.1,
- high_threshold=0.2,
- kernel_size=kernel_size,
- sigma=sigma,
- hysteresis=True,
- )
- y = (x+1.0)/2.0 # in 01
- y = y.unsqueeze(0).permute(0, 3, 1, 2).contiguous()
-
- # down
- for i_down in range(n_down):
- size = min(y.shape[-2], y.shape[-1])//2
- y = kornia.geometry.transform.resize(y, size, antialias=True)
-
- # edge
- _, y = canny(y)
-
- if n_down > 0:
- size = x.shape[0], x.shape[1]
- y = kornia.geometry.transform.resize(y, size, interpolation="nearest")
-
- y = y.permute(0, 2, 3, 1)[0].expand(-1, -1, 3).contiguous()
- y = y*2.0-1.0
-
- if self.mask_edges:
- sample['masked_image'] = y * (mask < 0.5)
- else:
- sample['masked_image'] = y
- sample['mask'] = torch.zeros_like(sample['mask'])
-
- # concat normalized idx
- sample['smoothing_strength'] = torch.ones_like(sample['mask'])*normalized_idx
-
- return sample
-
-
-def example00():
- url = "pipe:aws s3 cp s3://s-datasets/laion5b/laion2B-data/000000.tar -"
- dataset = wds.WebDataset(url)
- example = next(iter(dataset))
- for k in example:
- print(k, type(example[k]))
-
- print(example["__key__"])
- for k in ["json", "txt"]:
- print(example[k].decode())
-
- image = Image.open(io.BytesIO(example["jpg"]))
- outdir = "tmp"
- os.makedirs(outdir, exist_ok=True)
- image.save(os.path.join(outdir, example["__key__"] + ".png"))
-
-
- def load_example(example):
- return {
- "key": example["__key__"],
- "image": Image.open(io.BytesIO(example["jpg"])),
- "text": example["txt"].decode(),
- }
-
-
- for i, example in tqdm(enumerate(dataset)):
- ex = load_example(example)
- print(ex["image"].size, ex["text"])
- if i >= 100:
- break
-
-
-def example01():
- # the first laion shards contain ~10k examples each
- url = "pipe:aws s3 cp s3://s-datasets/laion5b/laion2B-data/{000000..000002}.tar -"
-
- batch_size = 3
- shuffle_buffer = 10000
- dset = wds.WebDataset(
- url,
- nodesplitter=wds.shardlists.split_by_node,
- shardshuffle=True,
- )
- dset = (dset
- .shuffle(shuffle_buffer, initial=shuffle_buffer)
- .decode('pil', handler=warn_and_continue)
- .batched(batch_size, partial=False,
- collation_fn=dict_collation_fn)
- )
-
- num_workers = 2
- loader = wds.WebLoader(dset, batch_size=None, shuffle=False, num_workers=num_workers)
-
- batch_sizes = list()
- keys_per_epoch = list()
- for epoch in range(5):
- keys = list()
- for batch in tqdm(loader):
- batch_sizes.append(len(batch["__key__"]))
- keys.append(batch["__key__"])
-
- for bs in batch_sizes:
- assert bs==batch_size
- print(f"{len(batch_sizes)} batches of size {batch_size}.")
- batch_sizes = list()
-
- keys_per_epoch.append(keys)
- for i_batch in [0, 1, -1]:
- print(f"Batch {i_batch} of epoch {epoch}:")
- print(keys[i_batch])
- print("next epoch.")
-
-
-def example02():
- from omegaconf import OmegaConf
- from torch.utils.data.distributed import DistributedSampler
- from torch.utils.data import IterableDataset
- from torch.utils.data import DataLoader, RandomSampler, Sampler, SequentialSampler
- from pytorch_lightning.trainer.supporters import CombinedLoader, CycleIterator
-
- #config = OmegaConf.load("configs/stable-diffusion/txt2img-1p4B-multinode-clip-encoder-high-res-512.yaml")
- #config = OmegaConf.load("configs/stable-diffusion/txt2img-upscale-clip-encoder-f16-1024.yaml")
- config = OmegaConf.load("configs/stable-diffusion/txt2img-v2-clip-encoder-improved_aesthetics-256.yaml")
- datamod = WebDataModuleFromConfig(**config["data"]["params"])
- dataloader = datamod.train_dataloader()
-
- for batch in dataloader:
- print(batch.keys())
- print(batch["jpg"].shape)
- break
-
-
-def example03():
- # improved aesthetics
- tars = "pipe:aws s3 cp s3://s-laion/improved-aesthetics-laion-2B-en-subsets/aesthetics_tars/{000000..060207}.tar -"
- dataset = wds.WebDataset(tars)
-
- def filter_keys(x):
- try:
- return ("jpg" in x) and ("txt" in x)
- except Exception:
- return False
-
- def filter_size(x):
- try:
- return x['json']['original_width'] >= 512 and x['json']['original_height'] >= 512
- except Exception:
- return False
-
- def filter_watermark(x):
- try:
- return x['json']['pwatermark'] < 0.5
- except Exception:
- return False
-
- dataset = (dataset
- .select(filter_keys)
- .decode('pil', handler=wds.warn_and_continue))
- n_save = 20
- n_total = 0
- n_large = 0
- n_large_nowm = 0
- for i, example in enumerate(dataset):
- n_total += 1
- if filter_size(example):
- n_large += 1
- if filter_watermark(example):
- n_large_nowm += 1
- if n_large_nowm < n_save+1:
- image = example["jpg"]
- image.save(os.path.join("tmp", f"{n_large_nowm-1:06}.png"))
-
- if i%500 == 0:
- print(i)
- print(f"Large: {n_large}/{n_total} | {n_large/n_total*100:.2f}%")
- if n_large > 0:
- print(f"No Watermark: {n_large_nowm}/{n_large} | {n_large_nowm/n_large*100:.2f}%")
-
-
-
-def example04():
- # improved aesthetics
- for i_shard in range(60208)[::-1]:
- print(i_shard)
- tars = "pipe:aws s3 cp s3://s-laion/improved-aesthetics-laion-2B-en-subsets/aesthetics_tars/{:06}.tar -".format(i_shard)
- dataset = wds.WebDataset(tars)
-
- def filter_keys(x):
- try:
- return ("jpg" in x) and ("txt" in x)
- except Exception:
- return False
-
- def filter_size(x):
- try:
- return x['json']['original_width'] >= 512 and x['json']['original_height'] >= 512
- except Exception:
- return False
-
- dataset = (dataset
- .select(filter_keys)
- .decode('pil', handler=wds.warn_and_continue))
- try:
- example = next(iter(dataset))
- except Exception:
- print(f"Error @ {i_shard}")
-
-
-if __name__ == "__main__":
- #example01()
- #example02()
- example03()
- #example04()
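As a quick illustration of the collation behaviour documented in dict_collation_fn above, a minimal sketch with hypothetical samples (assuming the module is importable as ldm.data.laion):

import torch
from ldm.data.laion import dict_collation_fn

samples = [
    {"jpg": torch.zeros(3, 4, 4), "txt": "a cat", "aesthetic": 5.1},
    {"jpg": torch.ones(3, 4, 4),  "txt": "a dog", "aesthetic": 6.0},
]
batch = dict_collation_fn(samples)
print(batch["jpg"].shape)    # torch.Size([2, 3, 4, 4]) -- tensors are stacked
print(batch["txt"])          # ['a cat', 'a dog']       -- other values stay a list
print(batch["aesthetic"])    # array([5.1, 6. ])        -- scalars become an ndarray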
diff --git a/One-2-3-45-master 2/ldm/data/lsun.py b/One-2-3-45-master 2/ldm/data/lsun.py
deleted file mode 100644
index 6256e45715ff0b57c53f985594d27cbbbff0e68e..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/data/lsun.py
+++ /dev/null
@@ -1,92 +0,0 @@
-import os
-import numpy as np
-import PIL
-from PIL import Image
-from torch.utils.data import Dataset
-from torchvision import transforms
-
-
-class LSUNBase(Dataset):
- def __init__(self,
- txt_file,
- data_root,
- size=None,
- interpolation="bicubic",
- flip_p=0.5
- ):
- self.data_paths = txt_file
- self.data_root = data_root
- with open(self.data_paths, "r") as f:
- self.image_paths = f.read().splitlines()
- self._length = len(self.image_paths)
- self.labels = {
- "relative_file_path_": [l for l in self.image_paths],
- "file_path_": [os.path.join(self.data_root, l)
- for l in self.image_paths],
- }
-
- self.size = size
- self.interpolation = {"linear": PIL.Image.BILINEAR,  # the LINEAR alias was removed in newer Pillow
- "bilinear": PIL.Image.BILINEAR,
- "bicubic": PIL.Image.BICUBIC,
- "lanczos": PIL.Image.LANCZOS,
- }[interpolation]
- self.flip = transforms.RandomHorizontalFlip(p=flip_p)
-
- def __len__(self):
- return self._length
-
- def __getitem__(self, i):
- example = dict((k, self.labels[k][i]) for k in self.labels)
- image = Image.open(example["file_path_"])
- if not image.mode == "RGB":
- image = image.convert("RGB")
-
- # default to score-sde preprocessing
- img = np.array(image).astype(np.uint8)
- crop = min(img.shape[0], img.shape[1])
- h, w = img.shape[0], img.shape[1]
- img = img[(h - crop) // 2:(h + crop) // 2,
- (w - crop) // 2:(w + crop) // 2]
-
- image = Image.fromarray(img)
- if self.size is not None:
- image = image.resize((self.size, self.size), resample=self.interpolation)
-
- image = self.flip(image)
- image = np.array(image).astype(np.uint8)
- example["image"] = (image / 127.5 - 1.0).astype(np.float32)
- return example
-
-
-class LSUNChurchesTrain(LSUNBase):
- def __init__(self, **kwargs):
- super().__init__(txt_file="data/lsun/church_outdoor_train.txt", data_root="data/lsun/churches", **kwargs)
-
-
-class LSUNChurchesValidation(LSUNBase):
- def __init__(self, flip_p=0., **kwargs):
- super().__init__(txt_file="data/lsun/church_outdoor_val.txt", data_root="data/lsun/churches",
- flip_p=flip_p, **kwargs)
-
-
-class LSUNBedroomsTrain(LSUNBase):
- def __init__(self, **kwargs):
- super().__init__(txt_file="data/lsun/bedrooms_train.txt", data_root="data/lsun/bedrooms", **kwargs)
-
-
-class LSUNBedroomsValidation(LSUNBase):
- def __init__(self, flip_p=0.0, **kwargs):
- super().__init__(txt_file="data/lsun/bedrooms_val.txt", data_root="data/lsun/bedrooms",
- flip_p=flip_p, **kwargs)
-
-
-class LSUNCatsTrain(LSUNBase):
- def __init__(self, **kwargs):
- super().__init__(txt_file="data/lsun/cat_train.txt", data_root="data/lsun/cats", **kwargs)
-
-
-class LSUNCatsValidation(LSUNBase):
- def __init__(self, flip_p=0., **kwargs):
- super().__init__(txt_file="data/lsun/cat_val.txt", data_root="data/lsun/cats",
- flip_p=flip_p, **kwargs)
diff --git a/One-2-3-45-master 2/ldm/data/nerf_like.py b/One-2-3-45-master 2/ldm/data/nerf_like.py
deleted file mode 100644
index 84ef18288db005c72d3b5832144a7bd5cfffe9b2..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/data/nerf_like.py
+++ /dev/null
@@ -1,165 +0,0 @@
-from torch.utils.data import Dataset
-import os
-import json
-import numpy as np
-import torch
-import imageio
-import math
-import cv2
-from torchvision import transforms
-
-def cartesian_to_spherical(xyz):
- ptsnew = np.hstack((xyz, np.zeros(xyz.shape)))
- xy = xyz[:,0]**2 + xyz[:,1]**2
- z = np.sqrt(xy + xyz[:,2]**2)
- theta = np.arctan2(np.sqrt(xy), xyz[:,2]) # for elevation angle defined from Z-axis down
- #ptsnew[:,4] = np.arctan2(xyz[:,2], np.sqrt(xy)) # for elevation angle defined from XY-plane up
- azimuth = np.arctan2(xyz[:,1], xyz[:,0])
- return np.array([theta, azimuth, z])
-
-
-def get_T(T_target, T_cond):
- theta_cond, azimuth_cond, z_cond = cartesian_to_spherical(T_cond[None, :])
- theta_target, azimuth_target, z_target = cartesian_to_spherical(T_target[None, :])
-
- d_theta = theta_target - theta_cond
- d_azimuth = (azimuth_target - azimuth_cond) % (2 * math.pi)
- d_z = z_target - z_cond
-
- d_T = torch.tensor([d_theta.item(), math.sin(d_azimuth.item()), math.cos(d_azimuth.item()), d_z.item()])
- return d_T
-
-def get_spherical(T_target, T_cond):
- theta_cond, azimuth_cond, z_cond = cartesian_to_spherical(T_cond[None, :])
- theta_target, azimuth_target, z_target = cartesian_to_spherical(T_target[None, :])
-
- d_theta = theta_target - theta_cond
- d_azimuth = (azimuth_target - azimuth_cond) % (2 * math.pi)
- d_z = z_target - z_cond
-
- d_T = torch.tensor([math.degrees(d_theta.item()), math.degrees(d_azimuth.item()), d_z.item()])
- return d_T
-
-class RTMV(Dataset):
- def __init__(self, root_dir='datasets/RTMV/google_scanned',\
- first_K=64, resolution=256, load_target=False):
- self.root_dir = root_dir
- self.scene_list = sorted(next(os.walk(root_dir))[1])
- self.resolution = resolution
- self.first_K = first_K
- self.load_target = load_target
-
- def __len__(self):
- return len(self.scene_list)
-
- def __getitem__(self, idx):
- scene_dir = os.path.join(self.root_dir, self.scene_list[idx])
- with open(os.path.join(scene_dir, 'transforms.json'), "r") as f:
- meta = json.load(f)
- imgs = []
- poses = []
- for i_img in range(self.first_K):
- meta_img = meta['frames'][i_img]
-
- if i_img == 0 or self.load_target:
- img_path = os.path.join(scene_dir, meta_img['file_path'])
- img = imageio.imread(img_path)
- img = cv2.resize(img, (self.resolution, self.resolution), interpolation = cv2.INTER_LINEAR)
- imgs.append(img)
-
- c2w = meta_img['transform_matrix']
- poses.append(c2w)
-
- imgs = (np.array(imgs) / 255.).astype(np.float32) # (RGBA) imgs
- imgs = torch.tensor(self.blend_rgba(imgs)).permute(0, 3, 1, 2)
- imgs = imgs * 2 - 1. # convert to stable diffusion range
- poses = torch.tensor(np.array(poses).astype(np.float32))
- return imgs, poses
-
- def blend_rgba(self, img):
- img = img[..., :3] * img[..., -1:] + (1. - img[..., -1:]) # blend A to RGB
- return img
-
-
-class GSO(Dataset):
- def __init__(self, root_dir='datasets/GoogleScannedObjects',\
- split='val', first_K=5, resolution=256, load_target=False, name='render_mvs'):
- self.root_dir = root_dir
- with open(os.path.join(root_dir, '%s.json' % split), "r") as f:
- self.scene_list = json.load(f)
- self.resolution = resolution
- self.first_K = first_K
- self.load_target = load_target
- self.name = name
-
- def __len__(self):
- return len(self.scene_list)
-
- def __getitem__(self, idx):
- scene_dir = os.path.join(self.root_dir, self.scene_list[idx])
- with open(os.path.join(scene_dir, 'transforms_%s.json' % self.name), "r") as f:
- meta = json.load(f)
- imgs = []
- poses = []
- for i_img in range(self.first_K):
- meta_img = meta['frames'][i_img]
-
- if i_img == 0 or self.load_target:
- img_path = os.path.join(scene_dir, meta_img['file_path'])
- img = imageio.imread(img_path)
- img = cv2.resize(img, (self.resolution, self.resolution), interpolation = cv2.INTER_LINEAR)
- imgs.append(img)
-
- c2w = meta_img['transform_matrix']
- poses.append(c2w)
-
- imgs = (np.array(imgs) / 255.).astype(np.float32) # (RGBA) imgs
- mask = imgs[:, :, :, -1]
- imgs = torch.tensor(self.blend_rgba(imgs)).permute(0, 3, 1, 2)
- imgs = imgs * 2 - 1. # convert to stable diffusion range
- poses = torch.tensor(np.array(poses).astype(np.float32))
- return imgs, poses
-
- def blend_rgba(self, img):
- img = img[..., :3] * img[..., -1:] + (1. - img[..., -1:]) # blend A to RGB
- return img
-
-class WILD(Dataset):
- def __init__(self, root_dir='data/nerf_wild',\
- first_K=33, resolution=256, load_target=False):
- self.root_dir = root_dir
- self.scene_list = sorted(next(os.walk(root_dir))[1])
- self.resolution = resolution
- self.first_K = first_K
- self.load_target = load_target
-
- def __len__(self):
- return len(self.scene_list)
-
- def __getitem__(self, idx):
- scene_dir = os.path.join(self.root_dir, self.scene_list[idx])
- with open(os.path.join(scene_dir, 'transforms_train.json'), "r") as f:
- meta = json.load(f)
- imgs = []
- poses = []
- for i_img in range(self.first_K):
- meta_img = meta['frames'][i_img]
-
- if i_img == 0 or self.load_target:
- img_path = os.path.join(scene_dir, meta_img['file_path'])
- img = imageio.imread(img_path + '.png')
- img = cv2.resize(img, (self.resolution, self.resolution), interpolation = cv2.INTER_LINEAR)
- imgs.append(img)
-
- c2w = meta_img['transform_matrix']
- poses.append(c2w)
-
- imgs = (np.array(imgs) / 255.).astype(np.float32) # (RGBA) imgs
- imgs = torch.tensor(self.blend_rgba(imgs)).permute(0, 3, 1, 2)
- imgs = imgs * 2 - 1. # convert to stable diffusion range
- poses = torch.tensor(np.array(poses).astype(np.float32))
- return imgs, poses
-
- def blend_rgba(self, img):
- img = img[..., :3] * img[..., -1:] + (1. - img[..., -1:]) # blend A to RGB
- return img
\ No newline at end of file
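A small worked example of the relative camera-pose encoding defined by get_T / get_spherical above (illustrative; assumes the module is importable as ldm.data.nerf_like):

import numpy as np
from ldm.data.nerf_like import get_T, get_spherical

cond_cam = np.array([1.0, 0.0, 0.0])    # condition camera position (x, y, z)
target_cam = np.array([0.0, 1.0, 0.0])  # target camera position

# 4-vector [d_theta, sin(d_azimuth), cos(d_azimuth), d_radius] used as conditioning
print(get_T(target_cam, cond_cam))          # ~ tensor([0., 1., 0., 0.]) for a 90-degree azimuth change
# the same offset expressed in degrees, for inspection
print(get_spherical(target_cam, cond_cam))  # ~ tensor([0., 90., 0.])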
diff --git a/One-2-3-45-master 2/ldm/data/simple.py b/One-2-3-45-master 2/ldm/data/simple.py
deleted file mode 100644
index a853e2188e4e61cf91c3e1ca0da3e4f0069dbcee..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/data/simple.py
+++ /dev/null
@@ -1,526 +0,0 @@
-from typing import Dict
-import webdataset as wds
-import numpy as np
-from omegaconf import DictConfig, ListConfig
-import torch
-from torch.utils.data import Dataset
-from pathlib import Path
-import json
-from PIL import Image
-from torchvision import transforms
-import torchvision
-from einops import rearrange
-from ldm.util import instantiate_from_config
-from datasets import load_dataset
-import pytorch_lightning as pl
-import copy
-import csv
-import cv2
-import random
-import matplotlib.pyplot as plt
-from torch.utils.data import DataLoader
-import os, sys
-import math
-from torch.utils.data.distributed import DistributedSampler
-
-# Some hacky things to make experimentation easier
-def make_transform_multi_folder_data(paths, caption_files=None, **kwargs):
- ds = make_multi_folder_data(paths, caption_files, **kwargs)
- return TransformDataset(ds)
-
-def make_nfp_data(base_path):
- dirs = list(Path(base_path).glob("*/"))
- print(f"Found {len(dirs)} folders")
- print(dirs)
- tforms = [transforms.Resize(512), transforms.CenterCrop(512)]
- datasets = [NfpDataset(x, image_transforms=copy.copy(tforms), default_caption="A view from a train window") for x in dirs]
- return torch.utils.data.ConcatDataset(datasets)
-
-
-class VideoDataset(Dataset):
- def __init__(self, root_dir, image_transforms, caption_file, offset=8, n=2):
- self.root_dir = Path(root_dir)
- self.caption_file = caption_file
- self.n = n
- ext = "mp4"
- self.paths = sorted(list(self.root_dir.rglob(f"*.{ext}")))
- self.offset = offset
-
- if isinstance(image_transforms, ListConfig):
- image_transforms = [instantiate_from_config(tt) for tt in image_transforms]
- image_transforms.extend([transforms.ToTensor(),
- transforms.Lambda(lambda x: rearrange(x * 2. - 1., 'c h w -> h w c'))])
- image_transforms = transforms.Compose(image_transforms)
- self.tform = image_transforms
- with open(self.caption_file) as f:
- reader = csv.reader(f)
- rows = [row for row in reader]
- self.captions = dict(rows)
-
- def __len__(self):
- return len(self.paths)
-
- def __getitem__(self, index):
- for _ in range(10):
- try:
- return self._load_sample(index)
- except Exception:
- # Not really good enough but...
- print("uh oh")
- raise RuntimeError(f"Failed to load sample {index} after 10 attempts")
-
- def _load_sample(self, index):
- n = self.n
- filename = self.paths[index]
- min_frame = 2*self.offset + 2
- vid = cv2.VideoCapture(str(filename))
- max_frames = int(vid.get(cv2.CAP_PROP_FRAME_COUNT))
- curr_frame_n = random.randint(min_frame, max_frames)
- vid.set(cv2.CAP_PROP_POS_FRAMES,curr_frame_n)
- _, curr_frame = vid.read()
-
- prev_frames = []
- for i in range(n):
- prev_frame_n = curr_frame_n - (i+1)*self.offset
- vid.set(cv2.CAP_PROP_POS_FRAMES,prev_frame_n)
- _, prev_frame = vid.read()
- prev_frame = self.tform(Image.fromarray(prev_frame[...,::-1]))
- prev_frames.append(prev_frame)
-
- vid.release()
- caption = self.captions[filename.name]
- data = {
- "image": self.tform(Image.fromarray(curr_frame[...,::-1])),
- "prev": torch.cat(prev_frames, dim=-1),
- "txt": caption
- }
- return data
-
-# end hacky things
-
-
-def make_transforms(image_transforms):
- # if isinstance(image_transforms, ListConfig):
- # image_transforms = [instantiate_from_config(tt) for tt in image_transforms]
- image_transforms = [] # NOTE: any transforms passed in are currently discarded
- image_transforms.extend([transforms.ToTensor(),
- transforms.Lambda(lambda x: rearrange(x * 2. - 1., 'c h w -> h w c'))])
- image_transforms = transforms.Compose(image_transforms)
- return image_transforms
-
-
-def make_multi_folder_data(paths, caption_files=None, **kwargs):
- """Make a concat dataset from multiple folders
- Doesn't support captions yet.
-
- If paths is a list it is used as-is; if it is a Dict, it is interpreted as
- {folder: number of times to repeat that folder}.
- """
- list_of_paths = []
- if isinstance(paths, (Dict, DictConfig)):
- assert caption_files is None, \
- "Caption files not yet supported for repeats"
- for folder_path, repeats in paths.items():
- list_of_paths.extend([folder_path]*repeats)
- paths = list_of_paths
-
- if caption_files is not None:
- datasets = [FolderData(p, caption_file=c, **kwargs) for (p, c) in zip(paths, caption_files)]
- else:
- datasets = [FolderData(p, **kwargs) for p in paths]
- return torch.utils.data.ConcatDataset(datasets)
-
-
-
-class NfpDataset(Dataset):
- def __init__(self,
- root_dir,
- image_transforms=[],
- ext="jpg",
- default_caption="",
- ) -> None:
- """assume sequential frames and a deterministic transform"""
-
- self.root_dir = Path(root_dir)
- self.default_caption = default_caption
-
- self.paths = sorted(list(self.root_dir.rglob(f"*.{ext}")))
- self.tform = make_transforms(image_transforms)
-
- def __len__(self):
- return len(self.paths) - 1
-
-
- def __getitem__(self, index):
- prev = self.paths[index]
- curr = self.paths[index+1]
- data = {}
- data["image"] = self._load_im(curr)
- data["prev"] = self._load_im(prev)
- data["txt"] = self.default_caption
- return data
-
- def _load_im(self, filename):
- im = Image.open(filename).convert("RGB")
- return self.tform(im)
-
-class ObjaverseDataModuleFromConfig(pl.LightningDataModule):
- def __init__(self, root_dir, batch_size, total_view, train=None, validation=None,
- test=None, num_workers=4, **kwargs):
- super().__init__()
- self.root_dir = root_dir
- self.batch_size = batch_size
- self.num_workers = num_workers
- self.total_view = total_view
-
- if train is not None:
- dataset_config = train
- if validation is not None:
- dataset_config = validation
-
- if 'image_transforms' in dataset_config:
- image_transforms = [torchvision.transforms.Resize(dataset_config.image_transforms.size)]
- else:
- image_transforms = []
- image_transforms.extend([transforms.ToTensor(),
- transforms.Lambda(lambda x: rearrange(x * 2. - 1., 'c h w -> h w c'))])
- self.image_transforms = torchvision.transforms.Compose(image_transforms)
-
-
- def train_dataloader(self):
- dataset = ObjaverseData(root_dir=self.root_dir, total_view=self.total_view, validation=False, \
- image_transforms=self.image_transforms)
- sampler = DistributedSampler(dataset)
- return wds.WebLoader(dataset, batch_size=self.batch_size, num_workers=self.num_workers, shuffle=False, sampler=sampler)
-
- def val_dataloader(self):
- dataset = ObjaverseData(root_dir=self.root_dir, total_view=self.total_view, validation=True, \
- image_transforms=self.image_transforms)
- sampler = DistributedSampler(dataset)
- return wds.WebLoader(dataset, batch_size=self.batch_size, num_workers=self.num_workers, shuffle=False, sampler=sampler)
-
- def test_dataloader(self):
- # no separate test split is defined, so reuse the validation split
- dataset = ObjaverseData(root_dir=self.root_dir, total_view=self.total_view, validation=True, \
- image_transforms=self.image_transforms)
- return wds.WebLoader(dataset, batch_size=self.batch_size, num_workers=self.num_workers, shuffle=False)
-
-
-class ObjaverseData(Dataset):
- def __init__(self,
- root_dir='.objaverse/hf-objaverse-v1/views',
- image_transforms=[],
- ext="png",
- default_trans=torch.zeros(3),
- postprocess=None,
- return_paths=False,
- total_view=4,
- validation=False
- ) -> None:
- """Create a dataset from a folder of images.
- If you pass in a root directory it will be searched for images
- ending in ext (ext can be a list)
- """
- self.root_dir = Path(root_dir)
- self.default_trans = default_trans
- self.return_paths = return_paths
- if isinstance(postprocess, DictConfig):
- postprocess = instantiate_from_config(postprocess)
- self.postprocess = postprocess
- self.total_view = total_view
-
- if not isinstance(ext, (tuple, list, ListConfig)):
- ext = [ext]
-
- with open(os.path.join(root_dir, 'valid_paths.json')) as f:
- self.paths = json.load(f)
-
- total_objects = len(self.paths)
- if validation:
- self.paths = self.paths[math.floor(total_objects / 100. * 99.):] # use the last 1% as validation
- else:
- self.paths = self.paths[:math.floor(total_objects / 100. * 99.)] # use the first 99% for training
- print('============= length of dataset %d =============' % len(self.paths))
- self.tform = image_transforms
-
- def __len__(self):
- return len(self.paths)
-
- def cartesian_to_spherical(self, xyz):
- ptsnew = np.hstack((xyz, np.zeros(xyz.shape)))
- xy = xyz[:,0]**2 + xyz[:,1]**2
- z = np.sqrt(xy + xyz[:,2]**2)
- theta = np.arctan2(np.sqrt(xy), xyz[:,2]) # for elevation angle defined from Z-axis down
- #ptsnew[:,4] = np.arctan2(xyz[:,2], np.sqrt(xy)) # for elevation angle defined from XY-plane up
- azimuth = np.arctan2(xyz[:,1], xyz[:,0])
- return np.array([theta, azimuth, z])
-
- def get_T(self, target_RT, cond_RT):
- R, T = target_RT[:3, :3], target_RT[:, -1]
- T_target = -R.T @ T
-
- R, T = cond_RT[:3, :3], cond_RT[:, -1]
- T_cond = -R.T @ T
-
- theta_cond, azimuth_cond, z_cond = self.cartesian_to_spherical(T_cond[None, :])
- theta_target, azimuth_target, z_target = self.cartesian_to_spherical(T_target[None, :])
-
- d_theta = theta_target - theta_cond
- d_azimuth = (azimuth_target - azimuth_cond) % (2 * math.pi)
- d_z = z_target - z_cond
-
- d_T = torch.tensor([d_theta.item(), math.sin(d_azimuth.item()), math.cos(d_azimuth.item()), d_z.item()])
- return d_T
-
- def load_im(self, path, color):
- '''
- Replace fully transparent background pixels with the given color
- in the rendering, then drop the alpha channel.
- '''
- try:
- img = plt.imread(path)
- except:
- print(path)
- sys.exit()
- img[img[:, :, -1] == 0.] = color
- img = Image.fromarray(np.uint8(img[:, :, :3] * 255.))
- return img
-
- def __getitem__(self, index):
-
- data = {}
- if self.paths[index][-2:] == '_1': # dirty fix for rendering dataset twice
- total_view = 8
- else:
- total_view = 4
- index_target, index_cond = random.sample(range(total_view), 2) # without replacement
- filename = os.path.join(self.root_dir, self.paths[index])
-
- # print(self.paths[index])
-
- if self.return_paths:
- data["path"] = str(filename)
-
- color = [1., 1., 1., 1.]
-
- try:
- target_im = self.process_im(self.load_im(os.path.join(filename, '%03d.png' % index_target), color))
- cond_im = self.process_im(self.load_im(os.path.join(filename, '%03d.png' % index_cond), color))
- target_RT = np.load(os.path.join(filename, '%03d.npy' % index_target))
- cond_RT = np.load(os.path.join(filename, '%03d.npy' % index_cond))
- except:
- # very hacky solution, sorry about this
- filename = os.path.join(self.root_dir, '692db5f2d3a04bb286cb977a7dba903e_1') # this one we know is valid
- target_im = self.process_im(self.load_im(os.path.join(filename, '%03d.png' % index_target), color))
- cond_im = self.process_im(self.load_im(os.path.join(filename, '%03d.png' % index_cond), color))
- target_RT = np.load(os.path.join(filename, '%03d.npy' % index_target))
- cond_RT = np.load(os.path.join(filename, '%03d.npy' % index_cond))
- target_im = torch.zeros_like(target_im)
- cond_im = torch.zeros_like(cond_im)
-
- data["image_target"] = target_im
- data["image_cond"] = cond_im
- data["T"] = self.get_T(target_RT, cond_RT)
-
- if self.postprocess is not None:
- data = self.postprocess(data)
-
- return data
-
- def process_im(self, im):
- im = im.convert("RGB")
- return self.tform(im)
-
-class FolderData(Dataset):
- def __init__(self,
- root_dir,
- caption_file=None,
- image_transforms=[],
- ext="jpg",
- default_caption="",
- postprocess=None,
- return_paths=False,
- ) -> None:
- """Create a dataset from a folder of images.
- If you pass in a root directory it will be searched for images
- ending in ext (ext can be a list)
- """
- self.root_dir = Path(root_dir)
- self.default_caption = default_caption
- self.return_paths = return_paths
- if isinstance(postprocess, DictConfig):
- postprocess = instantiate_from_config(postprocess)
- self.postprocess = postprocess
- if caption_file is not None:
- with open(caption_file, "rt") as f:
- # use a separate variable so the image extension `ext` is not clobbered
- caption_ext = Path(caption_file).suffix.lower()
- if caption_ext == ".json":
- captions = json.load(f)
- elif caption_ext == ".jsonl":
- lines = f.readlines()
- lines = [json.loads(x) for x in lines]
- captions = {x["file_name"]: x["text"].strip("\n") for x in lines}
- else:
- raise ValueError(f"Unrecognised format: {caption_ext}")
- self.captions = captions
- else:
- self.captions = None
-
- if not isinstance(ext, (tuple, list, ListConfig)):
- ext = [ext]
-
- # Only used if there is no caption file
- self.paths = []
- for e in ext:
- self.paths.extend(sorted(list(self.root_dir.rglob(f"*.{e}"))))
- self.tform = make_tranforms(image_transforms)
-
- def __len__(self):
- if self.captions is not None:
- return len(self.captions.keys())
- else:
- return len(self.paths)
-
- def __getitem__(self, index):
- data = {}
- if self.captions is not None:
- chosen = list(self.captions.keys())[index]
- caption = self.captions.get(chosen, None)
- if caption is None:
- caption = self.default_caption
- filename = self.root_dir/chosen
- else:
- filename = self.paths[index]
-
- if self.return_paths:
- data["path"] = str(filename)
-
- im = Image.open(filename).convert("RGB")
- im = self.process_im(im)
- data["image"] = im
-
- if self.captions is not None:
- data["txt"] = caption
- else:
- data["txt"] = self.default_caption
-
- if self.postprocess is not None:
- data = self.postprocess(data)
-
- return data
-
- def process_im(self, im):
- im = im.convert("RGB")
- return self.tform(im)
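
For reference, FolderData above accepts captions either as a .json mapping or as a .jsonl file with one {"file_name": ..., "text": ...} record per line. A small sketch of producing such a .jsonl file follows; the file names and captions are invented for illustration.

import json

def write_caption_jsonl(captions, out_path):
    # one JSON object per line, matching what FolderData parses above
    with open(out_path, "wt") as f:
        for file_name, text in captions.items():
            f.write(json.dumps({"file_name": file_name, "text": text}) + "\n")

write_caption_jsonl({"0001.jpg": "a red chair", "0002.jpg": "a wooden table"},
                    "captions.jsonl")
# FolderData(root_dir="images/", caption_file="captions.jsonl") then resolves
# each record's file_name relative to root_dir.
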
-import random
-
-class TransformDataset():
- def __init__(self, ds, extra_label="sksbspic"):
- self.ds = ds
- self.extra_label = extra_label
- self.transforms = {
- "align": transforms.Resize(768),
- "centerzoom": transforms.CenterCrop(768),
- "randzoom": transforms.RandomCrop(768),
- }
-
-
- def __getitem__(self, index):
- data = self.ds[index]
-
- im = data['image']
- im = im.permute(2,0,1)
- # In case data is smaller than expected
- im = transforms.Resize(1024)(im)
-
- tform_name = random.choice(list(self.transforms.keys()))
- im = self.transforms[tform_name](im)
-
- im = im.permute(1,2,0)
-
- data['image'] = im
- data['txt'] = data['txt'] + f" {self.extra_label} {tform_name}"
-
- return data
-
- def __len__(self):
- return len(self.ds)
-
-def hf_dataset(
- name,
- image_transforms=[],
- image_column="image",
- text_column="text",
- split='train',
- image_key='image',
- caption_key='txt',
- ):
- """Make huggingface dataset with appropriate list of transforms applied
- """
- ds = load_dataset(name, split=split)
- tform = make_tranforms(image_transforms)
-
- assert image_column in ds.column_names, f"Didn't find column {image_column} in {ds.column_names}"
- assert text_column in ds.column_names, f"Didn't find column {text_column} in {ds.column_names}"
-
- def pre_process(examples):
- processed = {}
- processed[image_key] = [tform(im) for im in examples[image_column]]
- processed[caption_key] = examples[text_column]
- return processed
-
- ds.set_transform(pre_process)
- return ds
-
-class TextOnly(Dataset):
- def __init__(self, captions, output_size, image_key="image", caption_key="txt", n_gpus=1):
- """Returns only captions with dummy images"""
- self.output_size = output_size
- self.image_key = image_key
- self.caption_key = caption_key
- if isinstance(captions, Path):
- self.captions = self._load_caption_file(captions)
- else:
- self.captions = captions
-
- if n_gpus > 1:
- # hack to make sure that all the captions appear on each gpu
- repeated = [n_gpus*[x] for x in self.captions]
- self.captions = []
- [self.captions.extend(x) for x in repeated]
-
- def __len__(self):
- return len(self.captions)
-
- def __getitem__(self, index):
- dummy_im = torch.zeros(3, self.output_size, self.output_size)
- dummy_im = rearrange(dummy_im * 2. - 1., 'c h w -> h w c')
- return {self.image_key: dummy_im, self.caption_key: self.captions[index]}
-
- def _load_caption_file(self, filename):
- with open(filename, 'rt') as f:
- captions = f.readlines()
- return [x.strip('\n') for x in captions]
-
-
-
-import random
-import json
-class IdRetreivalDataset(FolderData):
- def __init__(self, ret_file, *args, **kwargs):
- super().__init__(*args, **kwargs)
- with open(ret_file, "rt") as f:
- self.ret = json.load(f)
-
- def __getitem__(self, index):
- data = super().__getitem__(index)
- key = self.paths[index].name
- matches = self.ret[key]
- if len(matches) > 0:
- retreived = random.choice(matches)
- else:
- retreived = key
- filename = self.root_dir/retreived
- im = Image.open(filename).convert("RGB")
- im = self.process_im(im)
- # data["match"] = im
- data["match"] = torch.cat((data["image"], im), dim=-1)
- return data
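
The four-dimensional conditioning vector produced by ObjaverseData.get_T can be reproduced in isolation. The sketch below mirrors that computation; the 3x4 [R|T] poses are random placeholders, only the math follows the dataset code.

import math
import numpy as np
import torch

def cartesian_to_spherical(xyz):
    # xyz: (1, 3) camera position; returns polar angle from +Z, azimuth, radius
    xy = xyz[:, 0] ** 2 + xyz[:, 1] ** 2
    z = np.sqrt(xy + xyz[:, 2] ** 2)
    theta = np.arctan2(np.sqrt(xy), xyz[:, 2])
    azimuth = np.arctan2(xyz[:, 1], xyz[:, 0])
    return theta, azimuth, z

def relative_pose_embedding(target_RT, cond_RT):
    # camera centers from world-to-camera [R|T]: C = -R^T T
    R, T = target_RT[:3, :3], target_RT[:, -1]
    T_target = -R.T @ T
    R, T = cond_RT[:3, :3], cond_RT[:, -1]
    T_cond = -R.T @ T

    theta_t, az_t, z_t = cartesian_to_spherical(T_target[None, :])
    theta_c, az_c, z_c = cartesian_to_spherical(T_cond[None, :])

    d_theta = theta_t - theta_c
    d_azimuth = (az_t - az_c) % (2 * math.pi)
    d_z = z_t - z_c
    # (d_theta, sin(d_azimuth), cos(d_azimuth), d_z), as in get_T above
    return torch.tensor([d_theta.item(),
                         math.sin(d_azimuth.item()),
                         math.cos(d_azimuth.item()),
                         d_z.item()])

rng = np.random.default_rng(0)
target_RT = rng.standard_normal((3, 4))   # placeholder poses
cond_RT = rng.standard_normal((3, 4))
print(relative_pose_embedding(target_RT, cond_RT))
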
diff --git a/One-2-3-45-master 2/ldm/extras.py b/One-2-3-45-master 2/ldm/extras.py
deleted file mode 100644
index 62e654b330c44b85565f958d04bee217a168d7ec..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/extras.py
+++ /dev/null
@@ -1,77 +0,0 @@
-from pathlib import Path
-from omegaconf import OmegaConf
-import torch
-from ldm.util import instantiate_from_config
-import logging
-from contextlib import contextmanager
-
-@contextmanager
-def all_logging_disabled(highest_level=logging.CRITICAL):
- """
- A context manager that will prevent any logging messages
- triggered during the body from being processed.
-
- :param highest_level: the maximum logging level in use.
- This would only need to be changed if a custom level greater than CRITICAL
- is defined.
-
- https://gist.github.com/simon-weber/7853144
- """
- # two kind-of hacks here:
- # * can't get the highest logging level in effect => delegate to the user
- # * can't get the current module-level override => use an undocumented
- # (but non-private!) interface
-
- previous_level = logging.root.manager.disable
-
- logging.disable(highest_level)
-
- try:
- yield
- finally:
- logging.disable(previous_level)
-
-def load_training_dir(train_dir, device, epoch="last"):
- """Load a checkpoint and config from training directory"""
- train_dir = Path(train_dir)
- ckpt = list(train_dir.rglob(f"*{epoch}.ckpt"))
- assert len(ckpt) == 1, f"found {len(ckpt)} matching ckpt files"
- config = list(train_dir.rglob("*-project.yaml"))
- assert len(config) > 0, f"didn't find any config in {train_dir}"
- if len(config) > 1:
- print(f"found {len(config)} matching config files")
- config = sorted(config)[-1]
- print(f"selecting {config}")
- else:
- config = config[0]
-
-
- config = OmegaConf.load(config)
- return load_model_from_config(config, ckpt[0], device)
-
-def load_model_from_config(config, ckpt, device="cpu", verbose=False):
- """Loads a model from config and a ckpt
- if config is a path will use omegaconf to load
- """
- if isinstance(config, (str, Path)):
- config = OmegaConf.load(config)
-
- with all_logging_disabled():
- print(f"Loading model from {ckpt}")
- pl_sd = torch.load(ckpt, map_location="cpu")
- global_step = pl_sd["global_step"]
- sd = pl_sd["state_dict"]
- model = instantiate_from_config(config.model)
- m, u = model.load_state_dict(sd, strict=False)
- if len(m) > 0 and verbose:
- print("missing keys:")
- print(m)
- if len(u) > 0 and verbose:
- print("unexpected keys:")
- print(u)
- model.to(device)
- model.eval()
- model.cond_stage_model.device = device
- return model
\ No newline at end of file
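
A hypothetical driver for the two helpers above; the run directory and the config/checkpoint paths are placeholders for a real Lightning training run.

import torch
from ldm.extras import load_training_dir, load_model_from_config

device = "cuda" if torch.cuda.is_available() else "cpu"

# Point at a training directory containing a single "*last.ckpt" and at least
# one "*-project.yaml" (the path is a placeholder).
model = load_training_dir("logs/2023-01-01T00-00-00_example-run", device, epoch="last")

# Equivalently, with an explicit config/checkpoint pair (also placeholders):
# model = load_model_from_config("configs/example-project.yaml", "checkpoints/last.ckpt", device=device)
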
diff --git a/One-2-3-45-master 2/ldm/guidance.py b/One-2-3-45-master 2/ldm/guidance.py
deleted file mode 100644
index 53d1a2a61b5f2f086178154cf04ea078e0835845..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/guidance.py
+++ /dev/null
@@ -1,96 +0,0 @@
-from typing import List, Tuple
-from scipy import interpolate
-import numpy as np
-import torch
-import matplotlib.pyplot as plt
-from IPython.display import clear_output
-import abc
-
-
-class GuideModel(torch.nn.Module, abc.ABC):
- def __init__(self) -> None:
- super().__init__()
-
- @abc.abstractmethod
- def preprocess(self, x_img):
- pass
-
- @abc.abstractmethod
- def compute_loss(self, inp):
- pass
-
-
-class Guider(torch.nn.Module):
- def __init__(self, sampler, guide_model, scale=1.0, verbose=False):
- """Apply classifier guidance
-
- Specify a guidance scale as either a scalar
- Or a schedule as a list of tuples t = 0->1 and scale, e.g.
- [(0, 10), (0.5, 20), (1, 50)]
- """
- super().__init__()
- self.sampler = sampler
- self.index = 0
- self.show = verbose
- self.guide_model = guide_model
- self.history = []
-
- if isinstance(scale, (Tuple, List)):
- times = np.array([x[0] for x in scale])
- values = np.array([x[1] for x in scale])
- self.scale_schedule = {"times": times, "values": values}
- else:
- self.scale_schedule = float(scale)
-
- self.ddim_timesteps = sampler.ddim_timesteps
- self.ddpm_num_timesteps = sampler.ddpm_num_timesteps
-
-
- def get_scales(self):
- if isinstance(self.scale_schedule, float):
- return len(self.ddim_timesteps)*[self.scale_schedule]
-
- interpolater = interpolate.interp1d(self.scale_schedule["times"], self.scale_schedule["values"])
- fractional_steps = np.array(self.ddim_timesteps)/self.ddpm_num_timesteps
- return interpolater(fractional_steps)
-
- def modify_score(self, model, e_t, x, t, c):
-
- # TODO look up index by t
- scale = self.get_scales()[self.index]
-
- if (scale == 0):
- return e_t
-
- sqrt_1ma = self.sampler.ddim_sqrt_one_minus_alphas[self.index].to(x.device)
- with torch.enable_grad():
- x_in = x.detach().requires_grad_(True)
- pred_x0 = model.predict_start_from_noise(x_in, t=t, noise=e_t)
- x_img = model.first_stage_model.decode((1/0.18215)*pred_x0)
-
- inp = self.guide_model.preprocess(x_img)
- loss = self.guide_model.compute_loss(inp)
- grads = torch.autograd.grad(loss.sum(), x_in)[0]
- correction = grads * scale
-
- if self.show:
- clear_output(wait=True)
- print(loss.item(), scale, correction.abs().max().item(), e_t.abs().max().item())
- self.history.append([loss.item(), scale, correction.min().item(), correction.max().item()])
- plt.imshow((inp[0].detach().permute(1,2,0).clamp(-1,1).cpu()+1)/2)
- plt.axis('off')
- plt.show()
- plt.imshow(correction[0][0].detach().cpu())
- plt.axis('off')
- plt.show()
-
-
- e_t_mod = e_t - sqrt_1ma*correction
- if self.show:
- fig, axs = plt.subplots(1, 3)
- axs[0].imshow(e_t[0][0].detach().cpu(), vmin=-2, vmax=+2)
- axs[1].imshow(e_t_mod[0][0].detach().cpu(), vmin=-2, vmax=+2)
- axs[2].imshow(correction[0][0].detach().cpu(), vmin=-2, vmax=+2)
- plt.show()
- self.index += 1
- return e_t_mod
\ No newline at end of file
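
A minimal, hypothetical GuideModel illustrating the interface Guider expects: preprocess() maps the decoded image batch to whatever the guide consumes, and compute_loss() returns the objective whose gradient with respect to the latent is used as the correction. The brightness objective below is made up purely for illustration.

import torch
from ldm.guidance import GuideModel, Guider

class BrightnessGuide(GuideModel):
    def __init__(self, target=0.5):
        super().__init__()
        self.target = target

    def preprocess(self, x_img):
        # x_img: decoded image batch in [-1, 1]; map to [0, 1] for the objective
        return (x_img.clamp(-1, 1) + 1) / 2

    def compute_loss(self, inp):
        # per-sample objective; Guider sums it and differentiates w.r.t. the latent
        return (inp.mean(dim=(1, 2, 3)) - self.target) ** 2

# With a DDIM sampler already constructed, the guide could be wired up as e.g.
# guider = Guider(sampler, BrightnessGuide(), scale=[(0.0, 0.0), (0.5, 10.0), (1.0, 0.0)])
# and guider.modify_score(...) used as the score corrector during sampling.
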
diff --git a/One-2-3-45-master 2/ldm/lr_scheduler.py b/One-2-3-45-master 2/ldm/lr_scheduler.py
deleted file mode 100644
index be39da9ca6dacc22bf3df9c7389bbb403a4a3ade..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/lr_scheduler.py
+++ /dev/null
@@ -1,98 +0,0 @@
-import numpy as np
-
-
-class LambdaWarmUpCosineScheduler:
- """
- note: use with a base_lr of 1.0
- """
- def __init__(self, warm_up_steps, lr_min, lr_max, lr_start, max_decay_steps, verbosity_interval=0):
- self.lr_warm_up_steps = warm_up_steps
- self.lr_start = lr_start
- self.lr_min = lr_min
- self.lr_max = lr_max
- self.lr_max_decay_steps = max_decay_steps
- self.last_lr = 0.
- self.verbosity_interval = verbosity_interval
-
- def schedule(self, n, **kwargs):
- if self.verbosity_interval > 0:
- if n % self.verbosity_interval == 0: print(f"current step: {n}, recent lr-multiplier: {self.last_lr}")
- if n < self.lr_warm_up_steps:
- lr = (self.lr_max - self.lr_start) / self.lr_warm_up_steps * n + self.lr_start
- self.last_lr = lr
- return lr
- else:
- t = (n - self.lr_warm_up_steps) / (self.lr_max_decay_steps - self.lr_warm_up_steps)
- t = min(t, 1.0)
- lr = self.lr_min + 0.5 * (self.lr_max - self.lr_min) * (
- 1 + np.cos(t * np.pi))
- self.last_lr = lr
- return lr
-
- def __call__(self, n, **kwargs):
- return self.schedule(n,**kwargs)
-
-
-class LambdaWarmUpCosineScheduler2:
- """
- supports repeated iterations, configurable via lists
- note: use with a base_lr of 1.0.
- """
- def __init__(self, warm_up_steps, f_min, f_max, f_start, cycle_lengths, verbosity_interval=0):
- assert len(warm_up_steps) == len(f_min) == len(f_max) == len(f_start) == len(cycle_lengths)
- self.lr_warm_up_steps = warm_up_steps
- self.f_start = f_start
- self.f_min = f_min
- self.f_max = f_max
- self.cycle_lengths = cycle_lengths
- self.cum_cycles = np.cumsum([0] + list(self.cycle_lengths))
- self.last_f = 0.
- self.verbosity_interval = verbosity_interval
-
- def find_in_interval(self, n):
- interval = 0
- for cl in self.cum_cycles[1:]:
- if n <= cl:
- return interval
- interval += 1
-
- def schedule(self, n, **kwargs):
- cycle = self.find_in_interval(n)
- n = n - self.cum_cycles[cycle]
- if self.verbosity_interval > 0:
- if n % self.verbosity_interval == 0: print(f"current step: {n}, recent lr-multiplier: {self.last_f}, "
- f"current cycle {cycle}")
- if n < self.lr_warm_up_steps[cycle]:
- f = (self.f_max[cycle] - self.f_start[cycle]) / self.lr_warm_up_steps[cycle] * n + self.f_start[cycle]
- self.last_f = f
- return f
- else:
- t = (n - self.lr_warm_up_steps[cycle]) / (self.cycle_lengths[cycle] - self.lr_warm_up_steps[cycle])
- t = min(t, 1.0)
- f = self.f_min[cycle] + 0.5 * (self.f_max[cycle] - self.f_min[cycle]) * (
- 1 + np.cos(t * np.pi))
- self.last_f = f
- return f
-
- def __call__(self, n, **kwargs):
- return self.schedule(n, **kwargs)
-
-
-class LambdaLinearScheduler(LambdaWarmUpCosineScheduler2):
-
- def schedule(self, n, **kwargs):
- cycle = self.find_in_interval(n)
- n = n - self.cum_cycles[cycle]
- if self.verbosity_interval > 0:
- if n % self.verbosity_interval == 0: print(f"current step: {n}, recent lr-multiplier: {self.last_f}, "
- f"current cycle {cycle}")
-
- if n < self.lr_warm_up_steps[cycle]:
- f = (self.f_max[cycle] - self.f_start[cycle]) / self.lr_warm_up_steps[cycle] * n + self.f_start[cycle]
- self.last_f = f
- return f
- else:
- f = self.f_min[cycle] + (self.f_max[cycle] - self.f_min[cycle]) * (self.cycle_lengths[cycle] - n) / (self.cycle_lengths[cycle])
- self.last_f = f
- return f
-
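
For context, these schedules are meant to be plugged into torch's LambdaLR; per the note above, the optimizer's base LR is set to 1.0 so the schedule's output is used as the learning rate directly. The model and hyperparameters below are placeholders.

import torch
from torch.optim.lr_scheduler import LambdaLR
from ldm.lr_scheduler import LambdaWarmUpCosineScheduler

model = torch.nn.Linear(8, 8)                          # placeholder model
opt = torch.optim.AdamW(model.parameters(), lr=1.0)    # base_lr of 1.0, per the note above

schedule = LambdaWarmUpCosineScheduler(
    warm_up_steps=1000, lr_min=1e-6, lr_max=1e-4, lr_start=1e-6,
    max_decay_steps=100_000)
sched = LambdaLR(opt, lr_lambda=schedule)              # schedule(n) returns the LR for step n

for step in range(5):                                  # training-loop stub
    opt.step()
    sched.step()
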
diff --git a/One-2-3-45-master 2/ldm/models/autoencoder.py b/One-2-3-45-master 2/ldm/models/autoencoder.py
deleted file mode 100644
index 6a9c4f45498561953b8085981609b2a3298a5473..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/models/autoencoder.py
+++ /dev/null
@@ -1,443 +0,0 @@
-import torch
-import numpy as np
-import pytorch_lightning as pl
-import torch.nn.functional as F
-from contextlib import contextmanager
-from packaging import version
-from torch.optim.lr_scheduler import LambdaLR
-
-from taming.modules.vqvae.quantize import VectorQuantizer2 as VectorQuantizer
-
-from ldm.modules.diffusionmodules.model import Encoder, Decoder
-from ldm.modules.distributions.distributions import DiagonalGaussianDistribution
-from ldm.modules.ema import LitEma
-
-from ldm.util import instantiate_from_config
-
-
-class VQModel(pl.LightningModule):
- def __init__(self,
- ddconfig,
- lossconfig,
- n_embed,
- embed_dim,
- ckpt_path=None,
- ignore_keys=[],
- image_key="image",
- colorize_nlabels=None,
- monitor=None,
- batch_resize_range=None,
- scheduler_config=None,
- lr_g_factor=1.0,
- remap=None,
- sane_index_shape=False, # tell vector quantizer to return indices as bhw
- use_ema=False
- ):
- super().__init__()
- self.embed_dim = embed_dim
- self.n_embed = n_embed
- self.image_key = image_key
- self.encoder = Encoder(**ddconfig)
- self.decoder = Decoder(**ddconfig)
- self.loss = instantiate_from_config(lossconfig)
- self.quantize = VectorQuantizer(n_embed, embed_dim, beta=0.25,
- remap=remap,
- sane_index_shape=sane_index_shape)
- self.quant_conv = torch.nn.Conv2d(ddconfig["z_channels"], embed_dim, 1)
- self.post_quant_conv = torch.nn.Conv2d(embed_dim, ddconfig["z_channels"], 1)
- if colorize_nlabels is not None:
- assert type(colorize_nlabels)==int
- self.register_buffer("colorize", torch.randn(3, colorize_nlabels, 1, 1))
- if monitor is not None:
- self.monitor = monitor
- self.batch_resize_range = batch_resize_range
- if self.batch_resize_range is not None:
- print(f"{self.__class__.__name__}: Using per-batch resizing in range {batch_resize_range}.")
-
- self.use_ema = use_ema
- if self.use_ema:
- self.model_ema = LitEma(self)
- print(f"Keeping EMAs of {len(list(self.model_ema.buffers()))}.")
-
- if ckpt_path is not None:
- self.init_from_ckpt(ckpt_path, ignore_keys=ignore_keys)
- self.scheduler_config = scheduler_config
- self.lr_g_factor = lr_g_factor
-
- @contextmanager
- def ema_scope(self, context=None):
- if self.use_ema:
- self.model_ema.store(self.parameters())
- self.model_ema.copy_to(self)
- if context is not None:
- print(f"{context}: Switched to EMA weights")
- try:
- yield None
- finally:
- if self.use_ema:
- self.model_ema.restore(self.parameters())
- if context is not None:
- print(f"{context}: Restored training weights")
-
- def init_from_ckpt(self, path, ignore_keys=list()):
- sd = torch.load(path, map_location="cpu")["state_dict"]
- keys = list(sd.keys())
- for k in keys:
- for ik in ignore_keys:
- if k.startswith(ik):
- print("Deleting key {} from state_dict.".format(k))
- del sd[k]
- missing, unexpected = self.load_state_dict(sd, strict=False)
- print(f"Restored from {path} with {len(missing)} missing and {len(unexpected)} unexpected keys")
- if len(missing) > 0:
- print(f"Missing Keys: {missing}")
- print(f"Unexpected Keys: {unexpected}")
-
- def on_train_batch_end(self, *args, **kwargs):
- if self.use_ema:
- self.model_ema(self)
-
- def encode(self, x):
- h = self.encoder(x)
- h = self.quant_conv(h)
- quant, emb_loss, info = self.quantize(h)
- return quant, emb_loss, info
-
- def encode_to_prequant(self, x):
- h = self.encoder(x)
- h = self.quant_conv(h)
- return h
-
- def decode(self, quant):
- quant = self.post_quant_conv(quant)
- dec = self.decoder(quant)
- return dec
-
- def decode_code(self, code_b):
- quant_b = self.quantize.embed_code(code_b)
- dec = self.decode(quant_b)
- return dec
-
- def forward(self, input, return_pred_indices=False):
- quant, diff, (_,_,ind) = self.encode(input)
- dec = self.decode(quant)
- if return_pred_indices:
- return dec, diff, ind
- return dec, diff
-
- def get_input(self, batch, k):
- x = batch[k]
- if len(x.shape) == 3:
- x = x[..., None]
- x = x.permute(0, 3, 1, 2).to(memory_format=torch.contiguous_format).float()
- if self.batch_resize_range is not None:
- lower_size = self.batch_resize_range[0]
- upper_size = self.batch_resize_range[1]
- if self.global_step <= 4:
- # do the first few batches with max size to avoid later oom
- new_resize = upper_size
- else:
- new_resize = np.random.choice(np.arange(lower_size, upper_size+16, 16))
- if new_resize != x.shape[2]:
- x = F.interpolate(x, size=new_resize, mode="bicubic")
- x = x.detach()
- return x
-
- def training_step(self, batch, batch_idx, optimizer_idx):
- # https://github.com/pytorch/pytorch/issues/37142
- # try not to fool the heuristics
- x = self.get_input(batch, self.image_key)
- xrec, qloss, ind = self(x, return_pred_indices=True)
-
- if optimizer_idx == 0:
- # autoencode
- aeloss, log_dict_ae = self.loss(qloss, x, xrec, optimizer_idx, self.global_step,
- last_layer=self.get_last_layer(), split="train",
- predicted_indices=ind)
-
- self.log_dict(log_dict_ae, prog_bar=False, logger=True, on_step=True, on_epoch=True)
- return aeloss
-
- if optimizer_idx == 1:
- # discriminator
- discloss, log_dict_disc = self.loss(qloss, x, xrec, optimizer_idx, self.global_step,
- last_layer=self.get_last_layer(), split="train")
- self.log_dict(log_dict_disc, prog_bar=False, logger=True, on_step=True, on_epoch=True)
- return discloss
-
- def validation_step(self, batch, batch_idx):
- log_dict = self._validation_step(batch, batch_idx)
- with self.ema_scope():
- log_dict_ema = self._validation_step(batch, batch_idx, suffix="_ema")
- return log_dict
-
- def _validation_step(self, batch, batch_idx, suffix=""):
- x = self.get_input(batch, self.image_key)
- xrec, qloss, ind = self(x, return_pred_indices=True)
- aeloss, log_dict_ae = self.loss(qloss, x, xrec, 0,
- self.global_step,
- last_layer=self.get_last_layer(),
- split="val"+suffix,
- predicted_indices=ind
- )
-
- discloss, log_dict_disc = self.loss(qloss, x, xrec, 1,
- self.global_step,
- last_layer=self.get_last_layer(),
- split="val"+suffix,
- predicted_indices=ind
- )
- rec_loss = log_dict_ae[f"val{suffix}/rec_loss"]
- self.log(f"val{suffix}/rec_loss", rec_loss,
- prog_bar=True, logger=True, on_step=False, on_epoch=True, sync_dist=True)
- self.log(f"val{suffix}/aeloss", aeloss,
- prog_bar=True, logger=True, on_step=False, on_epoch=True, sync_dist=True)
- if version.parse(pl.__version__) >= version.parse('1.4.0'):
- del log_dict_ae[f"val{suffix}/rec_loss"]
- self.log_dict(log_dict_ae)
- self.log_dict(log_dict_disc)
- return self.log_dict
-
- def configure_optimizers(self):
- lr_d = self.learning_rate
- lr_g = self.lr_g_factor*self.learning_rate
- print("lr_d", lr_d)
- print("lr_g", lr_g)
- opt_ae = torch.optim.Adam(list(self.encoder.parameters())+
- list(self.decoder.parameters())+
- list(self.quantize.parameters())+
- list(self.quant_conv.parameters())+
- list(self.post_quant_conv.parameters()),
- lr=lr_g, betas=(0.5, 0.9))
- opt_disc = torch.optim.Adam(self.loss.discriminator.parameters(),
- lr=lr_d, betas=(0.5, 0.9))
-
- if self.scheduler_config is not None:
- scheduler = instantiate_from_config(self.scheduler_config)
-
- print("Setting up LambdaLR scheduler...")
- scheduler = [
- {
- 'scheduler': LambdaLR(opt_ae, lr_lambda=scheduler.schedule),
- 'interval': 'step',
- 'frequency': 1
- },
- {
- 'scheduler': LambdaLR(opt_disc, lr_lambda=scheduler.schedule),
- 'interval': 'step',
- 'frequency': 1
- },
- ]
- return [opt_ae, opt_disc], scheduler
- return [opt_ae, opt_disc], []
-
- def get_last_layer(self):
- return self.decoder.conv_out.weight
-
- def log_images(self, batch, only_inputs=False, plot_ema=False, **kwargs):
- log = dict()
- x = self.get_input(batch, self.image_key)
- x = x.to(self.device)
- if only_inputs:
- log["inputs"] = x
- return log
- xrec, _ = self(x)
- if x.shape[1] > 3:
- # colorize with random projection
- assert xrec.shape[1] > 3
- x = self.to_rgb(x)
- xrec = self.to_rgb(xrec)
- log["inputs"] = x
- log["reconstructions"] = xrec
- if plot_ema:
- with self.ema_scope():
- xrec_ema, _ = self(x)
- if x.shape[1] > 3: xrec_ema = self.to_rgb(xrec_ema)
- log["reconstructions_ema"] = xrec_ema
- return log
-
- def to_rgb(self, x):
- assert self.image_key == "segmentation"
- if not hasattr(self, "colorize"):
- self.register_buffer("colorize", torch.randn(3, x.shape[1], 1, 1).to(x))
- x = F.conv2d(x, weight=self.colorize)
- x = 2.*(x-x.min())/(x.max()-x.min()) - 1.
- return x
-
-
-class VQModelInterface(VQModel):
- def __init__(self, embed_dim, *args, **kwargs):
- super().__init__(embed_dim=embed_dim, *args, **kwargs)
- self.embed_dim = embed_dim
-
- def encode(self, x):
- h = self.encoder(x)
- h = self.quant_conv(h)
- return h
-
- def decode(self, h, force_not_quantize=False):
- # also go through quantization layer
- if not force_not_quantize:
- quant, emb_loss, info = self.quantize(h)
- else:
- quant = h
- quant = self.post_quant_conv(quant)
- dec = self.decoder(quant)
- return dec
-
-
-class AutoencoderKL(pl.LightningModule):
- def __init__(self,
- ddconfig,
- lossconfig,
- embed_dim,
- ckpt_path=None,
- ignore_keys=[],
- image_key="image",
- colorize_nlabels=None,
- monitor=None,
- ):
- super().__init__()
- self.image_key = image_key
- self.encoder = Encoder(**ddconfig)
- self.decoder = Decoder(**ddconfig)
- self.loss = instantiate_from_config(lossconfig)
- assert ddconfig["double_z"]
- self.quant_conv = torch.nn.Conv2d(2*ddconfig["z_channels"], 2*embed_dim, 1)
- self.post_quant_conv = torch.nn.Conv2d(embed_dim, ddconfig["z_channels"], 1)
- self.embed_dim = embed_dim
- if colorize_nlabels is not None:
- assert type(colorize_nlabels)==int
- self.register_buffer("colorize", torch.randn(3, colorize_nlabels, 1, 1))
- if monitor is not None:
- self.monitor = monitor
- if ckpt_path is not None:
- self.init_from_ckpt(ckpt_path, ignore_keys=ignore_keys)
-
- def init_from_ckpt(self, path, ignore_keys=list()):
- sd = torch.load(path, map_location="cpu")["state_dict"]
- keys = list(sd.keys())
- for k in keys:
- for ik in ignore_keys:
- if k.startswith(ik):
- print("Deleting key {} from state_dict.".format(k))
- del sd[k]
- self.load_state_dict(sd, strict=False)
- print(f"Restored from {path}")
-
- def encode(self, x):
- h = self.encoder(x)
- moments = self.quant_conv(h)
- posterior = DiagonalGaussianDistribution(moments)
- return posterior
-
- def decode(self, z):
- z = self.post_quant_conv(z)
- dec = self.decoder(z)
- return dec
-
- def forward(self, input, sample_posterior=True):
- posterior = self.encode(input)
- if sample_posterior:
- z = posterior.sample()
- else:
- z = posterior.mode()
- dec = self.decode(z)
- return dec, posterior
-
- def get_input(self, batch, k):
- x = batch[k]
- if len(x.shape) == 3:
- x = x[..., None]
- x = x.permute(0, 3, 1, 2).to(memory_format=torch.contiguous_format).float()
- return x
-
- def training_step(self, batch, batch_idx, optimizer_idx):
- inputs = self.get_input(batch, self.image_key)
- reconstructions, posterior = self(inputs)
-
- if optimizer_idx == 0:
- # train encoder+decoder+logvar
- aeloss, log_dict_ae = self.loss(inputs, reconstructions, posterior, optimizer_idx, self.global_step,
- last_layer=self.get_last_layer(), split="train")
- self.log("aeloss", aeloss, prog_bar=True, logger=True, on_step=True, on_epoch=True)
- self.log_dict(log_dict_ae, prog_bar=False, logger=True, on_step=True, on_epoch=False)
- return aeloss
-
- if optimizer_idx == 1:
- # train the discriminator
- discloss, log_dict_disc = self.loss(inputs, reconstructions, posterior, optimizer_idx, self.global_step,
- last_layer=self.get_last_layer(), split="train")
-
- self.log("discloss", discloss, prog_bar=True, logger=True, on_step=True, on_epoch=True)
- self.log_dict(log_dict_disc, prog_bar=False, logger=True, on_step=True, on_epoch=False)
- return discloss
-
- def validation_step(self, batch, batch_idx):
- inputs = self.get_input(batch, self.image_key)
- reconstructions, posterior = self(inputs)
- aeloss, log_dict_ae = self.loss(inputs, reconstructions, posterior, 0, self.global_step,
- last_layer=self.get_last_layer(), split="val")
-
- discloss, log_dict_disc = self.loss(inputs, reconstructions, posterior, 1, self.global_step,
- last_layer=self.get_last_layer(), split="val")
-
- self.log("val/rec_loss", log_dict_ae["val/rec_loss"])
- self.log_dict(log_dict_ae)
- self.log_dict(log_dict_disc)
- return self.log_dict
-
- def configure_optimizers(self):
- lr = self.learning_rate
- opt_ae = torch.optim.Adam(list(self.encoder.parameters())+
- list(self.decoder.parameters())+
- list(self.quant_conv.parameters())+
- list(self.post_quant_conv.parameters()),
- lr=lr, betas=(0.5, 0.9))
- opt_disc = torch.optim.Adam(self.loss.discriminator.parameters(),
- lr=lr, betas=(0.5, 0.9))
- return [opt_ae, opt_disc], []
-
- def get_last_layer(self):
- return self.decoder.conv_out.weight
-
- @torch.no_grad()
- def log_images(self, batch, only_inputs=False, **kwargs):
- log = dict()
- x = self.get_input(batch, self.image_key)
- x = x.to(self.device)
- if not only_inputs:
- xrec, posterior = self(x)
- if x.shape[1] > 3:
- # colorize with random projection
- assert xrec.shape[1] > 3
- x = self.to_rgb(x)
- xrec = self.to_rgb(xrec)
- log["samples"] = self.decode(torch.randn_like(posterior.sample()))
- log["reconstructions"] = xrec
- log["inputs"] = x
- return log
-
- def to_rgb(self, x):
- assert self.image_key == "segmentation"
- if not hasattr(self, "colorize"):
- self.register_buffer("colorize", torch.randn(3, x.shape[1], 1, 1).to(x))
- x = F.conv2d(x, weight=self.colorize)
- x = 2.*(x-x.min())/(x.max()-x.min()) - 1.
- return x
-
-
-class IdentityFirstStage(torch.nn.Module):
- def __init__(self, *args, vq_interface=False, **kwargs):
- self.vq_interface = vq_interface # TODO: Should be true by default but check to not break older stuff
- super().__init__()
-
- def encode(self, x, *args, **kwargs):
- return x
-
- def decode(self, x, *args, **kwargs):
- return x
-
- def quantize(self, x, *args, **kwargs):
- if self.vq_interface:
- return x, None, [None, None, None]
- return x
-
- def forward(self, x, *args, **kwargs):
- return x
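
A self-contained sketch of an encode/decode round trip through AutoencoderKL. The ddconfig below mirrors the usual kl-f8 first-stage settings at a reduced resolution, and the loss is replaced by a no-op Identity since only inference is exercised; both are assumptions for illustration, not the repository's training configuration.

import torch
from ldm.models.autoencoder import AutoencoderKL

ddconfig = dict(double_z=True, z_channels=4, resolution=64, in_channels=3,
                out_ch=3, ch=128, ch_mult=[1, 2, 4, 4], num_res_blocks=2,
                attn_resolutions=[], dropout=0.0)
ae = AutoencoderKL(ddconfig=ddconfig,
                   lossconfig={"target": "torch.nn.Identity"},  # inference only
                   embed_dim=4).eval()

x = torch.randn(1, 3, 64, 64)             # dummy image batch in [-1, 1]
with torch.no_grad():
    posterior = ae.encode(x)              # DiagonalGaussianDistribution over latents
    z = posterior.mode()                  # deterministic latent; .sample() also works
    x_rec = ae.decode(z)                  # reconstruction, same shape as x
print(z.shape, x_rec.shape)
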
diff --git a/One-2-3-45-master 2/ldm/models/diffusion/__init__.py b/One-2-3-45-master 2/ldm/models/diffusion/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/One-2-3-45-master 2/ldm/models/diffusion/classifier.py b/One-2-3-45-master 2/ldm/models/diffusion/classifier.py
deleted file mode 100644
index 67e98b9d8ffb96a150b517497ace0a242d7163ef..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/models/diffusion/classifier.py
+++ /dev/null
@@ -1,267 +0,0 @@
-import os
-import torch
-import pytorch_lightning as pl
-from omegaconf import OmegaConf
-from torch.nn import functional as F
-from torch.optim import AdamW
-from torch.optim.lr_scheduler import LambdaLR
-from copy import deepcopy
-from einops import rearrange
-from glob import glob
-from natsort import natsorted
-
-from ldm.modules.diffusionmodules.openaimodel import EncoderUNetModel, UNetModel
-from ldm.util import log_txt_as_img, default, ismap, instantiate_from_config
-
-__models__ = {
- 'class_label': EncoderUNetModel,
- 'segmentation': UNetModel
-}
-
-
-def disabled_train(self, mode=True):
- """Overwrite model.train with this function to make sure train/eval mode
- does not change anymore."""
- return self
-
-
-class NoisyLatentImageClassifier(pl.LightningModule):
-
- def __init__(self,
- diffusion_path,
- num_classes,
- ckpt_path=None,
- pool='attention',
- label_key=None,
- diffusion_ckpt_path=None,
- scheduler_config=None,
- weight_decay=1.e-2,
- log_steps=10,
- monitor='val/loss',
- *args,
- **kwargs):
- super().__init__(*args, **kwargs)
- self.num_classes = num_classes
- # get latest config of diffusion model
- diffusion_config = natsorted(glob(os.path.join(diffusion_path, 'configs', '*-project.yaml')))[-1]
- self.diffusion_config = OmegaConf.load(diffusion_config).model
- self.diffusion_config.params.ckpt_path = diffusion_ckpt_path
- self.load_diffusion()
-
- self.monitor = monitor
- self.numd = self.diffusion_model.first_stage_model.encoder.num_resolutions - 1
- self.log_time_interval = self.diffusion_model.num_timesteps // log_steps
- self.log_steps = log_steps
-
- self.label_key = label_key if not hasattr(self.diffusion_model, 'cond_stage_key') \
- else self.diffusion_model.cond_stage_key
-
- assert self.label_key is not None, 'label_key neither in diffusion model nor in model.params'
-
- if self.label_key not in __models__:
- raise NotImplementedError()
-
- self.load_classifier(ckpt_path, pool)
-
- self.scheduler_config = scheduler_config
- self.use_scheduler = self.scheduler_config is not None
- self.weight_decay = weight_decay
-
- def init_from_ckpt(self, path, ignore_keys=list(), only_model=False):
- sd = torch.load(path, map_location="cpu")
- if "state_dict" in list(sd.keys()):
- sd = sd["state_dict"]
- keys = list(sd.keys())
- for k in keys:
- for ik in ignore_keys:
- if k.startswith(ik):
- print("Deleting key {} from state_dict.".format(k))
- del sd[k]
- missing, unexpected = self.load_state_dict(sd, strict=False) if not only_model else self.model.load_state_dict(
- sd, strict=False)
- print(f"Restored from {path} with {len(missing)} missing and {len(unexpected)} unexpected keys")
- if len(missing) > 0:
- print(f"Missing Keys: {missing}")
- if len(unexpected) > 0:
- print(f"Unexpected Keys: {unexpected}")
-
- def load_diffusion(self):
- model = instantiate_from_config(self.diffusion_config)
- self.diffusion_model = model.eval()
- self.diffusion_model.train = disabled_train
- for param in self.diffusion_model.parameters():
- param.requires_grad = False
-
- def load_classifier(self, ckpt_path, pool):
- model_config = deepcopy(self.diffusion_config.params.unet_config.params)
- model_config.in_channels = self.diffusion_config.params.unet_config.params.out_channels
- model_config.out_channels = self.num_classes
- if self.label_key == 'class_label':
- model_config.pool = pool
-
- self.model = __models__[self.label_key](**model_config)
- if ckpt_path is not None:
- print('#####################################################################')
- print(f'load from ckpt "{ckpt_path}"')
- print('#####################################################################')
- self.init_from_ckpt(ckpt_path)
-
- @torch.no_grad()
- def get_x_noisy(self, x, t, noise=None):
- noise = default(noise, lambda: torch.randn_like(x))
- continuous_sqrt_alpha_cumprod = None
- if self.diffusion_model.use_continuous_noise:
- continuous_sqrt_alpha_cumprod = self.diffusion_model.sample_continuous_noise_level(x.shape[0], t + 1)
- # todo: make sure t+1 is correct here
-
- return self.diffusion_model.q_sample(x_start=x, t=t, noise=noise,
- continuous_sqrt_alpha_cumprod=continuous_sqrt_alpha_cumprod)
-
- def forward(self, x_noisy, t, *args, **kwargs):
- return self.model(x_noisy, t)
-
- @torch.no_grad()
- def get_input(self, batch, k):
- x = batch[k]
- if len(x.shape) == 3:
- x = x[..., None]
- x = rearrange(x, 'b h w c -> b c h w')
- x = x.to(memory_format=torch.contiguous_format).float()
- return x
-
- @torch.no_grad()
- def get_conditioning(self, batch, k=None):
- if k is None:
- k = self.label_key
- assert k is not None, 'Needs to provide label key'
-
- targets = batch[k].to(self.device)
-
- if self.label_key == 'segmentation':
- targets = rearrange(targets, 'b h w c -> b c h w')
- for down in range(self.numd):
- h, w = targets.shape[-2:]
- targets = F.interpolate(targets, size=(h // 2, w // 2), mode='nearest')
-
- # targets = rearrange(targets,'b c h w -> b h w c')
-
- return targets
-
- def compute_top_k(self, logits, labels, k, reduction="mean"):
- _, top_ks = torch.topk(logits, k, dim=1)
- if reduction == "mean":
- return (top_ks == labels[:, None]).float().sum(dim=-1).mean().item()
- elif reduction == "none":
- return (top_ks == labels[:, None]).float().sum(dim=-1)
-
- def on_train_epoch_start(self):
- # save some memory
- self.diffusion_model.model.to('cpu')
-
- @torch.no_grad()
- def write_logs(self, loss, logits, targets):
- log_prefix = 'train' if self.training else 'val'
- log = {}
- log[f"{log_prefix}/loss"] = loss.mean()
- log[f"{log_prefix}/acc@1"] = self.compute_top_k(
- logits, targets, k=1, reduction="mean"
- )
- log[f"{log_prefix}/acc@5"] = self.compute_top_k(
- logits, targets, k=5, reduction="mean"
- )
-
- self.log_dict(log, prog_bar=False, logger=True, on_step=self.training, on_epoch=True)
- self.log('loss', log[f"{log_prefix}/loss"], prog_bar=True, logger=False)
- self.log('global_step', self.global_step, logger=False, on_epoch=False, prog_bar=True)
- lr = self.optimizers().param_groups[0]['lr']
- self.log('lr_abs', lr, on_step=True, logger=True, on_epoch=False, prog_bar=True)
-
- def shared_step(self, batch, t=None):
- x, *_ = self.diffusion_model.get_input(batch, k=self.diffusion_model.first_stage_key)
- targets = self.get_conditioning(batch)
- if targets.dim() == 4:
- targets = targets.argmax(dim=1)
- if t is None:
- t = torch.randint(0, self.diffusion_model.num_timesteps, (x.shape[0],), device=self.device).long()
- else:
- t = torch.full(size=(x.shape[0],), fill_value=t, device=self.device).long()
- x_noisy = self.get_x_noisy(x, t)
- logits = self(x_noisy, t)
-
- loss = F.cross_entropy(logits, targets, reduction='none')
-
- self.write_logs(loss.detach(), logits.detach(), targets.detach())
-
- loss = loss.mean()
- return loss, logits, x_noisy, targets
-
- def training_step(self, batch, batch_idx):
- loss, *_ = self.shared_step(batch)
- return loss
-
- def reset_noise_accs(self):
- self.noisy_acc = {t: {'acc@1': [], 'acc@5': []} for t in
- range(0, self.diffusion_model.num_timesteps, self.diffusion_model.log_every_t)}
-
- def on_validation_start(self):
- self.reset_noise_accs()
-
- @torch.no_grad()
- def validation_step(self, batch, batch_idx):
- loss, *_ = self.shared_step(batch)
-
- for t in self.noisy_acc:
- _, logits, _, targets = self.shared_step(batch, t)
- self.noisy_acc[t]['acc@1'].append(self.compute_top_k(logits, targets, k=1, reduction='mean'))
- self.noisy_acc[t]['acc@5'].append(self.compute_top_k(logits, targets, k=5, reduction='mean'))
-
- return loss
-
- def configure_optimizers(self):
- optimizer = AdamW(self.model.parameters(), lr=self.learning_rate, weight_decay=self.weight_decay)
-
- if self.use_scheduler:
- scheduler = instantiate_from_config(self.scheduler_config)
-
- print("Setting up LambdaLR scheduler...")
- scheduler = [
- {
- 'scheduler': LambdaLR(optimizer, lr_lambda=scheduler.schedule),
- 'interval': 'step',
- 'frequency': 1
- }]
- return [optimizer], scheduler
-
- return optimizer
-
- @torch.no_grad()
- def log_images(self, batch, N=8, *args, **kwargs):
- log = dict()
- x = self.get_input(batch, self.diffusion_model.first_stage_key)
- log['inputs'] = x
-
- y = self.get_conditioning(batch)
-
- if self.label_key == 'class_label':
- y = log_txt_as_img((x.shape[2], x.shape[3]), batch["human_label"])
- log['labels'] = y
-
- if ismap(y):
- log['labels'] = self.diffusion_model.to_rgb(y)
-
- for step in range(self.log_steps):
- current_time = step * self.log_time_interval
-
- _, logits, x_noisy, _ = self.shared_step(batch, t=current_time)
-
- log[f'inputs@t{current_time}'] = x_noisy
-
- pred = F.one_hot(logits.argmax(dim=1), num_classes=self.num_classes)
- pred = rearrange(pred, 'b h w c -> b c h w')
-
- log[f'pred@t{current_time}'] = self.diffusion_model.to_rgb(pred)
-
- for key in log:
- log[key] = log[key][:N]
-
- return log
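
For reference, the top-k accuracy reported by compute_top_k above can be checked standalone on made-up logits and labels, with no diffusion model involved.

import torch

def top_k_accuracy(logits, labels, k):
    # same computation as compute_top_k(..., reduction="mean") above
    _, top_ks = torch.topk(logits, k, dim=1)
    return (top_ks == labels[:, None]).float().sum(dim=-1).mean().item()

logits = torch.randn(16, 10)              # (batch, num_classes), dummy values
labels = torch.randint(0, 10, (16,))
print(top_k_accuracy(logits, labels, k=1), top_k_accuracy(logits, labels, k=5))
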
diff --git a/One-2-3-45-master 2/ldm/models/diffusion/ddim.py b/One-2-3-45-master 2/ldm/models/diffusion/ddim.py
deleted file mode 100644
index 5db306d8dd82ca8868e34cddfeb4a01daf259c08..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/models/diffusion/ddim.py
+++ /dev/null
@@ -1,326 +0,0 @@
-"""SAMPLING ONLY."""
-
-import torch
-import numpy as np
-from tqdm import tqdm
-from functools import partial
-from einops import rearrange
-
-from ldm.modules.diffusionmodules.util import make_ddim_sampling_parameters, make_ddim_timesteps, noise_like, extract_into_tensor
-from ldm.models.diffusion.sampling_util import renorm_thresholding, norm_thresholding, spatial_norm_thresholding
-
-
-class DDIMSampler(object):
- def __init__(self, model, schedule="linear", **kwargs):
- super().__init__()
- self.model = model
- self.ddpm_num_timesteps = model.num_timesteps
- self.schedule = schedule
- self.device = model.device
-
- def to(self, device):
- """Same as to in torch module
- Don't really understand why this isn't a module in the first place"""
- for k, v in self.__dict__.items():
- if isinstance(v, torch.Tensor):
- new_v = getattr(self, k).to(device)
- setattr(self, k, new_v)
-
-
- def register_buffer(self, name, attr, device=None):
- if type(attr) == torch.Tensor:
- attr = attr.to(device)
- # if attr.device != torch.device("cuda"):
- # attr = attr.to(torch.device("cuda"))
- setattr(self, name, attr)
-
- def make_schedule(self, ddim_num_steps, ddim_discretize="uniform", ddim_eta=0., verbose=True):
- self.ddim_timesteps = make_ddim_timesteps(ddim_discr_method=ddim_discretize, num_ddim_timesteps=ddim_num_steps,
- num_ddpm_timesteps=self.ddpm_num_timesteps,verbose=verbose)
- alphas_cumprod = self.model.alphas_cumprod
- assert alphas_cumprod.shape[0] == self.ddpm_num_timesteps, 'alphas have to be defined for each timestep'
- to_torch = lambda x: x.clone().detach().to(torch.float32).to(self.model.device)
-
- self.register_buffer('betas', to_torch(self.model.betas), self.device)
- self.register_buffer('alphas_cumprod', to_torch(alphas_cumprod), self.device)
- self.register_buffer('alphas_cumprod_prev', to_torch(self.model.alphas_cumprod_prev), self.device)
-
- # calculations for diffusion q(x_t | x_{t-1}) and others
- self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod.cpu())), self.device)
- self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. - alphas_cumprod.cpu())), self.device)
- self.register_buffer('log_one_minus_alphas_cumprod', to_torch(np.log(1. - alphas_cumprod.cpu())), self.device)
- self.register_buffer('sqrt_recip_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod.cpu())), self.device)
- self.register_buffer('sqrt_recipm1_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod.cpu() - 1)), self.device)
-
- # ddim sampling parameters
- ddim_sigmas, ddim_alphas, ddim_alphas_prev = make_ddim_sampling_parameters(alphacums=alphas_cumprod.cpu(),
- ddim_timesteps=self.ddim_timesteps,
- eta=ddim_eta,verbose=verbose)
- self.register_buffer('ddim_sigmas', ddim_sigmas, self.device)
- self.register_buffer('ddim_alphas', ddim_alphas, self.device)
- self.register_buffer('ddim_alphas_prev', ddim_alphas_prev, self.device)
- self.register_buffer('ddim_sqrt_one_minus_alphas', np.sqrt(1. - ddim_alphas), self.device)
- sigmas_for_original_sampling_steps = ddim_eta * torch.sqrt(
- (1 - self.alphas_cumprod_prev) / (1 - self.alphas_cumprod) * (
- 1 - self.alphas_cumprod / self.alphas_cumprod_prev))
- self.register_buffer('ddim_sigmas_for_original_num_steps', sigmas_for_original_sampling_steps, self.device)
-
- @torch.no_grad()
- def sample(self,
- S,
- batch_size,
- shape,
- conditioning=None,
- callback=None,
- normals_sequence=None,
- img_callback=None,
- quantize_x0=False,
- eta=0.,
- mask=None,
- x0=None,
- temperature=1.,
- noise_dropout=0.,
- score_corrector=None,
- corrector_kwargs=None,
- verbose=True,
- x_T=None,
- log_every_t=100,
- unconditional_guidance_scale=1.,
- unconditional_conditioning=None, # has to come in the same format as the conditioning, e.g. as encoded tokens
- dynamic_threshold=None,
- **kwargs
- ):
- if conditioning is not None:
- if isinstance(conditioning, dict):
- ctmp = conditioning[list(conditioning.keys())[0]]
- while isinstance(ctmp, list): ctmp = ctmp[0]
- cbs = ctmp.shape[0]
- if cbs != batch_size:
- print(f"Warning: Got {cbs} conditionings but batch-size is {batch_size}")
-
- else:
- if conditioning.shape[0] != batch_size:
- print(f"Warning: Got {conditioning.shape[0]} conditionings but batch-size is {batch_size}")
-
- self.make_schedule(ddim_num_steps=S, ddim_eta=eta, verbose=verbose)
- # sampling
- C, H, W = shape
- size = (batch_size, C, H, W)
- print(f'Data shape for DDIM sampling is {size}, eta {eta}')
-
- samples, intermediates = self.ddim_sampling(conditioning, size,
- callback=callback,
- img_callback=img_callback,
- quantize_denoised=quantize_x0,
- mask=mask, x0=x0,
- ddim_use_original_steps=False,
- noise_dropout=noise_dropout,
- temperature=temperature,
- score_corrector=score_corrector,
- corrector_kwargs=corrector_kwargs,
- x_T=x_T,
- log_every_t=log_every_t,
- unconditional_guidance_scale=unconditional_guidance_scale,
- unconditional_conditioning=unconditional_conditioning,
- dynamic_threshold=dynamic_threshold,
- )
- return samples, intermediates
-
- @torch.no_grad()
- def ddim_sampling(self, cond, shape,
- x_T=None, ddim_use_original_steps=False,
- callback=None, timesteps=None, quantize_denoised=False,
- mask=None, x0=None, img_callback=None, log_every_t=100,
- temperature=1., noise_dropout=0., score_corrector=None, corrector_kwargs=None,
- unconditional_guidance_scale=1., unconditional_conditioning=None, dynamic_threshold=None,
- t_start=-1):
- device = self.model.betas.device
- b = shape[0]
- if x_T is None:
- img = torch.randn(shape, device=device)
- else:
- img = x_T
-
- if timesteps is None:
- timesteps = self.ddpm_num_timesteps if ddim_use_original_steps else self.ddim_timesteps
- elif timesteps is not None and not ddim_use_original_steps:
- subset_end = int(min(timesteps / self.ddim_timesteps.shape[0], 1) * self.ddim_timesteps.shape[0]) - 1
- timesteps = self.ddim_timesteps[:subset_end]
-
- timesteps = timesteps[:t_start]
-
- intermediates = {'x_inter': [img], 'pred_x0': [img]}
- time_range = reversed(range(0,timesteps)) if ddim_use_original_steps else np.flip(timesteps)
- total_steps = timesteps if ddim_use_original_steps else timesteps.shape[0]
- print(f"Running DDIM Sampling with {total_steps} timesteps")
-
- iterator = tqdm(time_range, desc='DDIM Sampler', total=total_steps)
-
- for i, step in enumerate(iterator):
- index = total_steps - i - 1
- ts = torch.full((b,), step, device=device, dtype=torch.long)
-
- if mask is not None:
- assert x0 is not None
- img_orig = self.model.q_sample(x0, ts) # TODO: deterministic forward pass?
- img = img_orig * mask + (1. - mask) * img
-
- outs = self.p_sample_ddim(img, cond, ts, index=index, use_original_steps=ddim_use_original_steps,
- quantize_denoised=quantize_denoised, temperature=temperature,
- noise_dropout=noise_dropout, score_corrector=score_corrector,
- corrector_kwargs=corrector_kwargs,
- unconditional_guidance_scale=unconditional_guidance_scale,
- unconditional_conditioning=unconditional_conditioning,
- dynamic_threshold=dynamic_threshold)
- img, pred_x0 = outs
- if callback:
- img = callback(i, img, pred_x0)
- if img_callback: img_callback(pred_x0, i)
-
- if index % log_every_t == 0 or index == total_steps - 1:
- intermediates['x_inter'].append(img)
- intermediates['pred_x0'].append(pred_x0)
-
- return img, intermediates
-
- @torch.no_grad()
- def p_sample_ddim(self, x, c, t, index, repeat_noise=False, use_original_steps=False, quantize_denoised=False,
- temperature=1., noise_dropout=0., score_corrector=None, corrector_kwargs=None,
- unconditional_guidance_scale=1., unconditional_conditioning=None,
- dynamic_threshold=None):
- b, *_, device = *x.shape, x.device
-
- if unconditional_conditioning is None or unconditional_guidance_scale == 1.:
- e_t = self.model.apply_model(x, t, c)
- else:
- x_in = torch.cat([x] * 2)
- t_in = torch.cat([t] * 2)
- if isinstance(c, dict):
- assert isinstance(unconditional_conditioning, dict)
- c_in = dict()
- for k in c:
- if isinstance(c[k], list):
- c_in[k] = [torch.cat([
- unconditional_conditioning[k][i],
- c[k][i]]) for i in range(len(c[k]))]
- else:
- c_in[k] = torch.cat([
- unconditional_conditioning[k],
- c[k]])
- else:
- c_in = torch.cat([unconditional_conditioning, c])
- e_t_uncond, e_t = self.model.apply_model(x_in, t_in, c_in).chunk(2)
- e_t = e_t_uncond + unconditional_guidance_scale * (e_t - e_t_uncond)
-
- if score_corrector is not None:
- assert self.model.parameterization == "eps"
- e_t = score_corrector.modify_score(self.model, e_t, x, t, c, **corrector_kwargs)
-
- alphas = self.model.alphas_cumprod if use_original_steps else self.ddim_alphas
- alphas_prev = self.model.alphas_cumprod_prev if use_original_steps else self.ddim_alphas_prev
- sqrt_one_minus_alphas = self.model.sqrt_one_minus_alphas_cumprod if use_original_steps else self.ddim_sqrt_one_minus_alphas
- sigmas = self.model.ddim_sigmas_for_original_num_steps if use_original_steps else self.ddim_sigmas
- # select parameters corresponding to the currently considered timestep
- a_t = torch.full((b, 1, 1, 1), alphas[index], device=device)
- a_prev = torch.full((b, 1, 1, 1), alphas_prev[index], device=device)
- sigma_t = torch.full((b, 1, 1, 1), sigmas[index], device=device)
- sqrt_one_minus_at = torch.full((b, 1, 1, 1), sqrt_one_minus_alphas[index],device=device)
-
- # current prediction for x_0
- pred_x0 = (x - sqrt_one_minus_at * e_t) / a_t.sqrt()
- if quantize_denoised:
- pred_x0, _, *_ = self.model.first_stage_model.quantize(pred_x0)
-
- if dynamic_threshold is not None:
- pred_x0 = norm_thresholding(pred_x0, dynamic_threshold)
-
- # direction pointing to x_t
- dir_xt = (1. - a_prev - sigma_t**2).sqrt() * e_t
- noise = sigma_t * noise_like(x.shape, device, repeat_noise) * temperature
- if noise_dropout > 0.:
- noise = torch.nn.functional.dropout(noise, p=noise_dropout)
- x_prev = a_prev.sqrt() * pred_x0 + dir_xt + noise
- return x_prev, pred_x0
-
- @torch.no_grad()
- def encode(self, x0, c, t_enc, use_original_steps=False, return_intermediates=None,
- unconditional_guidance_scale=1.0, unconditional_conditioning=None):
- num_reference_steps = self.ddpm_num_timesteps if use_original_steps else self.ddim_timesteps.shape[0]
-
- assert t_enc <= num_reference_steps
- num_steps = t_enc
-
- if use_original_steps:
- alphas_next = self.alphas_cumprod[:num_steps]
- alphas = self.alphas_cumprod_prev[:num_steps]
- else:
- alphas_next = self.ddim_alphas[:num_steps]
- alphas = torch.tensor(self.ddim_alphas_prev[:num_steps])
-
- x_next = x0
- intermediates = []
- inter_steps = []
- for i in tqdm(range(num_steps), desc='Encoding Image'):
- t = torch.full((x0.shape[0],), i, device=self.model.device, dtype=torch.long)
- if unconditional_guidance_scale == 1.:
- noise_pred = self.model.apply_model(x_next, t, c)
- else:
- assert unconditional_conditioning is not None
- e_t_uncond, noise_pred = torch.chunk(
- self.model.apply_model(torch.cat((x_next, x_next)), torch.cat((t, t)),
- torch.cat((unconditional_conditioning, c))), 2)
- noise_pred = e_t_uncond + unconditional_guidance_scale * (noise_pred - e_t_uncond)
-
- xt_weighted = (alphas_next[i] / alphas[i]).sqrt() * x_next
- weighted_noise_pred = alphas_next[i].sqrt() * (
- (1 / alphas_next[i] - 1).sqrt() - (1 / alphas[i] - 1).sqrt()) * noise_pred
- x_next = xt_weighted + weighted_noise_pred
- if return_intermediates and i % (
- num_steps // return_intermediates) == 0 and i < num_steps - 1:
- intermediates.append(x_next)
- inter_steps.append(i)
- elif return_intermediates and i >= num_steps - 2:
- intermediates.append(x_next)
- inter_steps.append(i)
-
- out = {'x_encoded': x_next, 'intermediate_steps': inter_steps}
- if return_intermediates:
- out.update({'intermediates': intermediates})
- return x_next, out
-
- @torch.no_grad()
- def stochastic_encode(self, x0, t, use_original_steps=False, noise=None):
- # fast, but does not allow for exact reconstruction
- # t serves as an index to gather the correct alphas
- if use_original_steps:
- sqrt_alphas_cumprod = self.sqrt_alphas_cumprod
- sqrt_one_minus_alphas_cumprod = self.sqrt_one_minus_alphas_cumprod
- else:
- sqrt_alphas_cumprod = torch.sqrt(self.ddim_alphas)
- sqrt_one_minus_alphas_cumprod = self.ddim_sqrt_one_minus_alphas
-
- if noise is None:
- noise = torch.randn_like(x0)
- return (extract_into_tensor(sqrt_alphas_cumprod, t, x0.shape) * x0 +
- extract_into_tensor(sqrt_one_minus_alphas_cumprod, t, x0.shape) * noise)
-
- @torch.no_grad()
- def decode(self, x_latent, cond, t_start, unconditional_guidance_scale=1.0, unconditional_conditioning=None,
- use_original_steps=False):
-
- timesteps = np.arange(self.ddpm_num_timesteps) if use_original_steps else self.ddim_timesteps
- timesteps = timesteps[:t_start]
-
- time_range = np.flip(timesteps)
- total_steps = timesteps.shape[0]
- print(f"Running DDIM Sampling with {total_steps} timesteps")
-
- iterator = tqdm(time_range, desc='Decoding image', total=total_steps)
- x_dec = x_latent
- for i, step in enumerate(iterator):
- index = total_steps - i - 1
- ts = torch.full((x_latent.shape[0],), step, device=x_latent.device, dtype=torch.long)
- x_dec, _ = self.p_sample_ddim(x_dec, cond, ts, index=index, use_original_steps=use_original_steps,
- unconditional_guidance_scale=unconditional_guidance_scale,
- unconditional_conditioning=unconditional_conditioning)
- return x_dec
\ No newline at end of file
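
The core update inside p_sample_ddim reduces, for eta = 0 and no guidance, to the deterministic DDIM step sketched below. All tensors here are dummies; in the sampler they come from the model and the registered alpha schedules.

import torch

def ddim_step(x_t, e_t, a_t, a_prev):
    # predicted x0 from the current noise estimate
    pred_x0 = (x_t - (1.0 - a_t).sqrt() * e_t) / a_t.sqrt()
    # direction pointing back towards x_t, then the previous latent (sigma = 0)
    dir_xt = (1.0 - a_prev).sqrt() * e_t
    return a_prev.sqrt() * pred_x0 + dir_xt, pred_x0

x_t = torch.randn(1, 4, 32, 32)                  # dummy latent
e_t = torch.randn_like(x_t)                      # dummy noise prediction
a_t, a_prev = torch.tensor(0.5), torch.tensor(0.6)
x_prev, pred_x0 = ddim_step(x_t, e_t, a_t, a_prev)
print(x_prev.shape, pred_x0.shape)
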
diff --git a/One-2-3-45-master 2/ldm/models/diffusion/ddpm.py b/One-2-3-45-master 2/ldm/models/diffusion/ddpm.py
deleted file mode 100644
index 6a6d5017af4f84fdc95c6389a2dcc8d6b8a03080..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/models/diffusion/ddpm.py
+++ /dev/null
@@ -1,1994 +0,0 @@
-"""
-wild mixture of
-https://github.com/lucidrains/denoising-diffusion-pytorch/blob/7706bdfc6f527f58d33f84b7b522e61e6e3164b3/denoising_diffusion_pytorch/denoising_diffusion_pytorch.py
-https://github.com/openai/improved-diffusion/blob/e94489283bb876ac1477d5dd7709bbbd2d9902ce/improved_diffusion/gaussian_diffusion.py
-https://github.com/CompVis/taming-transformers
--- merci
-"""
-
-import torch
-import torch.nn as nn
-import numpy as np
-import pytorch_lightning as pl
-from torch.optim.lr_scheduler import LambdaLR
-from einops import rearrange, repeat
-from contextlib import contextmanager, nullcontext
-from functools import partial
-import itertools
-from tqdm import tqdm
-from torchvision.utils import make_grid
-from pytorch_lightning.utilities.rank_zero import rank_zero_only
-from omegaconf import ListConfig
-
-from ldm.util import log_txt_as_img, exists, default, ismap, isimage, mean_flat, count_params, instantiate_from_config
-from ldm.modules.ema import LitEma
-from ldm.modules.distributions.distributions import normal_kl, DiagonalGaussianDistribution
-from ldm.models.autoencoder import VQModelInterface, IdentityFirstStage, AutoencoderKL
-from ldm.modules.diffusionmodules.util import make_beta_schedule, extract_into_tensor, noise_like
-from ldm.models.diffusion.ddim import DDIMSampler
-from ldm.modules.attention import CrossAttention
-
-
-__conditioning_keys__ = {'concat': 'c_concat',
- 'crossattn': 'c_crossattn',
- 'adm': 'y'}
-
-
-def disabled_train(self, mode=True):
- """Overwrite model.train with this function to make sure train/eval mode
- does not change anymore."""
- return self
-
-
-def uniform_on_device(r1, r2, shape, device):
- return (r1 - r2) * torch.rand(*shape, device=device) + r2
-
-
-class DDPM(pl.LightningModule):
- # classic DDPM with Gaussian diffusion, in image space
- def __init__(self,
- unet_config,
- timesteps=1000,
- beta_schedule="linear",
- loss_type="l2",
- ckpt_path=None,
- ignore_keys=[],
- load_only_unet=False,
- monitor="val/loss",
- use_ema=True,
- first_stage_key="image",
- image_size=256,
- channels=3,
- log_every_t=100,
- clip_denoised=True,
- linear_start=1e-4,
- linear_end=2e-2,
- cosine_s=8e-3,
- given_betas=None,
- original_elbo_weight=0.,
- v_posterior=0., # weight for choosing posterior variance as sigma = (1-v) * beta_tilde + v * beta
- l_simple_weight=1.,
- conditioning_key=None,
- parameterization="eps", # all assuming fixed variance schedules
- scheduler_config=None,
- use_positional_encodings=False,
- learn_logvar=False,
- logvar_init=0.,
- make_it_fit=False,
- ucg_training=None,
- ):
- super().__init__()
- assert parameterization in ["eps", "x0"], 'currently only supporting "eps" and "x0"'
- self.parameterization = parameterization
- print(f"{self.__class__.__name__}: Running in {self.parameterization}-prediction mode")
- self.cond_stage_model = None
- self.clip_denoised = clip_denoised
- self.log_every_t = log_every_t
- self.first_stage_key = first_stage_key
- self.image_size = image_size # try conv?
- self.channels = channels
- self.use_positional_encodings = use_positional_encodings
- self.model = DiffusionWrapper(unet_config, conditioning_key)
- count_params(self.model, verbose=True)
- self.use_ema = use_ema
- if self.use_ema:
- self.model_ema = LitEma(self.model)
- print(f"Keeping EMAs of {len(list(self.model_ema.buffers()))}.")
-
- self.use_scheduler = scheduler_config is not None
- if self.use_scheduler:
- self.scheduler_config = scheduler_config
-
- self.v_posterior = v_posterior
- self.original_elbo_weight = original_elbo_weight
- self.l_simple_weight = l_simple_weight
-
- if monitor is not None:
- self.monitor = monitor
- self.make_it_fit = make_it_fit
- if ckpt_path is not None:
- self.init_from_ckpt(ckpt_path, ignore_keys=ignore_keys, only_model=load_only_unet)
-
- self.register_schedule(given_betas=given_betas, beta_schedule=beta_schedule, timesteps=timesteps,
- linear_start=linear_start, linear_end=linear_end, cosine_s=cosine_s)
-
- self.loss_type = loss_type
-
- self.learn_logvar = learn_logvar
- self.logvar = torch.full(fill_value=logvar_init, size=(self.num_timesteps,))
- if self.learn_logvar:
- self.logvar = nn.Parameter(self.logvar, requires_grad=True)
-
- self.ucg_training = ucg_training or dict()
- if self.ucg_training:
- self.ucg_prng = np.random.RandomState()
-
- def register_schedule(self, given_betas=None, beta_schedule="linear", timesteps=1000,
- linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3):
- if exists(given_betas):
- betas = given_betas
- else:
- betas = make_beta_schedule(beta_schedule, timesteps, linear_start=linear_start, linear_end=linear_end,
- cosine_s=cosine_s)
- alphas = 1. - betas
- alphas_cumprod = np.cumprod(alphas, axis=0)
- alphas_cumprod_prev = np.append(1., alphas_cumprod[:-1])
-
- timesteps, = betas.shape
- self.num_timesteps = int(timesteps)
- self.linear_start = linear_start
- self.linear_end = linear_end
- assert alphas_cumprod.shape[0] == self.num_timesteps, 'alphas have to be defined for each timestep'
-
- to_torch = partial(torch.tensor, dtype=torch.float32)
-
- self.register_buffer('betas', to_torch(betas))
- self.register_buffer('alphas_cumprod', to_torch(alphas_cumprod))
- self.register_buffer('alphas_cumprod_prev', to_torch(alphas_cumprod_prev))
-
- # calculations for diffusion q(x_t | x_{t-1}) and others
- self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod)))
- self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. - alphas_cumprod)))
- self.register_buffer('log_one_minus_alphas_cumprod', to_torch(np.log(1. - alphas_cumprod)))
- self.register_buffer('sqrt_recip_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod)))
- self.register_buffer('sqrt_recipm1_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod - 1)))
-
- # calculations for posterior q(x_{t-1} | x_t, x_0)
- posterior_variance = (1 - self.v_posterior) * betas * (1. - alphas_cumprod_prev) / (
- 1. - alphas_cumprod) + self.v_posterior * betas
- # above: equal to 1. / (1. / (1. - alpha_cumprod_tm1) + alpha_t / beta_t)
- self.register_buffer('posterior_variance', to_torch(posterior_variance))
- # below: log calculation clipped because the posterior variance is 0 at the beginning of the diffusion chain
- self.register_buffer('posterior_log_variance_clipped', to_torch(np.log(np.maximum(posterior_variance, 1e-20))))
- self.register_buffer('posterior_mean_coef1', to_torch(
- betas * np.sqrt(alphas_cumprod_prev) / (1. - alphas_cumprod)))
- self.register_buffer('posterior_mean_coef2', to_torch(
- (1. - alphas_cumprod_prev) * np.sqrt(alphas) / (1. - alphas_cumprod)))
-
- if self.parameterization == "eps":
- lvlb_weights = self.betas ** 2 / (
- 2 * self.posterior_variance * to_torch(alphas) * (1 - self.alphas_cumprod))
- elif self.parameterization == "x0":
- lvlb_weights = 0.5 * np.sqrt(torch.Tensor(alphas_cumprod)) / (2. * 1 - torch.Tensor(alphas_cumprod))
- else:
- raise NotImplementedError("mu not supported")
- # TODO how to choose this term
- lvlb_weights[0] = lvlb_weights[1]
- self.register_buffer('lvlb_weights', lvlb_weights, persistent=False)
- assert not torch.isnan(self.lvlb_weights).all()
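
`register_schedule` derives every buffer the sampler needs from the betas alone. A compact sketch of the same posterior-coefficient derivations, using a linear schedule as a stand-in and ignoring the `v_posterior` interpolation for brevity:

```python
import numpy as np

T = 1000
betas = np.linspace(1e-4, 2e-2, T)
alphas = 1.0 - betas
alphas_cumprod = np.cumprod(alphas)
alphas_cumprod_prev = np.append(1.0, alphas_cumprod[:-1])

# q(x_{t-1} | x_t, x_0): same closed forms as the buffers registered above (v_posterior = 0)
posterior_variance = betas * (1.0 - alphas_cumprod_prev) / (1.0 - alphas_cumprod)
posterior_mean_coef1 = betas * np.sqrt(alphas_cumprod_prev) / (1.0 - alphas_cumprod)
posterior_mean_coef2 = (1.0 - alphas_cumprod_prev) * np.sqrt(alphas) / (1.0 - alphas_cumprod)

assert posterior_variance.shape == (T,)
print(posterior_variance[:3], posterior_mean_coef1[0], posterior_mean_coef2[0])
```
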
-
- @contextmanager
- def ema_scope(self, context=None):
- if self.use_ema:
- self.model_ema.store(self.model.parameters())
- self.model_ema.copy_to(self.model)
- if context is not None:
- print(f"{context}: Switched to EMA weights")
- try:
- yield None
- finally:
- if self.use_ema:
- self.model_ema.restore(self.model.parameters())
- if context is not None:
- print(f"{context}: Restored training weights")
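
The EMA scope follows a simple pattern: stash the live weights, copy in the averaged ones, and restore on exit. A hypothetical sketch of the same pattern with plain state dicts (no `LitEma` needed for the idea):

```python
import copy
from contextlib import contextmanager

import torch.nn as nn

model = nn.Linear(4, 4)
ema_state = copy.deepcopy(model.state_dict())   # pretend this holds the averaged weights

@contextmanager
def ema_weights(model, ema_state):
    backup = copy.deepcopy(model.state_dict())  # store the live training weights
    model.load_state_dict(ema_state)            # swap in the EMA weights
    try:
        yield model
    finally:
        model.load_state_dict(backup)           # restore the training weights

with ema_weights(model, ema_state) as m:
    pass  # run validation / sampling with the averaged weights here
```
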
-
- @torch.no_grad()
- def init_from_ckpt(self, path, ignore_keys=list(), only_model=False):
- sd = torch.load(path, map_location="cpu")
- if "state_dict" in list(sd.keys()):
- sd = sd["state_dict"]
- keys = list(sd.keys())
-
- if self.make_it_fit:
- n_params = len([name for name, _ in
- itertools.chain(self.named_parameters(),
- self.named_buffers())])
- for name, param in tqdm(
- itertools.chain(self.named_parameters(),
- self.named_buffers()),
- desc="Fitting old weights to new weights",
- total=n_params
- ):
- if not name in sd:
- continue
- old_shape = sd[name].shape
- new_shape = param.shape
- assert len(old_shape)==len(new_shape)
- if len(new_shape) > 2:
- # we only modify first two axes
- assert new_shape[2:] == old_shape[2:]
- # assumes first axis corresponds to output dim
- if not new_shape == old_shape:
- new_param = param.clone()
- old_param = sd[name]
- if len(new_shape) == 1:
- for i in range(new_param.shape[0]):
- new_param[i] = old_param[i % old_shape[0]]
- elif len(new_shape) >= 2:
- for i in range(new_param.shape[0]):
- for j in range(new_param.shape[1]):
- new_param[i, j] = old_param[i % old_shape[0], j % old_shape[1]]
-
- n_used_old = torch.ones(old_shape[1])
- for j in range(new_param.shape[1]):
- n_used_old[j % old_shape[1]] += 1
- n_used_new = torch.zeros(new_shape[1])
- for j in range(new_param.shape[1]):
- n_used_new[j] = n_used_old[j % old_shape[1]]
-
- n_used_new = n_used_new[None, :]
- while len(n_used_new.shape) < len(new_shape):
- n_used_new = n_used_new.unsqueeze(-1)
- new_param /= n_used_new
-
- sd[name] = new_param
-
- missing, unexpected = self.load_state_dict(sd, strict=False) if not only_model else self.model.load_state_dict(
- sd, strict=False)
- print(f"Restored from {path} with {len(missing)} missing and {len(unexpected)} unexpected keys")
- if len(missing) > 0:
- print(f"Missing Keys: {missing}")
- if len(unexpected) > 0:
- print(f"Unexpected Keys: {unexpected}")
-
- def q_mean_variance(self, x_start, t):
- """
- Get the distribution q(x_t | x_0).
- :param x_start: the [N x C x ...] tensor of noiseless inputs.
- :param t: the number of diffusion steps (minus 1). Here, 0 means one step.
- :return: A tuple (mean, variance, log_variance), all of x_start's shape.
- """
- mean = (extract_into_tensor(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start)
- variance = extract_into_tensor(1.0 - self.alphas_cumprod, t, x_start.shape)
- log_variance = extract_into_tensor(self.log_one_minus_alphas_cumprod, t, x_start.shape)
- return mean, variance, log_variance
-
- def predict_start_from_noise(self, x_t, t, noise):
- return (
- extract_into_tensor(self.sqrt_recip_alphas_cumprod, t, x_t.shape) * x_t -
- extract_into_tensor(self.sqrt_recipm1_alphas_cumprod, t, x_t.shape) * noise
- )
-
- def q_posterior(self, x_start, x_t, t):
- posterior_mean = (
- extract_into_tensor(self.posterior_mean_coef1, t, x_t.shape) * x_start +
- extract_into_tensor(self.posterior_mean_coef2, t, x_t.shape) * x_t
- )
- posterior_variance = extract_into_tensor(self.posterior_variance, t, x_t.shape)
- posterior_log_variance_clipped = extract_into_tensor(self.posterior_log_variance_clipped, t, x_t.shape)
- return posterior_mean, posterior_variance, posterior_log_variance_clipped
-
- def p_mean_variance(self, x, t, clip_denoised: bool):
- model_out = self.model(x, t)
- if self.parameterization == "eps":
- x_recon = self.predict_start_from_noise(x, t=t, noise=model_out)
- elif self.parameterization == "x0":
- x_recon = model_out
- if clip_denoised:
- x_recon.clamp_(-1., 1.)
-
- model_mean, posterior_variance, posterior_log_variance = self.q_posterior(x_start=x_recon, x_t=x, t=t)
- return model_mean, posterior_variance, posterior_log_variance
-
- @torch.no_grad()
- def p_sample(self, x, t, clip_denoised=True, repeat_noise=False):
- b, *_, device = *x.shape, x.device
- model_mean, _, model_log_variance = self.p_mean_variance(x=x, t=t, clip_denoised=clip_denoised)
- noise = noise_like(x.shape, device, repeat_noise)
- # no noise when t == 0
- nonzero_mask = (1 - (t == 0).float()).reshape(b, *((1,) * (len(x.shape) - 1)))
- return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise
-
- @torch.no_grad()
- def p_sample_loop(self, shape, return_intermediates=False):
- device = self.betas.device
- b = shape[0]
- img = torch.randn(shape, device=device)
- intermediates = [img]
- for i in tqdm(reversed(range(0, self.num_timesteps)), desc='Sampling t', total=self.num_timesteps):
- img = self.p_sample(img, torch.full((b,), i, device=device, dtype=torch.long),
- clip_denoised=self.clip_denoised)
- if i % self.log_every_t == 0 or i == self.num_timesteps - 1:
- intermediates.append(img)
- if return_intermediates:
- return img, intermediates
- return img
-
- @torch.no_grad()
- def sample(self, batch_size=16, return_intermediates=False):
- image_size = self.image_size
- channels = self.channels
- return self.p_sample_loop((batch_size, channels, image_size, image_size),
- return_intermediates=return_intermediates)
-
- def q_sample(self, x_start, t, noise=None):
- noise = default(noise, lambda: torch.randn_like(x_start))
- return (extract_into_tensor(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start +
- extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t, x_start.shape) * noise)
-
- def get_loss(self, pred, target, mean=True):
- if self.loss_type == 'l1':
- loss = (target - pred).abs()
- if mean:
- loss = loss.mean()
- elif self.loss_type == 'l2':
- if mean:
- loss = torch.nn.functional.mse_loss(target, pred)
- else:
- loss = torch.nn.functional.mse_loss(target, pred, reduction='none')
- else:
-            raise NotImplementedError(f"unknown loss type '{self.loss_type}'")
-
- return loss
-
- def p_losses(self, x_start, t, noise=None):
- noise = default(noise, lambda: torch.randn_like(x_start))
- x_noisy = self.q_sample(x_start=x_start, t=t, noise=noise)
- model_out = self.model(x_noisy, t)
-
- loss_dict = {}
- if self.parameterization == "eps":
- target = noise
- elif self.parameterization == "x0":
- target = x_start
- else:
-            raise NotImplementedError(f"Parameterization {self.parameterization} not yet supported")
-
- loss = self.get_loss(model_out, target, mean=False).mean(dim=[1, 2, 3])
-
- log_prefix = 'train' if self.training else 'val'
-
- loss_dict.update({f'{log_prefix}/loss_simple': loss.mean()})
- loss_simple = loss.mean() * self.l_simple_weight
-
- loss_vlb = (self.lvlb_weights[t] * loss).mean()
- loss_dict.update({f'{log_prefix}/loss_vlb': loss_vlb})
-
- loss = loss_simple + self.original_elbo_weight * loss_vlb
-
- loss_dict.update({f'{log_prefix}/loss': loss})
-
- return loss, loss_dict
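
The loss above is a per-sample MSE on the noise, logged twice: once plain (`loss_simple`) and once reweighted by the per-timestep `lvlb_weights`; only the weighted sum of the two is optimized. A minimal numeric sketch of how the two terms combine (the tensors and weights here are illustrative placeholders):

```python
import torch

batch = 4
per_sample = torch.rand(batch)            # pretend: MSE per sample, already averaged over C, H, W
lvlb_weights = torch.rand(1000)           # pretend: precomputed per-timestep weights
t = torch.randint(0, 1000, (batch,))

l_simple_weight = 1.0
original_elbo_weight = 0.0                # default: the VLB term is logged but not optimized

loss_simple = per_sample.mean() * l_simple_weight
loss_vlb = (lvlb_weights[t] * per_sample).mean()
loss = loss_simple + original_elbo_weight * loss_vlb
print(float(loss_simple), float(loss_vlb), float(loss))
```
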
-
- def forward(self, x, *args, **kwargs):
- # b, c, h, w, device, img_size, = *x.shape, x.device, self.image_size
- # assert h == img_size and w == img_size, f'height and width of image must be {img_size}'
- t = torch.randint(0, self.num_timesteps, (x.shape[0],), device=self.device).long()
- return self.p_losses(x, t, *args, **kwargs)
-
- def get_input(self, batch, k):
- x = batch[k]
- if len(x.shape) == 3:
- x = x[..., None]
- x = rearrange(x, 'b h w c -> b c h w')
- x = x.to(memory_format=torch.contiguous_format).float()
- return x
-
- def shared_step(self, batch):
- x = self.get_input(batch, self.first_stage_key)
- loss, loss_dict = self(x)
- return loss, loss_dict
-
- def training_step(self, batch, batch_idx):
- for k in self.ucg_training:
- p = self.ucg_training[k]["p"]
- val = self.ucg_training[k]["val"]
- if val is None:
- val = ""
- for i in range(len(batch[k])):
- if self.ucg_prng.choice(2, p=[1-p, p]):
- batch[k][i] = val
-
- loss, loss_dict = self.shared_step(batch)
-
- self.log_dict(loss_dict, prog_bar=True,
- logger=True, on_step=True, on_epoch=True)
-
- self.log("global_step", self.global_step,
- prog_bar=True, logger=True, on_step=True, on_epoch=False)
-
- if self.use_scheduler:
- lr = self.optimizers().param_groups[0]['lr']
- self.log('lr_abs', lr, prog_bar=True, logger=True, on_step=True, on_epoch=False)
-
- return loss
-
- @torch.no_grad()
- def validation_step(self, batch, batch_idx):
- _, loss_dict_no_ema = self.shared_step(batch)
- with self.ema_scope():
- _, loss_dict_ema = self.shared_step(batch)
- loss_dict_ema = {key + '_ema': loss_dict_ema[key] for key in loss_dict_ema}
- self.log_dict(loss_dict_no_ema, prog_bar=False, logger=True, on_step=False, on_epoch=True)
- self.log_dict(loss_dict_ema, prog_bar=False, logger=True, on_step=False, on_epoch=True)
-
- def on_train_batch_end(self, *args, **kwargs):
- if self.use_ema:
- self.model_ema(self.model)
-
- def _get_rows_from_list(self, samples):
- n_imgs_per_row = len(samples)
- denoise_grid = rearrange(samples, 'n b c h w -> b n c h w')
- denoise_grid = rearrange(denoise_grid, 'b n c h w -> (b n) c h w')
- denoise_grid = make_grid(denoise_grid, nrow=n_imgs_per_row)
- return denoise_grid
-
- @torch.no_grad()
- def log_images(self, batch, N=8, n_row=2, sample=True, return_keys=None, **kwargs):
- log = dict()
- x = self.get_input(batch, self.first_stage_key)
- N = min(x.shape[0], N)
- n_row = min(x.shape[0], n_row)
- x = x.to(self.device)[:N]
- log["inputs"] = x
-
- # get diffusion row
- diffusion_row = list()
- x_start = x[:n_row]
-
- for t in range(self.num_timesteps):
- if t % self.log_every_t == 0 or t == self.num_timesteps - 1:
- t = repeat(torch.tensor([t]), '1 -> b', b=n_row)
- t = t.to(self.device).long()
- noise = torch.randn_like(x_start)
- x_noisy = self.q_sample(x_start=x_start, t=t, noise=noise)
- diffusion_row.append(x_noisy)
-
- log["diffusion_row"] = self._get_rows_from_list(diffusion_row)
-
- if sample:
- # get denoise row
- with self.ema_scope("Plotting"):
- samples, denoise_row = self.sample(batch_size=N, return_intermediates=True)
-
- log["samples"] = samples
- log["denoise_row"] = self._get_rows_from_list(denoise_row)
-
- if return_keys:
- if np.intersect1d(list(log.keys()), return_keys).shape[0] == 0:
- return log
- else:
- return {key: log[key] for key in return_keys}
- return log
-
- def configure_optimizers(self):
- lr = self.learning_rate
- params = list(self.model.parameters())
- if self.learn_logvar:
- params = params + [self.logvar]
- opt = torch.optim.AdamW(params, lr=lr)
- return opt
-
-
-class LatentDiffusion(DDPM):
- """main class"""
- def __init__(self,
- first_stage_config,
- cond_stage_config,
- num_timesteps_cond=None,
- cond_stage_key="image",
- cond_stage_trainable=False,
- concat_mode=True,
- cond_stage_forward=None,
- conditioning_key=None,
- scale_factor=1.0,
- scale_by_std=False,
- unet_trainable=True,
- *args, **kwargs):
- self.num_timesteps_cond = default(num_timesteps_cond, 1)
- self.scale_by_std = scale_by_std
- assert self.num_timesteps_cond <= kwargs['timesteps']
- # for backwards compatibility after implementation of DiffusionWrapper
- if conditioning_key is None:
- conditioning_key = 'concat' if concat_mode else 'crossattn'
- if cond_stage_config == '__is_unconditional__':
- conditioning_key = None
- ckpt_path = kwargs.pop("ckpt_path", None)
- ignore_keys = kwargs.pop("ignore_keys", [])
- super().__init__(conditioning_key=conditioning_key, *args, **kwargs)
- self.concat_mode = concat_mode
- self.cond_stage_trainable = cond_stage_trainable
- self.unet_trainable = unet_trainable
- self.cond_stage_key = cond_stage_key
- try:
- self.num_downs = len(first_stage_config.params.ddconfig.ch_mult) - 1
- except:
- self.num_downs = 0
- if not scale_by_std:
- self.scale_factor = scale_factor
- else:
- self.register_buffer('scale_factor', torch.tensor(scale_factor))
- self.instantiate_first_stage(first_stage_config)
- self.instantiate_cond_stage(cond_stage_config)
- self.cond_stage_forward = cond_stage_forward
-
- # construct linear projection layer for concatenating image CLIP embedding and RT
- self.cc_projection = nn.Linear(772, 768)
- nn.init.eye_(list(self.cc_projection.parameters())[0][:768, :768])
- nn.init.zeros_(list(self.cc_projection.parameters())[1])
- self.cc_projection.requires_grad_(True)
-
- self.clip_denoised = False
- self.bbox_tokenizer = None
-
- self.restarted_from_ckpt = False
- if ckpt_path is not None:
- self.init_from_ckpt(ckpt_path, ignore_keys)
- self.restarted_from_ckpt = True
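
The `cc_projection` layer concatenates the 768-d CLIP image embedding with 4 extra values carrying the relative camera pose (the exact pose encoding is defined by the dataloader, not shown here) and maps the 772-d result back to 768, starting as an identity on the CLIP part. A sketch mirroring that construction with illustrative shapes:

```python
import torch
import torch.nn as nn

cc_projection = nn.Linear(772, 768)
nn.init.eye_(cc_projection.weight[:768, :768])   # pass the CLIP embedding through unchanged at init
nn.init.zeros_(cc_projection.bias)               # the last 4 input columns keep their default init

clip_emb = torch.randn(8, 1, 768)                # image CLIP embedding
pose = torch.randn(8, 1, 4)                      # relative camera pose features
cond = cc_projection(torch.cat([clip_emb, pose], dim=-1))
print(cond.shape)                                # torch.Size([8, 1, 768])
```
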
-
- def make_cond_schedule(self, ):
- self.cond_ids = torch.full(size=(self.num_timesteps,), fill_value=self.num_timesteps - 1, dtype=torch.long)
- ids = torch.round(torch.linspace(0, self.num_timesteps - 1, self.num_timesteps_cond)).long()
- self.cond_ids[:self.num_timesteps_cond] = ids
-
- @rank_zero_only
- @torch.no_grad()
- def on_train_batch_start(self, batch, batch_idx, dataloader_idx):
- # only for very first batch
- if self.scale_by_std and self.current_epoch == 0 and self.global_step == 0 and batch_idx == 0 and not self.restarted_from_ckpt:
- assert self.scale_factor == 1., 'rather not use custom rescaling and std-rescaling simultaneously'
- # set rescale weight to 1./std of encodings
- print("### USING STD-RESCALING ###")
- x = super().get_input(batch, self.first_stage_key)
- x = x.to(self.device)
- encoder_posterior = self.encode_first_stage(x)
- z = self.get_first_stage_encoding(encoder_posterior).detach()
- del self.scale_factor
- self.register_buffer('scale_factor', 1. / z.flatten().std())
- print(f"setting self.scale_factor to {self.scale_factor}")
- print("### USING STD-RESCALING ###")
-
- def register_schedule(self,
- given_betas=None, beta_schedule="linear", timesteps=1000,
- linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3):
- super().register_schedule(given_betas, beta_schedule, timesteps, linear_start, linear_end, cosine_s)
-
- self.shorten_cond_schedule = self.num_timesteps_cond > 1
- if self.shorten_cond_schedule:
- self.make_cond_schedule()
-
- def instantiate_first_stage(self, config):
- model = instantiate_from_config(config)
- self.first_stage_model = model.eval()
- self.first_stage_model.train = disabled_train
- for param in self.first_stage_model.parameters():
- param.requires_grad = False
-
- def instantiate_cond_stage(self, config):
- if not self.cond_stage_trainable:
- if config == "__is_first_stage__":
- print("Using first stage also as cond stage.")
- self.cond_stage_model = self.first_stage_model
- elif config == "__is_unconditional__":
- print(f"Training {self.__class__.__name__} as an unconditional model.")
- self.cond_stage_model = None
- # self.be_unconditional = True
- else:
- model = instantiate_from_config(config)
- self.cond_stage_model = model.eval()
- self.cond_stage_model.train = disabled_train
- for param in self.cond_stage_model.parameters():
- param.requires_grad = False
- else:
- assert config != '__is_first_stage__'
- assert config != '__is_unconditional__'
- model = instantiate_from_config(config)
- self.cond_stage_model = model
-
- def _get_denoise_row_from_list(self, samples, desc='', force_no_decoder_quantization=False):
- denoise_row = []
- for zd in tqdm(samples, desc=desc):
- denoise_row.append(self.decode_first_stage(zd.to(self.device),
- force_not_quantize=force_no_decoder_quantization))
- n_imgs_per_row = len(denoise_row)
- denoise_row = torch.stack(denoise_row) # n_log_step, n_row, C, H, W
- denoise_grid = rearrange(denoise_row, 'n b c h w -> b n c h w')
- denoise_grid = rearrange(denoise_grid, 'b n c h w -> (b n) c h w')
- denoise_grid = make_grid(denoise_grid, nrow=n_imgs_per_row)
- return denoise_grid
-
- def get_first_stage_encoding(self, encoder_posterior):
- if isinstance(encoder_posterior, DiagonalGaussianDistribution):
- z = encoder_posterior.sample()
- elif isinstance(encoder_posterior, torch.Tensor):
- z = encoder_posterior
- else:
- raise NotImplementedError(f"encoder_posterior of type '{type(encoder_posterior)}' not yet implemented")
- return self.scale_factor * z
-
- def get_learned_conditioning(self, c):
- if self.cond_stage_forward is None:
- if hasattr(self.cond_stage_model, 'encode') and callable(self.cond_stage_model.encode):
- c = self.cond_stage_model.encode(c)
- if isinstance(c, DiagonalGaussianDistribution):
- c = c.mode()
- else:
- c = self.cond_stage_model(c)
- else:
- assert hasattr(self.cond_stage_model, self.cond_stage_forward)
- c = getattr(self.cond_stage_model, self.cond_stage_forward)(c)
- return c
-
- def meshgrid(self, h, w):
- y = torch.arange(0, h).view(h, 1, 1).repeat(1, w, 1)
- x = torch.arange(0, w).view(1, w, 1).repeat(h, 1, 1)
-
- arr = torch.cat([y, x], dim=-1)
- return arr
-
- def delta_border(self, h, w):
- """
- :param h: height
- :param w: width
- :return: normalized distance to image border,
-        with min distance = 0 at the border and max distance = 0.5 at the image center
- """
- lower_right_corner = torch.tensor([h - 1, w - 1]).view(1, 1, 2)
- arr = self.meshgrid(h, w) / lower_right_corner
- dist_left_up = torch.min(arr, dim=-1, keepdims=True)[0]
- dist_right_down = torch.min(1 - arr, dim=-1, keepdims=True)[0]
- edge_dist = torch.min(torch.cat([dist_left_up, dist_right_down], dim=-1), dim=-1)[0]
- return edge_dist
-
- def get_weighting(self, h, w, Ly, Lx, device):
- weighting = self.delta_border(h, w)
- weighting = torch.clip(weighting, self.split_input_params["clip_min_weight"],
- self.split_input_params["clip_max_weight"], )
- weighting = weighting.view(1, h * w, 1).repeat(1, 1, Ly * Lx).to(device)
-
- if self.split_input_params["tie_braker"]:
- L_weighting = self.delta_border(Ly, Lx)
- L_weighting = torch.clip(L_weighting,
- self.split_input_params["clip_min_tie_weight"],
- self.split_input_params["clip_max_tie_weight"])
-
- L_weighting = L_weighting.view(1, 1, Ly * Lx).to(device)
- weighting = weighting * L_weighting
- return weighting
-
- def get_fold_unfold(self, x, kernel_size, stride, uf=1, df=1): # todo load once not every time, shorten code
- """
- :param x: img of size (bs, c, h, w)
- :return: n img crops of size (n, bs, c, kernel_size[0], kernel_size[1])
- """
- bs, nc, h, w = x.shape
-
- # number of crops in image
- Ly = (h - kernel_size[0]) // stride[0] + 1
- Lx = (w - kernel_size[1]) // stride[1] + 1
-
- if uf == 1 and df == 1:
- fold_params = dict(kernel_size=kernel_size, dilation=1, padding=0, stride=stride)
- unfold = torch.nn.Unfold(**fold_params)
-
- fold = torch.nn.Fold(output_size=x.shape[2:], **fold_params)
-
- weighting = self.get_weighting(kernel_size[0], kernel_size[1], Ly, Lx, x.device).to(x.dtype)
- normalization = fold(weighting).view(1, 1, h, w) # normalizes the overlap
- weighting = weighting.view((1, 1, kernel_size[0], kernel_size[1], Ly * Lx))
-
- elif uf > 1 and df == 1:
- fold_params = dict(kernel_size=kernel_size, dilation=1, padding=0, stride=stride)
- unfold = torch.nn.Unfold(**fold_params)
-
- fold_params2 = dict(kernel_size=(kernel_size[0] * uf, kernel_size[0] * uf),
- dilation=1, padding=0,
- stride=(stride[0] * uf, stride[1] * uf))
- fold = torch.nn.Fold(output_size=(x.shape[2] * uf, x.shape[3] * uf), **fold_params2)
-
- weighting = self.get_weighting(kernel_size[0] * uf, kernel_size[1] * uf, Ly, Lx, x.device).to(x.dtype)
- normalization = fold(weighting).view(1, 1, h * uf, w * uf) # normalizes the overlap
- weighting = weighting.view((1, 1, kernel_size[0] * uf, kernel_size[1] * uf, Ly * Lx))
-
- elif df > 1 and uf == 1:
- fold_params = dict(kernel_size=kernel_size, dilation=1, padding=0, stride=stride)
- unfold = torch.nn.Unfold(**fold_params)
-
- fold_params2 = dict(kernel_size=(kernel_size[0] // df, kernel_size[0] // df),
- dilation=1, padding=0,
- stride=(stride[0] // df, stride[1] // df))
- fold = torch.nn.Fold(output_size=(x.shape[2] // df, x.shape[3] // df), **fold_params2)
-
- weighting = self.get_weighting(kernel_size[0] // df, kernel_size[1] // df, Ly, Lx, x.device).to(x.dtype)
- normalization = fold(weighting).view(1, 1, h // df, w // df) # normalizes the overlap
- weighting = weighting.view((1, 1, kernel_size[0] // df, kernel_size[1] // df, Ly * Lx))
-
- else:
- raise NotImplementedError
-
- return fold, unfold, normalization, weighting
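
The fold/unfold pair above is the standard trick for running a model on overlapping crops and stitching the results back together: unfold into patches, weight each patch, fold back, and divide by the folded weights so the overlaps average out. A small sketch with an identity "model" and uniform weights (kernel and stride values are illustrative):

```python
import torch

x = torch.randn(1, 3, 8, 8)
ks, stride = (4, 4), (2, 2)

unfold = torch.nn.Unfold(kernel_size=ks, stride=stride)
fold = torch.nn.Fold(output_size=x.shape[2:], kernel_size=ks, stride=stride)

patches = unfold(x)                                    # (1, 3*4*4, L) with L overlapping crops
patches = patches.view(1, 3, ks[0], ks[1], -1)         # back to image-shaped crops
processed = patches                                    # identity stand-in for the per-crop model

weighting = torch.ones(1, 1, ks[0], ks[1], patches.shape[-1])
o = (processed * weighting).view(1, -1, patches.shape[-1])
normalization = fold(weighting.view(1, ks[0] * ks[1], -1))  # how often each pixel was covered

recon = fold(o) / normalization
print(torch.allclose(recon, x, atol=1e-5))             # True: overlaps average back to the input
```
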
-
-
- @torch.no_grad()
- def get_input(self, batch, k, return_first_stage_outputs=False, force_c_encode=False,
- cond_key=None, return_original_cond=False, bs=None, uncond=0.05):
- x = super().get_input(batch, k)
- T = batch['T'].to(memory_format=torch.contiguous_format).float()
-
- if bs is not None:
- x = x[:bs]
- T = T[:bs].to(self.device)
-
- x = x.to(self.device)
- encoder_posterior = self.encode_first_stage(x)
- z = self.get_first_stage_encoding(encoder_posterior).detach()
- cond_key = cond_key or self.cond_stage_key
- xc = super().get_input(batch, cond_key).to(self.device)
- if bs is not None:
- xc = xc[:bs]
- cond = {}
-
-        # To support classifier-free guidance, randomly drop only the text conditioning 5% of the time, only the image conditioning 5%, and both 5%.
- random = torch.rand(x.size(0), device=x.device)
- prompt_mask = rearrange(random < 2 * uncond, "n -> n 1 1")
- input_mask = 1 - rearrange((random >= uncond).float() * (random < 3 * uncond).float(), "n -> n 1 1 1")
- null_prompt = self.get_learned_conditioning([""])
-
- # z.shape: [8, 4, 64, 64]; c.shape: [8, 1, 768]
- # print('=========== xc shape ===========', xc.shape)
- with torch.enable_grad():
- clip_emb = self.get_learned_conditioning(xc).detach()
- null_prompt = self.get_learned_conditioning([""]).detach()
- cond["c_crossattn"] = [self.cc_projection(torch.cat([torch.where(prompt_mask, null_prompt, clip_emb), T[:, None, :]], dim=-1))]
- cond["c_concat"] = [input_mask * self.encode_first_stage((xc.to(self.device))).mode().detach()]
- out = [z, cond]
- if return_first_stage_outputs:
- xrec = self.decode_first_stage(z)
- out.extend([x, xrec])
- if return_original_cond:
- out.append(xc)
- return out
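
The two masks above partition a single uniform draw so that, with `uncond=0.05`, the text conditioning alone, the image conditioning alone, and both together are each dropped for roughly 5% of the batch. A small sketch that just checks the partition (the sample size is arbitrary):

```python
import torch

uncond = 0.05
random = torch.rand(100_000)

drop_text = random < 2 * uncond                          # null prompt substituted
drop_image = (random >= uncond) & (random < 3 * uncond)  # concat conditioning zeroed

only_text = (drop_text & ~drop_image).float().mean()
only_image = (~drop_text & drop_image).float().mean()
both = (drop_text & drop_image).float().mean()
print(float(only_text), float(only_image), float(both))  # each approximately 0.05
```
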
-
- # @torch.no_grad()
- def decode_first_stage(self, z, predict_cids=False, force_not_quantize=False):
- if predict_cids:
- if z.dim() == 4:
- z = torch.argmax(z.exp(), dim=1).long()
- z = self.first_stage_model.quantize.get_codebook_entry(z, shape=None)
- z = rearrange(z, 'b h w c -> b c h w').contiguous()
-
- z = 1. / self.scale_factor * z
-
- if hasattr(self, "split_input_params"):
- if self.split_input_params["patch_distributed_vq"]:
- ks = self.split_input_params["ks"] # eg. (128, 128)
- stride = self.split_input_params["stride"] # eg. (64, 64)
- uf = self.split_input_params["vqf"]
- bs, nc, h, w = z.shape
- if ks[0] > h or ks[1] > w:
- ks = (min(ks[0], h), min(ks[1], w))
- print("reducing Kernel")
-
- if stride[0] > h or stride[1] > w:
- stride = (min(stride[0], h), min(stride[1], w))
- print("reducing stride")
-
- fold, unfold, normalization, weighting = self.get_fold_unfold(z, ks, stride, uf=uf)
-
- z = unfold(z) # (bn, nc * prod(**ks), L)
- # 1. Reshape to img shape
- z = z.view((z.shape[0], -1, ks[0], ks[1], z.shape[-1])) # (bn, nc, ks[0], ks[1], L )
-
- # 2. apply model loop over last dim
- if isinstance(self.first_stage_model, VQModelInterface):
- output_list = [self.first_stage_model.decode(z[:, :, :, :, i],
- force_not_quantize=predict_cids or force_not_quantize)
- for i in range(z.shape[-1])]
- else:
-
- output_list = [self.first_stage_model.decode(z[:, :, :, :, i])
- for i in range(z.shape[-1])]
-
- o = torch.stack(output_list, axis=-1) # # (bn, nc, ks[0], ks[1], L)
- o = o * weighting
- # Reverse 1. reshape to img shape
- o = o.view((o.shape[0], -1, o.shape[-1])) # (bn, nc * ks[0] * ks[1], L)
- # stitch crops together
- decoded = fold(o)
- decoded = decoded / normalization # norm is shape (1, 1, h, w)
- return decoded
- else:
- if isinstance(self.first_stage_model, VQModelInterface):
- return self.first_stage_model.decode(z, force_not_quantize=predict_cids or force_not_quantize)
- else:
- return self.first_stage_model.decode(z)
-
- else:
- if isinstance(self.first_stage_model, VQModelInterface):
- return self.first_stage_model.decode(z, force_not_quantize=predict_cids or force_not_quantize)
- else:
- return self.first_stage_model.decode(z)
-
- @torch.no_grad()
- def encode_first_stage(self, x):
- if hasattr(self, "split_input_params"):
- if self.split_input_params["patch_distributed_vq"]:
- ks = self.split_input_params["ks"] # eg. (128, 128)
- stride = self.split_input_params["stride"] # eg. (64, 64)
- df = self.split_input_params["vqf"]
- self.split_input_params['original_image_size'] = x.shape[-2:]
- bs, nc, h, w = x.shape
- if ks[0] > h or ks[1] > w:
- ks = (min(ks[0], h), min(ks[1], w))
- print("reducing Kernel")
-
- if stride[0] > h or stride[1] > w:
- stride = (min(stride[0], h), min(stride[1], w))
- print("reducing stride")
-
- fold, unfold, normalization, weighting = self.get_fold_unfold(x, ks, stride, df=df)
- z = unfold(x) # (bn, nc * prod(**ks), L)
- # Reshape to img shape
- z = z.view((z.shape[0], -1, ks[0], ks[1], z.shape[-1])) # (bn, nc, ks[0], ks[1], L )
-
- output_list = [self.first_stage_model.encode(z[:, :, :, :, i])
- for i in range(z.shape[-1])]
-
- o = torch.stack(output_list, axis=-1)
- o = o * weighting
-
- # Reverse reshape to img shape
- o = o.view((o.shape[0], -1, o.shape[-1])) # (bn, nc * ks[0] * ks[1], L)
- # stitch crops together
- decoded = fold(o)
- decoded = decoded / normalization
- return decoded
-
- else:
- return self.first_stage_model.encode(x)
- else:
- return self.first_stage_model.encode(x)
-
- def shared_step(self, batch, **kwargs):
- x, c = self.get_input(batch, self.first_stage_key)
- loss = self(x, c)
- return loss
-
- def forward(self, x, c, *args, **kwargs):
- t = torch.randint(0, self.num_timesteps, (x.shape[0],), device=self.device).long()
- if self.model.conditioning_key is not None:
- assert c is not None
- # if self.cond_stage_trainable:
- # c = self.get_learned_conditioning(c)
- if self.shorten_cond_schedule: # TODO: drop this option
- tc = self.cond_ids[t].to(self.device)
- c = self.q_sample(x_start=c, t=tc, noise=torch.randn_like(c.float()))
- return self.p_losses(x, c, t, *args, **kwargs)
-
- def _rescale_annotations(self, bboxes, crop_coordinates): # TODO: move to dataset
- def rescale_bbox(bbox):
- x0 = clamp((bbox[0] - crop_coordinates[0]) / crop_coordinates[2])
- y0 = clamp((bbox[1] - crop_coordinates[1]) / crop_coordinates[3])
- w = min(bbox[2] / crop_coordinates[2], 1 - x0)
- h = min(bbox[3] / crop_coordinates[3], 1 - y0)
- return x0, y0, w, h
-
- return [rescale_bbox(b) for b in bboxes]
-
- def apply_model(self, x_noisy, t, cond, return_ids=False):
-
- if isinstance(cond, dict):
-            # hybrid case, cond is expected to be a dict
- pass
- else:
- if not isinstance(cond, list):
- cond = [cond]
- key = 'c_concat' if self.model.conditioning_key == 'concat' else 'c_crossattn'
- cond = {key: cond}
-
- if hasattr(self, "split_input_params"):
- assert len(cond) == 1 # todo can only deal with one conditioning atm
- assert not return_ids
- ks = self.split_input_params["ks"] # eg. (128, 128)
- stride = self.split_input_params["stride"] # eg. (64, 64)
-
- h, w = x_noisy.shape[-2:]
-
- fold, unfold, normalization, weighting = self.get_fold_unfold(x_noisy, ks, stride)
-
- z = unfold(x_noisy) # (bn, nc * prod(**ks), L)
- # Reshape to img shape
- z = z.view((z.shape[0], -1, ks[0], ks[1], z.shape[-1])) # (bn, nc, ks[0], ks[1], L )
- z_list = [z[:, :, :, :, i] for i in range(z.shape[-1])]
-
- if self.cond_stage_key in ["image", "LR_image", "segmentation",
- 'bbox_img'] and self.model.conditioning_key: # todo check for completeness
- c_key = next(iter(cond.keys())) # get key
- c = next(iter(cond.values())) # get value
- assert (len(c) == 1) # todo extend to list with more than one elem
- c = c[0] # get element
-
- c = unfold(c)
- c = c.view((c.shape[0], -1, ks[0], ks[1], c.shape[-1])) # (bn, nc, ks[0], ks[1], L )
-
- cond_list = [{c_key: [c[:, :, :, :, i]]} for i in range(c.shape[-1])]
-
- elif self.cond_stage_key == 'coordinates_bbox':
-                assert 'original_image_size' in self.split_input_params, 'BoundingBoxRescaling is missing original_image_size'
-
- # assuming padding of unfold is always 0 and its dilation is always 1
- n_patches_per_row = int((w - ks[0]) / stride[0] + 1)
- full_img_h, full_img_w = self.split_input_params['original_image_size']
- # as we are operating on latents, we need the factor from the original image size to the
- # spatial latent size to properly rescale the crops for regenerating the bbox annotations
- num_downs = self.first_stage_model.encoder.num_resolutions - 1
- rescale_latent = 2 ** (num_downs)
-
-                # get the top-left positions of the patches, as expected by the bbox tokenizer; therefore we
-                # need to rescale the top-left patch coordinates to lie in (0, 1)
- tl_patch_coordinates = [(rescale_latent * stride[0] * (patch_nr % n_patches_per_row) / full_img_w,
- rescale_latent * stride[1] * (patch_nr // n_patches_per_row) / full_img_h)
- for patch_nr in range(z.shape[-1])]
-
- # patch_limits are tl_coord, width and height coordinates as (x_tl, y_tl, h, w)
- patch_limits = [(x_tl, y_tl,
- rescale_latent * ks[0] / full_img_w,
- rescale_latent * ks[1] / full_img_h) for x_tl, y_tl in tl_patch_coordinates]
- # patch_values = [(np.arange(x_tl,min(x_tl+ks, 1.)),np.arange(y_tl,min(y_tl+ks, 1.))) for x_tl, y_tl in tl_patch_coordinates]
-
- # tokenize crop coordinates for the bounding boxes of the respective patches
- patch_limits_tknzd = [torch.LongTensor(self.bbox_tokenizer._crop_encoder(bbox))[None].to(self.device)
- for bbox in patch_limits] # list of length l with tensors of shape (1, 2)
- # cut tknzd crop position from conditioning
- assert isinstance(cond, dict), 'cond must be dict to be fed into model'
- cut_cond = cond['c_crossattn'][0][..., :-2].to(self.device)
-
- adapted_cond = torch.stack([torch.cat([cut_cond, p], dim=1) for p in patch_limits_tknzd])
- adapted_cond = rearrange(adapted_cond, 'l b n -> (l b) n')
- adapted_cond = self.get_learned_conditioning(adapted_cond)
- adapted_cond = rearrange(adapted_cond, '(l b) n d -> l b n d', l=z.shape[-1])
-
- cond_list = [{'c_crossattn': [e]} for e in adapted_cond]
-
- else:
- cond_list = [cond for i in range(z.shape[-1])] # Todo make this more efficient
-
- # apply model by loop over crops
- output_list = [self.model(z_list[i], t, **cond_list[i]) for i in range(z.shape[-1])]
-            # todo: can't deal with multiple model outputs; check that this never happens
-            assert not isinstance(output_list[0], tuple)
-
- o = torch.stack(output_list, axis=-1)
- o = o * weighting
- # Reverse reshape to img shape
- o = o.view((o.shape[0], -1, o.shape[-1])) # (bn, nc * ks[0] * ks[1], L)
- # stitch crops together
- x_recon = fold(o) / normalization
-
- else:
- x_recon = self.model(x_noisy, t, **cond)
-
- if isinstance(x_recon, tuple) and not return_ids:
- return x_recon[0]
- else:
- return x_recon
-
- def _predict_eps_from_xstart(self, x_t, t, pred_xstart):
- return (extract_into_tensor(self.sqrt_recip_alphas_cumprod, t, x_t.shape) * x_t - pred_xstart) / \
- extract_into_tensor(self.sqrt_recipm1_alphas_cumprod, t, x_t.shape)
-
- def _prior_bpd(self, x_start):
- """
- Get the prior KL term for the variational lower-bound, measured in
- bits-per-dim.
- This term can't be optimized, as it only depends on the encoder.
- :param x_start: the [N x C x ...] tensor of inputs.
- :return: a batch of [N] KL values (in bits), one per batch element.
- """
- batch_size = x_start.shape[0]
- t = torch.tensor([self.num_timesteps - 1] * batch_size, device=x_start.device)
- qt_mean, _, qt_log_variance = self.q_mean_variance(x_start, t)
- kl_prior = normal_kl(mean1=qt_mean, logvar1=qt_log_variance, mean2=0.0, logvar2=0.0)
- return mean_flat(kl_prior) / np.log(2.0)
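
The prior term is just the KL between q(x_T | x_0) and a standard normal, converted to bits per dimension. A stand-alone sketch of that computation, using the usual closed-form Gaussian KL (the near-zero mean and log-variance values are illustrative):

```python
import math
import torch

def normal_kl(mean1, logvar1, mean2, logvar2):
    # KL( N(mean1, exp(logvar1)) || N(mean2, exp(logvar2)) ), elementwise
    return 0.5 * (-1.0 + logvar2 - logvar1
                  + torch.exp(logvar1 - logvar2)
                  + (mean1 - mean2) ** 2 * torch.exp(-logvar2))

x_T_mean = torch.zeros(2, 4, 8, 8) + 0.01        # q(x_T | x_0) is nearly standard normal
x_T_logvar = torch.full_like(x_T_mean, -0.01)

kl = normal_kl(x_T_mean, x_T_logvar, torch.zeros(()), torch.zeros(()))
bits_per_dim = kl.flatten(1).mean(dim=1) / math.log(2.0)
print(bits_per_dim)                               # close to zero when the forward chain is long enough
```
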
-
- def p_losses(self, x_start, cond, t, noise=None):
- noise = default(noise, lambda: torch.randn_like(x_start))
- x_noisy = self.q_sample(x_start=x_start, t=t, noise=noise)
- model_output = self.apply_model(x_noisy, t, cond)
-
- loss_dict = {}
- prefix = 'train' if self.training else 'val'
-
- if self.parameterization == "x0":
- target = x_start
- elif self.parameterization == "eps":
- target = noise
- else:
- raise NotImplementedError()
-
- loss_simple = self.get_loss(model_output, target, mean=False).mean([1, 2, 3])
- loss_dict.update({f'{prefix}/loss_simple': loss_simple.mean()})
-
- logvar_t = self.logvar[t].to(self.device)
- loss = loss_simple / torch.exp(logvar_t) + logvar_t
- # loss = loss_simple / torch.exp(self.logvar) + self.logvar
- if self.learn_logvar:
- loss_dict.update({f'{prefix}/loss_gamma': loss.mean()})
- loss_dict.update({'logvar': self.logvar.data.mean()})
-
- loss = self.l_simple_weight * loss.mean()
-
- loss_vlb = self.get_loss(model_output, target, mean=False).mean(dim=(1, 2, 3))
- loss_vlb = (self.lvlb_weights[t] * loss_vlb).mean()
- loss_dict.update({f'{prefix}/loss_vlb': loss_vlb})
- loss += (self.original_elbo_weight * loss_vlb)
- loss_dict.update({f'{prefix}/loss': loss})
-
- return loss, loss_dict
-
- def p_mean_variance(self, x, c, t, clip_denoised: bool, return_codebook_ids=False, quantize_denoised=False,
- return_x0=False, score_corrector=None, corrector_kwargs=None):
- t_in = t
- model_out = self.apply_model(x, t_in, c, return_ids=return_codebook_ids)
-
- if score_corrector is not None:
- assert self.parameterization == "eps"
- model_out = score_corrector.modify_score(self, model_out, x, t, c, **corrector_kwargs)
-
- if return_codebook_ids:
- model_out, logits = model_out
-
- if self.parameterization == "eps":
- x_recon = self.predict_start_from_noise(x, t=t, noise=model_out)
- elif self.parameterization == "x0":
- x_recon = model_out
- else:
- raise NotImplementedError()
-
- if clip_denoised:
- x_recon.clamp_(-1., 1.)
- if quantize_denoised:
- x_recon, _, [_, _, indices] = self.first_stage_model.quantize(x_recon)
- model_mean, posterior_variance, posterior_log_variance = self.q_posterior(x_start=x_recon, x_t=x, t=t)
- if return_codebook_ids:
- return model_mean, posterior_variance, posterior_log_variance, logits
- elif return_x0:
- return model_mean, posterior_variance, posterior_log_variance, x_recon
- else:
- return model_mean, posterior_variance, posterior_log_variance
-
- @torch.no_grad()
- def p_sample(self, x, c, t, clip_denoised=False, repeat_noise=False,
- return_codebook_ids=False, quantize_denoised=False, return_x0=False,
- temperature=1., noise_dropout=0., score_corrector=None, corrector_kwargs=None):
- b, *_, device = *x.shape, x.device
- outputs = self.p_mean_variance(x=x, c=c, t=t, clip_denoised=clip_denoised,
- return_codebook_ids=return_codebook_ids,
- quantize_denoised=quantize_denoised,
- return_x0=return_x0,
- score_corrector=score_corrector, corrector_kwargs=corrector_kwargs)
- if return_codebook_ids:
- raise DeprecationWarning("Support dropped.")
- model_mean, _, model_log_variance, logits = outputs
- elif return_x0:
- model_mean, _, model_log_variance, x0 = outputs
- else:
- model_mean, _, model_log_variance = outputs
-
- noise = noise_like(x.shape, device, repeat_noise) * temperature
- if noise_dropout > 0.:
- noise = torch.nn.functional.dropout(noise, p=noise_dropout)
- # no noise when t == 0
- nonzero_mask = (1 - (t == 0).float()).reshape(b, *((1,) * (len(x.shape) - 1)))
-
- if return_codebook_ids:
- return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise, logits.argmax(dim=1)
- if return_x0:
- return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise, x0
- else:
- return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise
-
- @torch.no_grad()
- def progressive_denoising(self, cond, shape, verbose=True, callback=None, quantize_denoised=False,
- img_callback=None, mask=None, x0=None, temperature=1., noise_dropout=0.,
- score_corrector=None, corrector_kwargs=None, batch_size=None, x_T=None, start_T=None,
- log_every_t=None):
- if not log_every_t:
- log_every_t = self.log_every_t
- timesteps = self.num_timesteps
- if batch_size is not None:
- b = batch_size if batch_size is not None else shape[0]
- shape = [batch_size] + list(shape)
- else:
- b = batch_size = shape[0]
- if x_T is None:
- img = torch.randn(shape, device=self.device)
- else:
- img = x_T
- intermediates = []
- if cond is not None:
- if isinstance(cond, dict):
- cond = {key: cond[key][:batch_size] if not isinstance(cond[key], list) else
- list(map(lambda x: x[:batch_size], cond[key])) for key in cond}
- else:
- cond = [c[:batch_size] for c in cond] if isinstance(cond, list) else cond[:batch_size]
-
- if start_T is not None:
- timesteps = min(timesteps, start_T)
- iterator = tqdm(reversed(range(0, timesteps)), desc='Progressive Generation',
- total=timesteps) if verbose else reversed(
- range(0, timesteps))
- if type(temperature) == float:
- temperature = [temperature] * timesteps
-
- for i in iterator:
- ts = torch.full((b,), i, device=self.device, dtype=torch.long)
- if self.shorten_cond_schedule:
- assert self.model.conditioning_key != 'hybrid'
- tc = self.cond_ids[ts].to(cond.device)
- cond = self.q_sample(x_start=cond, t=tc, noise=torch.randn_like(cond))
-
- img, x0_partial = self.p_sample(img, cond, ts,
- clip_denoised=self.clip_denoised,
- quantize_denoised=quantize_denoised, return_x0=True,
- temperature=temperature[i], noise_dropout=noise_dropout,
- score_corrector=score_corrector, corrector_kwargs=corrector_kwargs)
- if mask is not None:
- assert x0 is not None
- img_orig = self.q_sample(x0, ts)
- img = img_orig * mask + (1. - mask) * img
-
- if i % log_every_t == 0 or i == timesteps - 1:
- intermediates.append(x0_partial)
- if callback: callback(i)
- if img_callback: img_callback(img, i)
- return img, intermediates
-
- @torch.no_grad()
- def p_sample_loop(self, cond, shape, return_intermediates=False,
- x_T=None, verbose=True, callback=None, timesteps=None, quantize_denoised=False,
- mask=None, x0=None, img_callback=None, start_T=None,
- log_every_t=None):
-
- if not log_every_t:
- log_every_t = self.log_every_t
- device = self.betas.device
- b = shape[0]
- if x_T is None:
- img = torch.randn(shape, device=device)
- else:
- img = x_T
-
- intermediates = [img]
- if timesteps is None:
- timesteps = self.num_timesteps
-
- if start_T is not None:
- timesteps = min(timesteps, start_T)
- iterator = tqdm(reversed(range(0, timesteps)), desc='Sampling t', total=timesteps) if verbose else reversed(
- range(0, timesteps))
-
- if mask is not None:
- assert x0 is not None
- assert x0.shape[2:3] == mask.shape[2:3] # spatial size has to match
-
- for i in iterator:
- ts = torch.full((b,), i, device=device, dtype=torch.long)
- if self.shorten_cond_schedule:
- assert self.model.conditioning_key != 'hybrid'
- tc = self.cond_ids[ts].to(cond.device)
- cond = self.q_sample(x_start=cond, t=tc, noise=torch.randn_like(cond))
-
- img = self.p_sample(img, cond, ts,
- clip_denoised=self.clip_denoised,
- quantize_denoised=quantize_denoised)
- if mask is not None:
- img_orig = self.q_sample(x0, ts)
- img = img_orig * mask + (1. - mask) * img
-
- if i % log_every_t == 0 or i == timesteps - 1:
- intermediates.append(img)
- if callback: callback(i)
- if img_callback: img_callback(img, i)
-
- if return_intermediates:
- return img, intermediates
- return img
-
- @torch.no_grad()
- def sample(self, cond, batch_size=16, return_intermediates=False, x_T=None,
- verbose=True, timesteps=None, quantize_denoised=False,
- mask=None, x0=None, shape=None,**kwargs):
- if shape is None:
- shape = (batch_size, self.channels, self.image_size, self.image_size)
- if cond is not None:
- if isinstance(cond, dict):
- cond = {key: cond[key][:batch_size] if not isinstance(cond[key], list) else
- list(map(lambda x: x[:batch_size], cond[key])) for key in cond}
- else:
- cond = [c[:batch_size] for c in cond] if isinstance(cond, list) else cond[:batch_size]
- return self.p_sample_loop(cond,
- shape,
- return_intermediates=return_intermediates, x_T=x_T,
- verbose=verbose, timesteps=timesteps, quantize_denoised=quantize_denoised,
- mask=mask, x0=x0)
-
- @torch.no_grad()
- def sample_log(self, cond, batch_size, ddim, ddim_steps, **kwargs):
- if ddim:
- ddim_sampler = DDIMSampler(self)
- shape = (self.channels, self.image_size, self.image_size)
- samples, intermediates = ddim_sampler.sample(ddim_steps, batch_size,
- shape, cond, verbose=False, **kwargs)
-
- else:
- samples, intermediates = self.sample(cond=cond, batch_size=batch_size,
- return_intermediates=True, **kwargs)
-
- return samples, intermediates
-
- @torch.no_grad()
- def get_unconditional_conditioning(self, batch_size, null_label=None, image_size=512):
- if null_label is not None:
- xc = null_label
- if isinstance(xc, ListConfig):
- xc = list(xc)
- if isinstance(xc, dict) or isinstance(xc, list):
- c = self.get_learned_conditioning(xc)
- else:
- if hasattr(xc, "to"):
- xc = xc.to(self.device)
- c = self.get_learned_conditioning(xc)
- else:
- # todo: get null label from cond_stage_model
- raise NotImplementedError()
- c = repeat(c, '1 ... -> b ...', b=batch_size).to(self.device)
- cond = {}
- cond["c_crossattn"] = [c]
- cond["c_concat"] = [torch.zeros([batch_size, 4, image_size // 8, image_size // 8]).to(self.device)]
- return cond
-
- @torch.no_grad()
- def log_images(self, batch, N=8, n_row=4, sample=True, ddim_steps=200, ddim_eta=1., return_keys=None,
- quantize_denoised=True, inpaint=True, plot_denoise_rows=False, plot_progressive_rows=True,
- plot_diffusion_rows=True, unconditional_guidance_scale=1., unconditional_guidance_label=None,
- use_ema_scope=True,
- **kwargs):
- ema_scope = self.ema_scope if use_ema_scope else nullcontext
- use_ddim = ddim_steps is not None
-
- log = dict()
- z, c, x, xrec, xc = self.get_input(batch, self.first_stage_key,
- return_first_stage_outputs=True,
- force_c_encode=True,
- return_original_cond=True,
- bs=N)
- N = min(x.shape[0], N)
- n_row = min(x.shape[0], n_row)
- log["inputs"] = x
- log["reconstruction"] = xrec
- if self.model.conditioning_key is not None:
- if hasattr(self.cond_stage_model, "decode"):
- xc = self.cond_stage_model.decode(c)
- log["conditioning"] = xc
- elif self.cond_stage_key in ["caption", "txt"]:
- xc = log_txt_as_img((x.shape[2], x.shape[3]), batch[self.cond_stage_key], size=x.shape[2]//25)
- log["conditioning"] = xc
- elif self.cond_stage_key == 'class_label':
- xc = log_txt_as_img((x.shape[2], x.shape[3]), batch["human_label"], size=x.shape[2]//25)
- log['conditioning'] = xc
- elif isimage(xc):
- log["conditioning"] = xc
- if ismap(xc):
- log["original_conditioning"] = self.to_rgb(xc)
-
- if plot_diffusion_rows:
- # get diffusion row
- diffusion_row = list()
- z_start = z[:n_row]
- for t in range(self.num_timesteps):
- if t % self.log_every_t == 0 or t == self.num_timesteps - 1:
- t = repeat(torch.tensor([t]), '1 -> b', b=n_row)
- t = t.to(self.device).long()
- noise = torch.randn_like(z_start)
- z_noisy = self.q_sample(x_start=z_start, t=t, noise=noise)
- diffusion_row.append(self.decode_first_stage(z_noisy))
-
- diffusion_row = torch.stack(diffusion_row) # n_log_step, n_row, C, H, W
- diffusion_grid = rearrange(diffusion_row, 'n b c h w -> b n c h w')
- diffusion_grid = rearrange(diffusion_grid, 'b n c h w -> (b n) c h w')
- diffusion_grid = make_grid(diffusion_grid, nrow=diffusion_row.shape[0])
- log["diffusion_row"] = diffusion_grid
-
- if sample:
- # get denoise row
- with ema_scope("Sampling"):
- samples, z_denoise_row = self.sample_log(cond=c,batch_size=N,ddim=use_ddim,
- ddim_steps=ddim_steps,eta=ddim_eta)
- # samples, z_denoise_row = self.sample(cond=c, batch_size=N, return_intermediates=True)
- x_samples = self.decode_first_stage(samples)
- log["samples"] = x_samples
- if plot_denoise_rows:
- denoise_grid = self._get_denoise_row_from_list(z_denoise_row)
- log["denoise_row"] = denoise_grid
-
- if quantize_denoised and not isinstance(self.first_stage_model, AutoencoderKL) and not isinstance(
- self.first_stage_model, IdentityFirstStage):
- # also display when quantizing x0 while sampling
- with ema_scope("Plotting Quantized Denoised"):
- samples, z_denoise_row = self.sample_log(cond=c,batch_size=N,ddim=use_ddim,
- ddim_steps=ddim_steps,eta=ddim_eta,
- quantize_denoised=True)
- # samples, z_denoise_row = self.sample(cond=c, batch_size=N, return_intermediates=True,
- # quantize_denoised=True)
- x_samples = self.decode_first_stage(samples.to(self.device))
- log["samples_x0_quantized"] = x_samples
-
- if unconditional_guidance_scale > 1.0:
- uc = self.get_unconditional_conditioning(N, unconditional_guidance_label, image_size=x.shape[-1])
- # uc = torch.zeros_like(c)
- with ema_scope("Sampling with classifier-free guidance"):
- samples_cfg, _ = self.sample_log(cond=c, batch_size=N, ddim=use_ddim,
- ddim_steps=ddim_steps, eta=ddim_eta,
- unconditional_guidance_scale=unconditional_guidance_scale,
- unconditional_conditioning=uc,
- )
- x_samples_cfg = self.decode_first_stage(samples_cfg)
- log[f"samples_cfg_scale_{unconditional_guidance_scale:.2f}"] = x_samples_cfg
-
- if inpaint:
- # make a simple center square
- b, h, w = z.shape[0], z.shape[2], z.shape[3]
- mask = torch.ones(N, h, w).to(self.device)
- # zeros will be filled in
- mask[:, h // 4:3 * h // 4, w // 4:3 * w // 4] = 0.
- mask = mask[:, None, ...]
- with ema_scope("Plotting Inpaint"):
-
- samples, _ = self.sample_log(cond=c,batch_size=N,ddim=use_ddim, eta=ddim_eta,
- ddim_steps=ddim_steps, x0=z[:N], mask=mask)
- x_samples = self.decode_first_stage(samples.to(self.device))
- log["samples_inpainting"] = x_samples
- log["mask"] = mask
-
- # outpaint
- mask = 1. - mask
- with ema_scope("Plotting Outpaint"):
- samples, _ = self.sample_log(cond=c, batch_size=N, ddim=use_ddim,eta=ddim_eta,
- ddim_steps=ddim_steps, x0=z[:N], mask=mask)
- x_samples = self.decode_first_stage(samples.to(self.device))
- log["samples_outpainting"] = x_samples
-
- if plot_progressive_rows:
- with ema_scope("Plotting Progressives"):
- img, progressives = self.progressive_denoising(c,
- shape=(self.channels, self.image_size, self.image_size),
- batch_size=N)
- prog_row = self._get_denoise_row_from_list(progressives, desc="Progressive Generation")
- log["progressive_row"] = prog_row
-
- if return_keys:
- if np.intersect1d(list(log.keys()), return_keys).shape[0] == 0:
- return log
- else:
- return {key: log[key] for key in return_keys}
- return log
-
- def configure_optimizers(self):
- lr = self.learning_rate
- params = []
- if self.unet_trainable == "attn":
- print("Training only unet attention layers")
- for n, m in self.model.named_modules():
- if isinstance(m, CrossAttention) and n.endswith('attn2'):
- params.extend(m.parameters())
-        elif self.unet_trainable == "conv_in":
- print("Training only unet input conv layers")
- params = list(self.model.diffusion_model.input_blocks[0][0].parameters())
- elif self.unet_trainable is True or self.unet_trainable == "all":
- print("Training the full unet")
- params = list(self.model.parameters())
- else:
- raise ValueError(f"Unrecognised setting for unet_trainable: {self.unet_trainable}")
-
- if self.cond_stage_trainable:
- print(f"{self.__class__.__name__}: Also optimizing conditioner params!")
- params = params + list(self.cond_stage_model.parameters())
- if self.learn_logvar:
- print('Diffusion model optimizing logvar')
- params.append(self.logvar)
-
- if self.cc_projection is not None:
- params = params + list(self.cc_projection.parameters())
- print('========== optimizing for cc projection weight ==========')
-
- opt = torch.optim.AdamW([{"params": self.model.parameters(), "lr": lr},
- {"params": self.cc_projection.parameters(), "lr": 10. * lr}], lr=lr)
- if self.use_scheduler:
- assert 'target' in self.scheduler_config
- scheduler = instantiate_from_config(self.scheduler_config)
-
- print("Setting up LambdaLR scheduler...")
- scheduler = [
- {
- 'scheduler': LambdaLR(opt, lr_lambda=scheduler.schedule),
- 'interval': 'step',
- 'frequency': 1
- }]
- return [opt], scheduler
- return opt
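
The optimizer above puts the UNet and the pose projection into separate parameter groups with different learning rates (the projection at 10x the base rate), then optionally wraps a per-step LambdaLR. A minimal sketch of that pattern with toy modules; the module stand-ins and the warmup schedule are illustrative, only the two-group/10x structure mirrors the code:

```python
import torch
import torch.nn as nn
from torch.optim.lr_scheduler import LambdaLR

unet = nn.Conv2d(4, 4, 3, padding=1)        # stand-in for the diffusion UNet
cc_projection = nn.Linear(772, 768)
base_lr = 1e-4

opt = torch.optim.AdamW([
    {"params": unet.parameters(), "lr": base_lr},
    {"params": cc_projection.parameters(), "lr": 10.0 * base_lr},
])

# linear warmup over the first 100 steps, as an example schedule
scheduler = LambdaLR(opt, lr_lambda=lambda step: min(1.0, (step + 1) / 100))

for step in range(3):
    opt.zero_grad()
    loss = unet(torch.randn(1, 4, 8, 8)).pow(2).mean() + cc_projection.weight.pow(2).mean()
    loss.backward()
    opt.step()
    scheduler.step()
    print([g["lr"] for g in opt.param_groups])  # both groups scale with the same schedule
```
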
-
- @torch.no_grad()
- def to_rgb(self, x):
- x = x.float()
- if not hasattr(self, "colorize"):
- self.colorize = torch.randn(3, x.shape[1], 1, 1).to(x)
- x = nn.functional.conv2d(x, weight=self.colorize)
- x = 2. * (x - x.min()) / (x.max() - x.min()) - 1.
- return x
-
-
-class DiffusionWrapper(pl.LightningModule):
- def __init__(self, diff_model_config, conditioning_key):
- super().__init__()
- self.diffusion_model = instantiate_from_config(diff_model_config)
- self.conditioning_key = conditioning_key
- assert self.conditioning_key in [None, 'concat', 'crossattn', 'hybrid', 'adm', 'hybrid-adm']
-
- def forward(self, x, t, c_concat: list = None, c_crossattn: list = None, c_adm=None):
- if self.conditioning_key is None:
- out = self.diffusion_model(x, t)
- elif self.conditioning_key == 'concat':
- xc = torch.cat([x] + c_concat, dim=1)
- out = self.diffusion_model(xc, t)
- elif self.conditioning_key == 'crossattn':
-            # c_crossattn: list of tensors, e.g. each of shape torch.Size([8, 1, 768])
-            # cc (after concat): torch.Size([8, 1, 768])
- cc = torch.cat(c_crossattn, 1)
- out = self.diffusion_model(x, t, context=cc)
- elif self.conditioning_key == 'hybrid':
- xc = torch.cat([x] + c_concat, dim=1)
- cc = torch.cat(c_crossattn, 1)
- out = self.diffusion_model(xc, t, context=cc)
- elif self.conditioning_key == 'hybrid-adm':
- assert c_adm is not None
- xc = torch.cat([x] + c_concat, dim=1)
- cc = torch.cat(c_crossattn, 1)
- out = self.diffusion_model(xc, t, context=cc, y=c_adm)
- elif self.conditioning_key == 'adm':
- cc = c_crossattn[0]
- out = self.diffusion_model(x, t, y=cc)
- else:
- raise NotImplementedError()
-
- return out
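
`DiffusionWrapper` only routes the conditioning: 'concat' stacks it onto the input channels, 'crossattn' passes it as attention context, and 'hybrid' does both. A toy dispatch sketch showing the resulting shapes for the hybrid case (the inner model here is a stub, not the real UNet):

```python
import torch

def toy_unet(x, t, context=None, y=None):
    # stub: report what the UNet would receive
    return {"x": tuple(x.shape), "context": None if context is None else tuple(context.shape)}

x = torch.randn(8, 4, 32, 32)
t = torch.zeros(8, dtype=torch.long)
c_concat = [torch.randn(8, 4, 32, 32)]
c_crossattn = [torch.randn(8, 1, 768)]

# 'hybrid': concatenate along channels AND provide cross-attention context
xc = torch.cat([x] + c_concat, dim=1)        # (8, 8, 32, 32)
cc = torch.cat(c_crossattn, dim=1)           # (8, 1, 768)
print(toy_unet(xc, t, context=cc))
```
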
-
-
-class LatentUpscaleDiffusion(LatentDiffusion):
- def __init__(self, *args, low_scale_config, low_scale_key="LR", **kwargs):
- super().__init__(*args, **kwargs)
- # assumes that neither the cond_stage nor the low_scale_model contain trainable params
- assert not self.cond_stage_trainable
- self.instantiate_low_stage(low_scale_config)
- self.low_scale_key = low_scale_key
-
- def instantiate_low_stage(self, config):
- model = instantiate_from_config(config)
- self.low_scale_model = model.eval()
- self.low_scale_model.train = disabled_train
- for param in self.low_scale_model.parameters():
- param.requires_grad = False
-
- @torch.no_grad()
- def get_input(self, batch, k, cond_key=None, bs=None, log_mode=False):
- if not log_mode:
- z, c = super().get_input(batch, k, force_c_encode=True, bs=bs)
- else:
- z, c, x, xrec, xc = super().get_input(batch, self.first_stage_key, return_first_stage_outputs=True,
- force_c_encode=True, return_original_cond=True, bs=bs)
- x_low = batch[self.low_scale_key][:bs]
- x_low = rearrange(x_low, 'b h w c -> b c h w')
- x_low = x_low.to(memory_format=torch.contiguous_format).float()
- zx, noise_level = self.low_scale_model(x_low)
- all_conds = {"c_concat": [zx], "c_crossattn": [c], "c_adm": noise_level}
- #import pudb; pu.db
- if log_mode:
- # TODO: maybe disable if too expensive
- interpretability = False
- if interpretability:
- zx = zx[:, :, ::2, ::2]
- x_low_rec = self.low_scale_model.decode(zx)
- return z, all_conds, x, xrec, xc, x_low, x_low_rec, noise_level
- return z, all_conds
-
- @torch.no_grad()
- def log_images(self, batch, N=8, n_row=4, sample=True, ddim_steps=200, ddim_eta=1., return_keys=None,
- plot_denoise_rows=False, plot_progressive_rows=True, plot_diffusion_rows=True,
- unconditional_guidance_scale=1., unconditional_guidance_label=None, use_ema_scope=True,
- **kwargs):
- ema_scope = self.ema_scope if use_ema_scope else nullcontext
- use_ddim = ddim_steps is not None
-
- log = dict()
- z, c, x, xrec, xc, x_low, x_low_rec, noise_level = self.get_input(batch, self.first_stage_key, bs=N,
- log_mode=True)
- N = min(x.shape[0], N)
- n_row = min(x.shape[0], n_row)
- log["inputs"] = x
- log["reconstruction"] = xrec
- log["x_lr"] = x_low
- log[f"x_lr_rec_@noise_levels{'-'.join(map(lambda x: str(x), list(noise_level.cpu().numpy())))}"] = x_low_rec
- if self.model.conditioning_key is not None:
- if hasattr(self.cond_stage_model, "decode"):
- xc = self.cond_stage_model.decode(c)
- log["conditioning"] = xc
- elif self.cond_stage_key in ["caption", "txt"]:
- xc = log_txt_as_img((x.shape[2], x.shape[3]), batch[self.cond_stage_key], size=x.shape[2]//25)
- log["conditioning"] = xc
- elif self.cond_stage_key == 'class_label':
- xc = log_txt_as_img((x.shape[2], x.shape[3]), batch["human_label"], size=x.shape[2]//25)
- log['conditioning'] = xc
- elif isimage(xc):
- log["conditioning"] = xc
- if ismap(xc):
- log["original_conditioning"] = self.to_rgb(xc)
-
- if plot_diffusion_rows:
- # get diffusion row
- diffusion_row = list()
- z_start = z[:n_row]
- for t in range(self.num_timesteps):
- if t % self.log_every_t == 0 or t == self.num_timesteps - 1:
- t = repeat(torch.tensor([t]), '1 -> b', b=n_row)
- t = t.to(self.device).long()
- noise = torch.randn_like(z_start)
- z_noisy = self.q_sample(x_start=z_start, t=t, noise=noise)
- diffusion_row.append(self.decode_first_stage(z_noisy))
-
- diffusion_row = torch.stack(diffusion_row) # n_log_step, n_row, C, H, W
- diffusion_grid = rearrange(diffusion_row, 'n b c h w -> b n c h w')
- diffusion_grid = rearrange(diffusion_grid, 'b n c h w -> (b n) c h w')
- diffusion_grid = make_grid(diffusion_grid, nrow=diffusion_row.shape[0])
- log["diffusion_row"] = diffusion_grid
-
- if sample:
- # get denoise row
- with ema_scope("Sampling"):
- samples, z_denoise_row = self.sample_log(cond=c, batch_size=N, ddim=use_ddim,
- ddim_steps=ddim_steps, eta=ddim_eta)
- # samples, z_denoise_row = self.sample(cond=c, batch_size=N, return_intermediates=True)
- x_samples = self.decode_first_stage(samples)
- log["samples"] = x_samples
- if plot_denoise_rows:
- denoise_grid = self._get_denoise_row_from_list(z_denoise_row)
- log["denoise_row"] = denoise_grid
-
- if unconditional_guidance_scale > 1.0:
- uc_tmp = self.get_unconditional_conditioning(N, unconditional_guidance_label)
- # TODO explore better "unconditional" choices for the other keys
- # maybe guide away from empty text label and highest noise level and maximally degraded zx?
- uc = dict()
- for k in c:
- if k == "c_crossattn":
- assert isinstance(c[k], list) and len(c[k]) == 1
- uc[k] = [uc_tmp]
- elif k == "c_adm": # todo: only run with text-based guidance?
- assert isinstance(c[k], torch.Tensor)
- uc[k] = torch.ones_like(c[k]) * self.low_scale_model.max_noise_level
- elif isinstance(c[k], list):
- uc[k] = [c[k][i] for i in range(len(c[k]))]
- else:
- uc[k] = c[k]
-
- with ema_scope("Sampling with classifier-free guidance"):
- samples_cfg, _ = self.sample_log(cond=c, batch_size=N, ddim=use_ddim,
- ddim_steps=ddim_steps, eta=ddim_eta,
- unconditional_guidance_scale=unconditional_guidance_scale,
- unconditional_conditioning=uc,
- )
- x_samples_cfg = self.decode_first_stage(samples_cfg)
- log[f"samples_cfg_scale_{unconditional_guidance_scale:.2f}"] = x_samples_cfg
-
- if plot_progressive_rows:
- with ema_scope("Plotting Progressives"):
- img, progressives = self.progressive_denoising(c,
- shape=(self.channels, self.image_size, self.image_size),
- batch_size=N)
- prog_row = self._get_denoise_row_from_list(progressives, desc="Progressive Generation")
- log["progressive_row"] = prog_row
-
- return log
-
-
-class LatentInpaintDiffusion(LatentDiffusion):
- """
- can either run as pure inpainting model (only concat mode) or with mixed conditionings,
- e.g. mask as concat and text via cross-attn.
- To disable finetuning mode, set finetune_keys to None
- """
- def __init__(self,
- finetune_keys=("model.diffusion_model.input_blocks.0.0.weight",
- "model_ema.diffusion_modelinput_blocks00weight"
- ),
- concat_keys=("mask", "masked_image"),
- masked_image_key="masked_image",
- keep_finetune_dims=4, # if model was trained without concat mode before and we would like to keep these channels
- c_concat_log_start=None, # to log reconstruction of c_concat codes
- c_concat_log_end=None,
- *args, **kwargs
- ):
- ckpt_path = kwargs.pop("ckpt_path", None)
- ignore_keys = kwargs.pop("ignore_keys", list())
- super().__init__(*args, **kwargs)
- self.masked_image_key = masked_image_key
- assert self.masked_image_key in concat_keys
- self.finetune_keys = finetune_keys
- self.concat_keys = concat_keys
- self.keep_dims = keep_finetune_dims
- self.c_concat_log_start = c_concat_log_start
- self.c_concat_log_end = c_concat_log_end
- if exists(self.finetune_keys): assert exists(ckpt_path), 'can only finetune from a given checkpoint'
- if exists(ckpt_path):
- self.init_from_ckpt(ckpt_path, ignore_keys)
-
- def init_from_ckpt(self, path, ignore_keys=list(), only_model=False):
- sd = torch.load(path, map_location="cpu")
- if "state_dict" in list(sd.keys()):
- sd = sd["state_dict"]
- keys = list(sd.keys())
- for k in keys:
- for ik in ignore_keys:
- if k.startswith(ik):
- print("Deleting key {} from state_dict.".format(k))
- del sd[k]
-
- # make it explicit, finetune by including extra input channels
- if exists(self.finetune_keys) and k in self.finetune_keys:
- new_entry = None
- for name, param in self.named_parameters():
- if name in self.finetune_keys:
- print(f"modifying key '{name}' and keeping its original {self.keep_dims} (channels) dimensions only")
- new_entry = torch.zeros_like(param) # zero init
- assert exists(new_entry), 'did not find matching parameter to modify'
- new_entry[:, :self.keep_dims, ...] = sd[k]
- sd[k] = new_entry
-
- missing, unexpected = self.load_state_dict(sd, strict=False) if not only_model else self.model.load_state_dict(sd, strict=False)
- print(f"Restored from {path} with {len(missing)} missing and {len(unexpected)} unexpected keys")
- if len(missing) > 0:
- print(f"Missing Keys: {missing}")
- if len(unexpected) > 0:
- print(f"Unexpected Keys: {unexpected}")
-
- @torch.no_grad()
- def get_input(self, batch, k, cond_key=None, bs=None, return_first_stage_outputs=False):
- # note: restricted to non-trainable encoders currently
- assert not self.cond_stage_trainable, 'trainable cond stages not yet supported for inpainting'
- z, c, x, xrec, xc = super().get_input(batch, self.first_stage_key, return_first_stage_outputs=True,
- force_c_encode=True, return_original_cond=True, bs=bs)
-
- assert exists(self.concat_keys)
- c_cat = list()
- for ck in self.concat_keys:
- cc = rearrange(batch[ck], 'b h w c -> b c h w').to(memory_format=torch.contiguous_format).float()
- if bs is not None:
- cc = cc[:bs]
- cc = cc.to(self.device)
- bchw = z.shape
- if ck != self.masked_image_key:
- cc = torch.nn.functional.interpolate(cc, size=bchw[-2:])
- else:
- cc = self.get_first_stage_encoding(self.encode_first_stage(cc))
- c_cat.append(cc)
- c_cat = torch.cat(c_cat, dim=1)
- all_conds = {"c_concat": [c_cat], "c_crossattn": [c]}
- if return_first_stage_outputs:
- return z, all_conds, x, xrec, xc
- return z, all_conds
-
- @torch.no_grad()
- def log_images(self, batch, N=8, n_row=4, sample=True, ddim_steps=200, ddim_eta=1., return_keys=None,
- quantize_denoised=True, inpaint=True, plot_denoise_rows=False, plot_progressive_rows=True,
- plot_diffusion_rows=True, unconditional_guidance_scale=1., unconditional_guidance_label=None,
- use_ema_scope=True,
- **kwargs):
- ema_scope = self.ema_scope if use_ema_scope else nullcontext
- use_ddim = ddim_steps is not None
-
- log = dict()
- z, c, x, xrec, xc = self.get_input(batch, self.first_stage_key, bs=N, return_first_stage_outputs=True)
- c_cat, c = c["c_concat"][0], c["c_crossattn"][0]
- N = min(x.shape[0], N)
- n_row = min(x.shape[0], n_row)
- log["inputs"] = x
- log["reconstruction"] = xrec
- if self.model.conditioning_key is not None:
- if hasattr(self.cond_stage_model, "decode"):
- xc = self.cond_stage_model.decode(c)
- log["conditioning"] = xc
- elif self.cond_stage_key in ["caption", "txt"]:
- xc = log_txt_as_img((x.shape[2], x.shape[3]), batch[self.cond_stage_key], size=x.shape[2] // 25)
- log["conditioning"] = xc
- elif self.cond_stage_key == 'class_label':
- xc = log_txt_as_img((x.shape[2], x.shape[3]), batch["human_label"], size=x.shape[2] // 25)
- log['conditioning'] = xc
- elif isimage(xc):
- log["conditioning"] = xc
- if ismap(xc):
- log["original_conditioning"] = self.to_rgb(xc)
-
- if not (self.c_concat_log_start is None and self.c_concat_log_end is None):
- log["c_concat_decoded"] = self.decode_first_stage(c_cat[:,self.c_concat_log_start:self.c_concat_log_end])
-
- if plot_diffusion_rows:
- # get diffusion row
- diffusion_row = list()
- z_start = z[:n_row]
- for t in range(self.num_timesteps):
- if t % self.log_every_t == 0 or t == self.num_timesteps - 1:
- t = repeat(torch.tensor([t]), '1 -> b', b=n_row)
- t = t.to(self.device).long()
- noise = torch.randn_like(z_start)
- z_noisy = self.q_sample(x_start=z_start, t=t, noise=noise)
- diffusion_row.append(self.decode_first_stage(z_noisy))
-
- diffusion_row = torch.stack(diffusion_row) # n_log_step, n_row, C, H, W
- diffusion_grid = rearrange(diffusion_row, 'n b c h w -> b n c h w')
- diffusion_grid = rearrange(diffusion_grid, 'b n c h w -> (b n) c h w')
- diffusion_grid = make_grid(diffusion_grid, nrow=diffusion_row.shape[0])
- log["diffusion_row"] = diffusion_grid
-
- if sample:
- # get denoise row
- with ema_scope("Sampling"):
- samples, z_denoise_row = self.sample_log(cond={"c_concat": [c_cat], "c_crossattn": [c]},
- batch_size=N, ddim=use_ddim,
- ddim_steps=ddim_steps, eta=ddim_eta)
- # samples, z_denoise_row = self.sample(cond=c, batch_size=N, return_intermediates=True)
- x_samples = self.decode_first_stage(samples)
- log["samples"] = x_samples
- if plot_denoise_rows:
- denoise_grid = self._get_denoise_row_from_list(z_denoise_row)
- log["denoise_row"] = denoise_grid
-
- if unconditional_guidance_scale > 1.0:
- uc_cross = self.get_unconditional_conditioning(N, unconditional_guidance_label)
- uc_cat = c_cat
- uc_full = {"c_concat": [uc_cat], "c_crossattn": [uc_cross]}
- with ema_scope("Sampling with classifier-free guidance"):
- samples_cfg, _ = self.sample_log(cond={"c_concat": [c_cat], "c_crossattn": [c]},
- batch_size=N, ddim=use_ddim,
- ddim_steps=ddim_steps, eta=ddim_eta,
- unconditional_guidance_scale=unconditional_guidance_scale,
- unconditional_conditioning=uc_full,
- )
- x_samples_cfg = self.decode_first_stage(samples_cfg)
- log[f"samples_cfg_scale_{unconditional_guidance_scale:.2f}"] = x_samples_cfg
-
- log["masked_image"] = rearrange(batch["masked_image"],
- 'b h w c -> b c h w').to(memory_format=torch.contiguous_format).float()
- return log
-
-
-class Layout2ImgDiffusion(LatentDiffusion):
- # TODO: move all layout-specific hacks to this class
- def __init__(self, cond_stage_key, *args, **kwargs):
- assert cond_stage_key == 'coordinates_bbox', 'Layout2ImgDiffusion only for cond_stage_key="coordinates_bbox"'
- super().__init__(cond_stage_key=cond_stage_key, *args, **kwargs)
-
- def log_images(self, batch, N=8, *args, **kwargs):
- logs = super().log_images(batch=batch, N=N, *args, **kwargs)
-
- key = 'train' if self.training else 'validation'
- dset = self.trainer.datamodule.datasets[key]
- mapper = dset.conditional_builders[self.cond_stage_key]
-
- bbox_imgs = []
- map_fn = lambda catno: dset.get_textual_label(dset.get_category_id(catno))
- for tknzd_bbox in batch[self.cond_stage_key][:N]:
- bboximg = mapper.plot(tknzd_bbox.detach().cpu(), map_fn, (256, 256))
- bbox_imgs.append(bboximg)
-
- cond_img = torch.stack(bbox_imgs, dim=0)
- logs['bbox_image'] = cond_img
- return logs
-
-
-class SimpleUpscaleDiffusion(LatentDiffusion):
- def __init__(self, *args, low_scale_key="LR", **kwargs):
- super().__init__(*args, **kwargs)
- # assumes that neither the cond_stage nor the low_scale_model contain trainable params
- assert not self.cond_stage_trainable
- self.low_scale_key = low_scale_key
-
- @torch.no_grad()
- def get_input(self, batch, k, cond_key=None, bs=None, log_mode=False):
- if not log_mode:
- z, c = super().get_input(batch, k, force_c_encode=True, bs=bs)
- else:
- z, c, x, xrec, xc = super().get_input(batch, self.first_stage_key, return_first_stage_outputs=True,
- force_c_encode=True, return_original_cond=True, bs=bs)
- x_low = batch[self.low_scale_key][:bs]
- x_low = rearrange(x_low, 'b h w c -> b c h w')
- x_low = x_low.to(memory_format=torch.contiguous_format).float()
-
- encoder_posterior = self.encode_first_stage(x_low)
- zx = self.get_first_stage_encoding(encoder_posterior).detach()
- all_conds = {"c_concat": [zx], "c_crossattn": [c]}
-
- if log_mode:
- # TODO: maybe disable if too expensive
- interpretability = False
- if interpretability:
- zx = zx[:, :, ::2, ::2]
- return z, all_conds, x, xrec, xc, x_low
- return z, all_conds
-
- @torch.no_grad()
- def log_images(self, batch, N=8, n_row=4, sample=True, ddim_steps=200, ddim_eta=1., return_keys=None,
- plot_denoise_rows=False, plot_progressive_rows=True, plot_diffusion_rows=True,
- unconditional_guidance_scale=1., unconditional_guidance_label=None, use_ema_scope=True,
- **kwargs):
- ema_scope = self.ema_scope if use_ema_scope else nullcontext
- use_ddim = ddim_steps is not None
-
- log = dict()
- z, c, x, xrec, xc, x_low = self.get_input(batch, self.first_stage_key, bs=N, log_mode=True)
- N = min(x.shape[0], N)
- n_row = min(x.shape[0], n_row)
- log["inputs"] = x
- log["reconstruction"] = xrec
- log["x_lr"] = x_low
-
- if self.model.conditioning_key is not None:
- if hasattr(self.cond_stage_model, "decode"):
- xc = self.cond_stage_model.decode(c)
- log["conditioning"] = xc
- elif self.cond_stage_key in ["caption", "txt"]:
- xc = log_txt_as_img((x.shape[2], x.shape[3]), batch[self.cond_stage_key], size=x.shape[2]//25)
- log["conditioning"] = xc
- elif self.cond_stage_key == 'class_label':
- xc = log_txt_as_img((x.shape[2], x.shape[3]), batch["human_label"], size=x.shape[2]//25)
- log['conditioning'] = xc
- elif isimage(xc):
- log["conditioning"] = xc
- if ismap(xc):
- log["original_conditioning"] = self.to_rgb(xc)
-
- if sample:
- # get denoise row
- with ema_scope("Sampling"):
- samples, z_denoise_row = self.sample_log(cond=c, batch_size=N, ddim=use_ddim,
- ddim_steps=ddim_steps, eta=ddim_eta)
- # samples, z_denoise_row = self.sample(cond=c, batch_size=N, return_intermediates=True)
- x_samples = self.decode_first_stage(samples)
- log["samples"] = x_samples
-
- if unconditional_guidance_scale > 1.0:
- uc_tmp = self.get_unconditional_conditioning(N, unconditional_guidance_label)
- uc = dict()
- for k in c:
- if k == "c_crossattn":
- assert isinstance(c[k], list) and len(c[k]) == 1
- uc[k] = [uc_tmp]
- elif isinstance(c[k], list):
- uc[k] = [c[k][i] for i in range(len(c[k]))]
- else:
- uc[k] = c[k]
-
- with ema_scope("Sampling with classifier-free guidance"):
- samples_cfg, _ = self.sample_log(cond=c, batch_size=N, ddim=use_ddim,
- ddim_steps=ddim_steps, eta=ddim_eta,
- unconditional_guidance_scale=unconditional_guidance_scale,
- unconditional_conditioning=uc,
- )
- x_samples_cfg = self.decode_first_stage(samples_cfg)
- log[f"samples_cfg_scale_{unconditional_guidance_scale:.2f}"] = x_samples_cfg
- return log
-
-class MultiCatFrameDiffusion(LatentDiffusion):
- def __init__(self, *args, low_scale_key="LR", **kwargs):
- super().__init__(*args, **kwargs)
- # assumes that neither the cond_stage nor the low_scale_model contain trainable params
- assert not self.cond_stage_trainable
- self.low_scale_key = low_scale_key
-
- @torch.no_grad()
- def get_input(self, batch, k, cond_key=None, bs=None, log_mode=False):
- n = 2
- if not log_mode:
- z, c = super().get_input(batch, k, force_c_encode=True, bs=bs)
- else:
- z, c, x, xrec, xc = super().get_input(batch, self.first_stage_key, return_first_stage_outputs=True,
- force_c_encode=True, return_original_cond=True, bs=bs)
- cat_conds = batch[self.low_scale_key][:bs]
- cats = []
- for i in range(n):
- x_low = cat_conds[:,:,:,3*i:3*(i+1)]
- x_low = rearrange(x_low, 'b h w c -> b c h w')
- x_low = x_low.to(memory_format=torch.contiguous_format).float()
- encoder_posterior = self.encode_first_stage(x_low)
- zx = self.get_first_stage_encoding(encoder_posterior).detach()
- cats.append(zx)
-
- all_conds = {"c_concat": [torch.cat(cats, dim=1)], "c_crossattn": [c]}
-
- if log_mode:
- # TODO: maybe disable if too expensive
- interpretability = False
- if interpretability:
- zx = zx[:, :, ::2, ::2]
- return z, all_conds, x, xrec, xc, x_low
- return z, all_conds
-
- @torch.no_grad()
- def log_images(self, batch, N=8, n_row=4, sample=True, ddim_steps=200, ddim_eta=1., return_keys=None,
- plot_denoise_rows=False, plot_progressive_rows=True, plot_diffusion_rows=True,
- unconditional_guidance_scale=1., unconditional_guidance_label=None, use_ema_scope=True,
- **kwargs):
- ema_scope = self.ema_scope if use_ema_scope else nullcontext
- use_ddim = ddim_steps is not None
-
- log = dict()
- z, c, x, xrec, xc, x_low = self.get_input(batch, self.first_stage_key, bs=N, log_mode=True)
- N = min(x.shape[0], N)
- n_row = min(x.shape[0], n_row)
- log["inputs"] = x
- log["reconstruction"] = xrec
- log["x_lr"] = x_low
-
- if self.model.conditioning_key is not None:
- if hasattr(self.cond_stage_model, "decode"):
- xc = self.cond_stage_model.decode(c)
- log["conditioning"] = xc
- elif self.cond_stage_key in ["caption", "txt"]:
- xc = log_txt_as_img((x.shape[2], x.shape[3]), batch[self.cond_stage_key], size=x.shape[2]//25)
- log["conditioning"] = xc
- elif self.cond_stage_key == 'class_label':
- xc = log_txt_as_img((x.shape[2], x.shape[3]), batch["human_label"], size=x.shape[2]//25)
- log['conditioning'] = xc
- elif isimage(xc):
- log["conditioning"] = xc
- if ismap(xc):
- log["original_conditioning"] = self.to_rgb(xc)
-
- if sample:
- # get denoise row
- with ema_scope("Sampling"):
- samples, z_denoise_row = self.sample_log(cond=c, batch_size=N, ddim=use_ddim,
- ddim_steps=ddim_steps, eta=ddim_eta)
- # samples, z_denoise_row = self.sample(cond=c, batch_size=N, return_intermediates=True)
- x_samples = self.decode_first_stage(samples)
- log["samples"] = x_samples
-
- if unconditional_guidance_scale > 1.0:
- uc_tmp = self.get_unconditional_conditioning(N, unconditional_guidance_label)
- uc = dict()
- for k in c:
- if k == "c_crossattn":
- assert isinstance(c[k], list) and len(c[k]) == 1
- uc[k] = [uc_tmp]
- elif isinstance(c[k], list):
- uc[k] = [c[k][i] for i in range(len(c[k]))]
- else:
- uc[k] = c[k]
-
- with ema_scope("Sampling with classifier-free guidance"):
- samples_cfg, _ = self.sample_log(cond=c, batch_size=N, ddim=use_ddim,
- ddim_steps=ddim_steps, eta=ddim_eta,
- unconditional_guidance_scale=unconditional_guidance_scale,
- unconditional_conditioning=uc,
- )
- x_samples_cfg = self.decode_first_stage(samples_cfg)
- log[f"samples_cfg_scale_{unconditional_guidance_scale:.2f}"] = x_samples_cfg
- return log
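The log_images methods above all build an "unconditional" conditioning dict and then blend conditional and unconditional predictions. A rough, self-contained sketch of that classifier-free guidance pattern (tensor contents are random, and the zeroed cross-attention entry stands in for whatever get_unconditional_conditioning returns):

    import torch

    def guided_eps(eps_uncond, eps_cond, scale):
        # standard CFG combination: eps_uncond + scale * (eps_cond - eps_uncond)
        return eps_uncond + scale * (eps_cond - eps_uncond)

    c = {"c_concat": [torch.randn(4, 4, 32, 32)],
         "c_crossattn": [torch.randn(4, 1, 768)]}
    uc = {k: (v if k != "c_crossattn" else [torch.zeros_like(v[0])])
          for k, v in c.items()}

    eps_cond = torch.randn(4, 4, 32, 32)
    eps_uncond = torch.randn(4, 4, 32, 32)
    print(guided_eps(eps_uncond, eps_cond, scale=3.0).shape)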
diff --git a/One-2-3-45-master 2/ldm/models/diffusion/plms.py b/One-2-3-45-master 2/ldm/models/diffusion/plms.py
deleted file mode 100644
index 080edeec9efed663f0e01de0afbbf3bed1cfa1d1..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/models/diffusion/plms.py
+++ /dev/null
@@ -1,259 +0,0 @@
-"""SAMPLING ONLY."""
-
-import torch
-import numpy as np
-from tqdm import tqdm
-from functools import partial
-
-from ldm.modules.diffusionmodules.util import make_ddim_sampling_parameters, make_ddim_timesteps, noise_like
-from ldm.models.diffusion.sampling_util import norm_thresholding
-
-
-class PLMSSampler(object):
- def __init__(self, model, schedule="linear", **kwargs):
- super().__init__()
- self.model = model
- self.ddpm_num_timesteps = model.num_timesteps
- self.schedule = schedule
-
- def register_buffer(self, name, attr):
- if type(attr) == torch.Tensor:
- if attr.device != torch.device("cuda"):
- attr = attr.to(torch.device("cuda"))
- setattr(self, name, attr)
-
- def make_schedule(self, ddim_num_steps, ddim_discretize="uniform", ddim_eta=0., verbose=True):
- if ddim_eta != 0:
- raise ValueError('ddim_eta must be 0 for PLMS')
- self.ddim_timesteps = make_ddim_timesteps(ddim_discr_method=ddim_discretize, num_ddim_timesteps=ddim_num_steps,
- num_ddpm_timesteps=self.ddpm_num_timesteps,verbose=verbose)
- alphas_cumprod = self.model.alphas_cumprod
- assert alphas_cumprod.shape[0] == self.ddpm_num_timesteps, 'alphas have to be defined for each timestep'
- to_torch = lambda x: x.clone().detach().to(torch.float32).to(self.model.device)
-
- self.register_buffer('betas', to_torch(self.model.betas))
- self.register_buffer('alphas_cumprod', to_torch(alphas_cumprod))
- self.register_buffer('alphas_cumprod_prev', to_torch(self.model.alphas_cumprod_prev))
-
- # calculations for diffusion q(x_t | x_{t-1}) and others
- self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod.cpu())))
- self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. - alphas_cumprod.cpu())))
- self.register_buffer('log_one_minus_alphas_cumprod', to_torch(np.log(1. - alphas_cumprod.cpu())))
- self.register_buffer('sqrt_recip_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod.cpu())))
- self.register_buffer('sqrt_recipm1_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod.cpu() - 1)))
-
- # ddim sampling parameters
- ddim_sigmas, ddim_alphas, ddim_alphas_prev = make_ddim_sampling_parameters(alphacums=alphas_cumprod.cpu(),
- ddim_timesteps=self.ddim_timesteps,
- eta=ddim_eta,verbose=verbose)
- self.register_buffer('ddim_sigmas', ddim_sigmas)
- self.register_buffer('ddim_alphas', ddim_alphas)
- self.register_buffer('ddim_alphas_prev', ddim_alphas_prev)
- self.register_buffer('ddim_sqrt_one_minus_alphas', np.sqrt(1. - ddim_alphas))
- sigmas_for_original_sampling_steps = ddim_eta * torch.sqrt(
- (1 - self.alphas_cumprod_prev) / (1 - self.alphas_cumprod) * (
- 1 - self.alphas_cumprod / self.alphas_cumprod_prev))
- self.register_buffer('ddim_sigmas_for_original_num_steps', sigmas_for_original_sampling_steps)
-
- @torch.no_grad()
- def sample(self,
- S,
- batch_size,
- shape,
- conditioning=None,
- callback=None,
- normals_sequence=None,
- img_callback=None,
- quantize_x0=False,
- eta=0.,
- mask=None,
- x0=None,
- temperature=1.,
- noise_dropout=0.,
- score_corrector=None,
- corrector_kwargs=None,
- verbose=True,
- x_T=None,
- log_every_t=100,
- unconditional_guidance_scale=1.,
- unconditional_conditioning=None,
- # this has to come in the same format as the conditioning, # e.g. as encoded tokens, ...
- dynamic_threshold=None,
- **kwargs
- ):
- if conditioning is not None:
- if isinstance(conditioning, dict):
- ctmp = conditioning[list(conditioning.keys())[0]]
- while isinstance(ctmp, list): ctmp = ctmp[0]
- cbs = ctmp.shape[0]
- if cbs != batch_size:
- print(f"Warning: Got {cbs} conditionings but batch-size is {batch_size}")
- else:
- if conditioning.shape[0] != batch_size:
- print(f"Warning: Got {conditioning.shape[0]} conditionings but batch-size is {batch_size}")
-
- self.make_schedule(ddim_num_steps=S, ddim_eta=eta, verbose=verbose)
- # sampling
- C, H, W = shape
- size = (batch_size, C, H, W)
- print(f'Data shape for PLMS sampling is {size}')
-
- samples, intermediates = self.plms_sampling(conditioning, size,
- callback=callback,
- img_callback=img_callback,
- quantize_denoised=quantize_x0,
- mask=mask, x0=x0,
- ddim_use_original_steps=False,
- noise_dropout=noise_dropout,
- temperature=temperature,
- score_corrector=score_corrector,
- corrector_kwargs=corrector_kwargs,
- x_T=x_T,
- log_every_t=log_every_t,
- unconditional_guidance_scale=unconditional_guidance_scale,
- unconditional_conditioning=unconditional_conditioning,
- dynamic_threshold=dynamic_threshold,
- )
- return samples, intermediates
-
- @torch.no_grad()
- def plms_sampling(self, cond, shape,
- x_T=None, ddim_use_original_steps=False,
- callback=None, timesteps=None, quantize_denoised=False,
- mask=None, x0=None, img_callback=None, log_every_t=100,
- temperature=1., noise_dropout=0., score_corrector=None, corrector_kwargs=None,
- unconditional_guidance_scale=1., unconditional_conditioning=None,
- dynamic_threshold=None):
- device = self.model.betas.device
- b = shape[0]
- if x_T is None:
- img = torch.randn(shape, device=device)
- else:
- img = x_T
-
- if timesteps is None:
- timesteps = self.ddpm_num_timesteps if ddim_use_original_steps else self.ddim_timesteps
- elif timesteps is not None and not ddim_use_original_steps:
- subset_end = int(min(timesteps / self.ddim_timesteps.shape[0], 1) * self.ddim_timesteps.shape[0]) - 1
- timesteps = self.ddim_timesteps[:subset_end]
-
- intermediates = {'x_inter': [img], 'pred_x0': [img]}
- time_range = list(reversed(range(0,timesteps))) if ddim_use_original_steps else np.flip(timesteps)
- total_steps = timesteps if ddim_use_original_steps else timesteps.shape[0]
- print(f"Running PLMS Sampling with {total_steps} timesteps")
-
- iterator = tqdm(time_range, desc='PLMS Sampler', total=total_steps)
- old_eps = []
-
- for i, step in enumerate(iterator):
- index = total_steps - i - 1
- ts = torch.full((b,), step, device=device, dtype=torch.long)
- ts_next = torch.full((b,), time_range[min(i + 1, len(time_range) - 1)], device=device, dtype=torch.long)
-
- if mask is not None:
- assert x0 is not None
- img_orig = self.model.q_sample(x0, ts) # TODO: deterministic forward pass?
- img = img_orig * mask + (1. - mask) * img
-
- outs = self.p_sample_plms(img, cond, ts, index=index, use_original_steps=ddim_use_original_steps,
- quantize_denoised=quantize_denoised, temperature=temperature,
- noise_dropout=noise_dropout, score_corrector=score_corrector,
- corrector_kwargs=corrector_kwargs,
- unconditional_guidance_scale=unconditional_guidance_scale,
- unconditional_conditioning=unconditional_conditioning,
- old_eps=old_eps, t_next=ts_next,
- dynamic_threshold=dynamic_threshold)
- img, pred_x0, e_t = outs
- old_eps.append(e_t)
- if len(old_eps) >= 4:
- old_eps.pop(0)
- if callback: callback(i)
- if img_callback: img_callback(pred_x0, i)
-
- if index % log_every_t == 0 or index == total_steps - 1:
- intermediates['x_inter'].append(img)
- intermediates['pred_x0'].append(pred_x0)
-
- return img, intermediates
-
- @torch.no_grad()
- def p_sample_plms(self, x, c, t, index, repeat_noise=False, use_original_steps=False, quantize_denoised=False,
- temperature=1., noise_dropout=0., score_corrector=None, corrector_kwargs=None,
- unconditional_guidance_scale=1., unconditional_conditioning=None, old_eps=None, t_next=None,
- dynamic_threshold=None):
- b, *_, device = *x.shape, x.device
-
- def get_model_output(x, t):
- if unconditional_conditioning is None or unconditional_guidance_scale == 1.:
- e_t = self.model.apply_model(x, t, c)
- else:
- x_in = torch.cat([x] * 2)
- t_in = torch.cat([t] * 2)
- if isinstance(c, dict):
- assert isinstance(unconditional_conditioning, dict)
- c_in = dict()
- for k in c:
- if isinstance(c[k], list):
- c_in[k] = [torch.cat([
- unconditional_conditioning[k][i],
- c[k][i]]) for i in range(len(c[k]))]
- else:
- c_in[k] = torch.cat([
- unconditional_conditioning[k],
- c[k]])
- else:
- c_in = torch.cat([unconditional_conditioning, c])
- e_t_uncond, e_t = self.model.apply_model(x_in, t_in, c_in).chunk(2)
- e_t = e_t_uncond + unconditional_guidance_scale * (e_t - e_t_uncond)
-
- if score_corrector is not None:
- assert self.model.parameterization == "eps"
- e_t = score_corrector.modify_score(self.model, e_t, x, t, c, **corrector_kwargs)
-
- return e_t
-
- alphas = self.model.alphas_cumprod if use_original_steps else self.ddim_alphas
- alphas_prev = self.model.alphas_cumprod_prev if use_original_steps else self.ddim_alphas_prev
- sqrt_one_minus_alphas = self.model.sqrt_one_minus_alphas_cumprod if use_original_steps else self.ddim_sqrt_one_minus_alphas
- sigmas = self.model.ddim_sigmas_for_original_num_steps if use_original_steps else self.ddim_sigmas
-
- def get_x_prev_and_pred_x0(e_t, index):
- # select parameters corresponding to the currently considered timestep
- a_t = torch.full((b, 1, 1, 1), alphas[index], device=device)
- a_prev = torch.full((b, 1, 1, 1), alphas_prev[index], device=device)
- sigma_t = torch.full((b, 1, 1, 1), sigmas[index], device=device)
- sqrt_one_minus_at = torch.full((b, 1, 1, 1), sqrt_one_minus_alphas[index],device=device)
-
- # current prediction for x_0
- pred_x0 = (x - sqrt_one_minus_at * e_t) / a_t.sqrt()
- if quantize_denoised:
- pred_x0, _, *_ = self.model.first_stage_model.quantize(pred_x0)
- if dynamic_threshold is not None:
- pred_x0 = norm_thresholding(pred_x0, dynamic_threshold)
- # direction pointing to x_t
- dir_xt = (1. - a_prev - sigma_t**2).sqrt() * e_t
- noise = sigma_t * noise_like(x.shape, device, repeat_noise) * temperature
- if noise_dropout > 0.:
- noise = torch.nn.functional.dropout(noise, p=noise_dropout)
- x_prev = a_prev.sqrt() * pred_x0 + dir_xt + noise
- return x_prev, pred_x0
-
- e_t = get_model_output(x, t)
- if len(old_eps) == 0:
- # Pseudo Improved Euler (2nd order)
- x_prev, pred_x0 = get_x_prev_and_pred_x0(e_t, index)
- e_t_next = get_model_output(x_prev, t_next)
- e_t_prime = (e_t + e_t_next) / 2
- elif len(old_eps) == 1:
- # 2nd order Pseudo Linear Multistep (Adams-Bashforth)
- e_t_prime = (3 * e_t - old_eps[-1]) / 2
- elif len(old_eps) == 2:
- # 3rd order Pseudo Linear Multistep (Adams-Bashforth)
- e_t_prime = (23 * e_t - 16 * old_eps[-1] + 5 * old_eps[-2]) / 12
- elif len(old_eps) >= 3:
- # 4th order Pseudo Linear Multistep (Adams-Bashforth)
- e_t_prime = (55 * e_t - 59 * old_eps[-1] + 37 * old_eps[-2] - 9 * old_eps[-3]) / 24
-
- x_prev, pred_x0 = get_x_prev_and_pred_x0(e_t_prime, index)
-
- return x_prev, pred_x0, e_t
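The multistep logic at the end of p_sample_plms reduces to the Adams-Bashforth blend below; this is a sketch with dummy tensors, and the real sampler performs a second model evaluation (the pseudo improved Euler step) instead of the len == 0 shortcut shown here:

    import torch

    def plms_eps_prime(e_t, old_eps):
        if len(old_eps) == 0:
            return e_t  # the sampler actually averages e_t with a second prediction here
        if len(old_eps) == 1:
            return (3 * e_t - old_eps[-1]) / 2
        if len(old_eps) == 2:
            return (23 * e_t - 16 * old_eps[-1] + 5 * old_eps[-2]) / 12
        return (55 * e_t - 59 * old_eps[-1] + 37 * old_eps[-2] - 9 * old_eps[-3]) / 24

    e_t = torch.randn(1, 4, 32, 32)
    history = [torch.randn_like(e_t) for _ in range(3)]
    print(plms_eps_prime(e_t, history).shape)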
diff --git a/One-2-3-45-master 2/ldm/models/diffusion/sampling_util.py b/One-2-3-45-master 2/ldm/models/diffusion/sampling_util.py
deleted file mode 100644
index a0ae00fe86044456fc403af403be71ff15112424..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/models/diffusion/sampling_util.py
+++ /dev/null
@@ -1,50 +0,0 @@
-import torch
-import numpy as np
-from einops import rearrange
-
-
-def append_dims(x, target_dims):
- """Appends dimensions to the end of a tensor until it has target_dims dimensions.
- From https://github.com/crowsonkb/k-diffusion/blob/master/k_diffusion/utils.py"""
- dims_to_append = target_dims - x.ndim
- if dims_to_append < 0:
- raise ValueError(f'input has {x.ndim} dims but target_dims is {target_dims}, which is less')
- return x[(...,) + (None,) * dims_to_append]
-
-
-def renorm_thresholding(x0, value):
- # renorm
- pred_max = x0.max()
- pred_min = x0.min()
- pred_x0 = (x0 - pred_min) / (pred_max - pred_min) # 0 ... 1
- pred_x0 = 2 * pred_x0 - 1. # -1 ... 1
-
- s = torch.quantile(
- rearrange(pred_x0, 'b ... -> b (...)').abs(),
- value,
- dim=-1
- )
- s.clamp_(min=1.0)
- s = s.view(-1, *((1,) * (pred_x0.ndim - 1)))
-
- # clip by threshold
- # pred_x0 = pred_x0.clamp(-s, s) / s # needs newer pytorch # TODO bring back to pure-gpu with min/max
-
- # temporary hack: numpy on cpu
- pred_x0 = np.clip(pred_x0.cpu().numpy(), -s.cpu().numpy(), s.cpu().numpy()) / s.cpu().numpy()
- pred_x0 = torch.tensor(pred_x0).to(x0.device)
-
- # re.renorm
- pred_x0 = (pred_x0 + 1.) / 2. # 0 ... 1
- pred_x0 = (pred_max - pred_min) * pred_x0 + pred_min # orig range
- return pred_x0
-
-
-def norm_thresholding(x0, value):
- s = append_dims(x0.pow(2).flatten(1).mean(1).sqrt().clamp(min=value), x0.ndim)
- return x0 * (value / s)
-
-
-def spatial_norm_thresholding(x0, value):
- # b c h w
- s = x0.pow(2).mean(1, keepdim=True).sqrt().clamp(min=value)
- return x0 * (value / s)
\ No newline at end of file
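norm_thresholding above caps the per-sample RMS of the predicted x0 at the given value while leaving already-small samples untouched; a small numeric sketch with random data and an illustrative threshold:

    import torch

    def rms(x):
        return x.pow(2).flatten(1).mean(1).sqrt()

    x0 = torch.randn(2, 4, 8, 8) * torch.tensor([0.5, 5.0]).view(2, 1, 1, 1)
    value = 1.0
    s = rms(x0).clamp(min=value).view(-1, 1, 1, 1)
    x0_thr = x0 * (value / s)
    print(rms(x0))      # roughly [0.5, 5.0]
    print(rms(x0_thr))  # roughly [0.5, 1.0]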
diff --git a/One-2-3-45-master 2/ldm/modules/attention.py b/One-2-3-45-master 2/ldm/modules/attention.py
deleted file mode 100644
index 124effbeee03d2f0950f6cac6aa455be5a6d359f..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/modules/attention.py
+++ /dev/null
@@ -1,266 +0,0 @@
-from inspect import isfunction
-import math
-import torch
-import torch.nn.functional as F
-from torch import nn, einsum
-from einops import rearrange, repeat
-
-from ldm.modules.diffusionmodules.util import checkpoint
-
-
-def exists(val):
- return val is not None
-
-
-def uniq(arr):
- return {el: True for el in arr}.keys()
-
-
-def default(val, d):
- if exists(val):
- return val
- return d() if isfunction(d) else d
-
-
-def max_neg_value(t):
- return -torch.finfo(t.dtype).max
-
-
-def init_(tensor):
- dim = tensor.shape[-1]
- std = 1 / math.sqrt(dim)
- tensor.uniform_(-std, std)
- return tensor
-
-
-# feedforward
-class GEGLU(nn.Module):
- def __init__(self, dim_in, dim_out):
- super().__init__()
- self.proj = nn.Linear(dim_in, dim_out * 2)
-
- def forward(self, x):
- x, gate = self.proj(x).chunk(2, dim=-1)
- return x * F.gelu(gate)
-
-
-class FeedForward(nn.Module):
- def __init__(self, dim, dim_out=None, mult=4, glu=False, dropout=0.):
- super().__init__()
- inner_dim = int(dim * mult)
- dim_out = default(dim_out, dim)
- project_in = nn.Sequential(
- nn.Linear(dim, inner_dim),
- nn.GELU()
- ) if not glu else GEGLU(dim, inner_dim)
-
- self.net = nn.Sequential(
- project_in,
- nn.Dropout(dropout),
- nn.Linear(inner_dim, dim_out)
- )
-
- def forward(self, x):
- return self.net(x)
-
-
-def zero_module(module):
- """
- Zero out the parameters of a module and return it.
- """
- for p in module.parameters():
- p.detach().zero_()
- return module
-
-
-def Normalize(in_channels):
- return torch.nn.GroupNorm(num_groups=32, num_channels=in_channels, eps=1e-6, affine=True)
-
-
-class LinearAttention(nn.Module):
- def __init__(self, dim, heads=4, dim_head=32):
- super().__init__()
- self.heads = heads
- hidden_dim = dim_head * heads
- self.to_qkv = nn.Conv2d(dim, hidden_dim * 3, 1, bias = False)
- self.to_out = nn.Conv2d(hidden_dim, dim, 1)
-
- def forward(self, x):
- b, c, h, w = x.shape
- qkv = self.to_qkv(x)
- q, k, v = rearrange(qkv, 'b (qkv heads c) h w -> qkv b heads c (h w)', heads = self.heads, qkv=3)
- k = k.softmax(dim=-1)
- context = torch.einsum('bhdn,bhen->bhde', k, v)
- out = torch.einsum('bhde,bhdn->bhen', context, q)
- out = rearrange(out, 'b heads c (h w) -> b (heads c) h w', heads=self.heads, h=h, w=w)
- return self.to_out(out)
-
-
-class SpatialSelfAttention(nn.Module):
- def __init__(self, in_channels):
- super().__init__()
- self.in_channels = in_channels
-
- self.norm = Normalize(in_channels)
- self.q = torch.nn.Conv2d(in_channels,
- in_channels,
- kernel_size=1,
- stride=1,
- padding=0)
- self.k = torch.nn.Conv2d(in_channels,
- in_channels,
- kernel_size=1,
- stride=1,
- padding=0)
- self.v = torch.nn.Conv2d(in_channels,
- in_channels,
- kernel_size=1,
- stride=1,
- padding=0)
- self.proj_out = torch.nn.Conv2d(in_channels,
- in_channels,
- kernel_size=1,
- stride=1,
- padding=0)
-
- def forward(self, x):
- h_ = x
- h_ = self.norm(h_)
- q = self.q(h_)
- k = self.k(h_)
- v = self.v(h_)
-
- # compute attention
- b,c,h,w = q.shape
- q = rearrange(q, 'b c h w -> b (h w) c')
- k = rearrange(k, 'b c h w -> b c (h w)')
- w_ = torch.einsum('bij,bjk->bik', q, k)
-
- w_ = w_ * (int(c)**(-0.5))
- w_ = torch.nn.functional.softmax(w_, dim=2)
-
- # attend to values
- v = rearrange(v, 'b c h w -> b c (h w)')
- w_ = rearrange(w_, 'b i j -> b j i')
- h_ = torch.einsum('bij,bjk->bik', v, w_)
- h_ = rearrange(h_, 'b c (h w) -> b c h w', h=h)
- h_ = self.proj_out(h_)
-
- return x+h_
-
-
-class CrossAttention(nn.Module):
- def __init__(self, query_dim, context_dim=None, heads=8, dim_head=64, dropout=0.):
- super().__init__()
- inner_dim = dim_head * heads
- context_dim = default(context_dim, query_dim)
-
- self.scale = dim_head ** -0.5
- self.heads = heads
-
- self.to_q = nn.Linear(query_dim, inner_dim, bias=False)
- self.to_k = nn.Linear(context_dim, inner_dim, bias=False)
- self.to_v = nn.Linear(context_dim, inner_dim, bias=False)
-
- self.to_out = nn.Sequential(
- nn.Linear(inner_dim, query_dim),
- nn.Dropout(dropout)
- )
-
- def forward(self, x, context=None, mask=None):
- h = self.heads
-
- q = self.to_q(x)
- context = default(context, x)
- k = self.to_k(context)
- v = self.to_v(context)
-
- q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> (b h) n d', h=h), (q, k, v))
-
- sim = einsum('b i d, b j d -> b i j', q, k) * self.scale
-
- if exists(mask):
- mask = rearrange(mask, 'b ... -> b (...)')
- max_neg_value = -torch.finfo(sim.dtype).max
- mask = repeat(mask, 'b j -> (b h) () j', h=h)
- sim.masked_fill_(~mask, max_neg_value)
-
- # attention, what we cannot get enough of
- attn = sim.softmax(dim=-1)
-
- out = einsum('b i j, b j d -> b i d', attn, v)
- out = rearrange(out, '(b h) n d -> b n (h d)', h=h)
- return self.to_out(out)
-
-
-class BasicTransformerBlock(nn.Module):
- def __init__(self, dim, n_heads, d_head, dropout=0., context_dim=None, gated_ff=True, checkpoint=True,
- disable_self_attn=False):
- super().__init__()
- self.disable_self_attn = disable_self_attn
- self.attn1 = CrossAttention(query_dim=dim, heads=n_heads, dim_head=d_head, dropout=dropout,
- context_dim=context_dim if self.disable_self_attn else None) # is a self-attention if not self.disable_self_attn
- self.ff = FeedForward(dim, dropout=dropout, glu=gated_ff)
- self.attn2 = CrossAttention(query_dim=dim, context_dim=context_dim,
- heads=n_heads, dim_head=d_head, dropout=dropout) # is self-attn if context is none
- self.norm1 = nn.LayerNorm(dim)
- self.norm2 = nn.LayerNorm(dim)
- self.norm3 = nn.LayerNorm(dim)
- self.checkpoint = checkpoint
-
- def forward(self, x, context=None):
- return checkpoint(self._forward, (x, context), self.parameters(), self.checkpoint)
-
- def _forward(self, x, context=None):
- x = self.attn1(self.norm1(x), context=context if self.disable_self_attn else None) + x
- x = self.attn2(self.norm2(x), context=context) + x
- x = self.ff(self.norm3(x)) + x
- return x
-
-
-class SpatialTransformer(nn.Module):
- """
- Transformer block for image-like data.
- First, project the input (aka embedding)
- and reshape to b, t, d.
- Then apply standard transformer action.
- Finally, reshape to image
- """
- def __init__(self, in_channels, n_heads, d_head,
- depth=1, dropout=0., context_dim=None,
- disable_self_attn=False):
- super().__init__()
- self.in_channels = in_channels
- inner_dim = n_heads * d_head
- self.norm = Normalize(in_channels)
-
- self.proj_in = nn.Conv2d(in_channels,
- inner_dim,
- kernel_size=1,
- stride=1,
- padding=0)
-
- self.transformer_blocks = nn.ModuleList(
- [BasicTransformerBlock(inner_dim, n_heads, d_head, dropout=dropout, context_dim=context_dim,
- disable_self_attn=disable_self_attn)
- for d in range(depth)]
- )
-
- self.proj_out = zero_module(nn.Conv2d(inner_dim,
- in_channels,
- kernel_size=1,
- stride=1,
- padding=0))
-
- def forward(self, x, context=None):
- # note: if no context is given, cross-attention defaults to self-attention
- b, c, h, w = x.shape
- x_in = x
- x = self.norm(x)
- x = self.proj_in(x)
- x = rearrange(x, 'b c h w -> b (h w) c').contiguous()
- for block in self.transformer_blocks:
- x = block(x, context=context)
- x = rearrange(x, 'b (h w) c -> b c h w', h=h, w=w).contiguous()
- x = self.proj_out(x)
- return x + x_in
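The core of CrossAttention above is the pair of scaled dot-product einsums; the following single-head sketch reproduces just that math with made-up shapes (no projections, masking, or head reshaping):

    import torch
    from torch import einsum

    b, n, m, d = 2, 1024, 77, 64   # batch, query tokens, context tokens, head dim
    q = torch.randn(b, n, d)
    k = torch.randn(b, m, d)
    v = torch.randn(b, m, d)

    sim = einsum('b i d, b j d -> b i j', q, k) * d ** -0.5
    attn = sim.softmax(dim=-1)
    out = einsum('b i j, b j d -> b i d', attn, v)
    print(out.shape)               # torch.Size([2, 1024, 64])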
diff --git a/One-2-3-45-master 2/ldm/modules/diffusionmodules/__init__.py b/One-2-3-45-master 2/ldm/modules/diffusionmodules/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/One-2-3-45-master 2/ldm/modules/diffusionmodules/model.py b/One-2-3-45-master 2/ldm/modules/diffusionmodules/model.py
deleted file mode 100644
index 533e589a2024f1d7c52093d8c472c3b1b6617e26..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/modules/diffusionmodules/model.py
+++ /dev/null
@@ -1,835 +0,0 @@
-# pytorch_diffusion + derived encoder decoder
-import math
-import torch
-import torch.nn as nn
-import numpy as np
-from einops import rearrange
-
-from ldm.util import instantiate_from_config
-from ldm.modules.attention import LinearAttention
-
-
-def get_timestep_embedding(timesteps, embedding_dim):
- """
- This matches the implementation in Denoising Diffusion Probabilistic Models:
- From Fairseq.
- Build sinusoidal embeddings.
- This matches the implementation in tensor2tensor, but differs slightly
- from the description in Section 3.5 of "Attention Is All You Need".
- """
- assert len(timesteps.shape) == 1
-
- half_dim = embedding_dim // 2
- emb = math.log(10000) / (half_dim - 1)
- emb = torch.exp(torch.arange(half_dim, dtype=torch.float32) * -emb)
- emb = emb.to(device=timesteps.device)
- emb = timesteps.float()[:, None] * emb[None, :]
- emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=1)
- if embedding_dim % 2 == 1: # zero pad
- emb = torch.nn.functional.pad(emb, (0,1,0,0))
- return emb
-
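A standalone shape check for the sinusoidal embedding defined above (the function is restated so the snippet runs on its own):

    import math
    import torch

    def timestep_embedding(timesteps, dim):
        half = dim // 2
        freqs = torch.exp(torch.arange(half, dtype=torch.float32)
                          * -(math.log(10000) / (half - 1)))
        args = timesteps.float()[:, None] * freqs[None, :]
        emb = torch.cat([torch.sin(args), torch.cos(args)], dim=1)
        if dim % 2 == 1:
            emb = torch.nn.functional.pad(emb, (0, 1, 0, 0))
        return emb

    t = torch.tensor([0, 10, 999])
    print(timestep_embedding(t, 128).shape)  # torch.Size([3, 128])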
-
-def nonlinearity(x):
- # swish
- return x*torch.sigmoid(x)
-
-
-def Normalize(in_channels, num_groups=32):
- return torch.nn.GroupNorm(num_groups=num_groups, num_channels=in_channels, eps=1e-6, affine=True)
-
-
-class Upsample(nn.Module):
- def __init__(self, in_channels, with_conv):
- super().__init__()
- self.with_conv = with_conv
- if self.with_conv:
- self.conv = torch.nn.Conv2d(in_channels,
- in_channels,
- kernel_size=3,
- stride=1,
- padding=1)
-
- def forward(self, x):
- x = torch.nn.functional.interpolate(x, scale_factor=2.0, mode="nearest")
- if self.with_conv:
- x = self.conv(x)
- return x
-
-
-class Downsample(nn.Module):
- def __init__(self, in_channels, with_conv):
- super().__init__()
- self.with_conv = with_conv
- if self.with_conv:
- # no asymmetric padding in torch conv, must do it ourselves
- self.conv = torch.nn.Conv2d(in_channels,
- in_channels,
- kernel_size=3,
- stride=2,
- padding=0)
-
- def forward(self, x):
- if self.with_conv:
- pad = (0,1,0,1)
- x = torch.nn.functional.pad(x, pad, mode="constant", value=0)
- x = self.conv(x)
- else:
- x = torch.nn.functional.avg_pool2d(x, kernel_size=2, stride=2)
- return x
-
-
-class ResnetBlock(nn.Module):
- def __init__(self, *, in_channels, out_channels=None, conv_shortcut=False,
- dropout, temb_channels=512):
- super().__init__()
- self.in_channels = in_channels
- out_channels = in_channels if out_channels is None else out_channels
- self.out_channels = out_channels
- self.use_conv_shortcut = conv_shortcut
-
- self.norm1 = Normalize(in_channels)
- self.conv1 = torch.nn.Conv2d(in_channels,
- out_channels,
- kernel_size=3,
- stride=1,
- padding=1)
- if temb_channels > 0:
- self.temb_proj = torch.nn.Linear(temb_channels,
- out_channels)
- self.norm2 = Normalize(out_channels)
- self.dropout = torch.nn.Dropout(dropout)
- self.conv2 = torch.nn.Conv2d(out_channels,
- out_channels,
- kernel_size=3,
- stride=1,
- padding=1)
- if self.in_channels != self.out_channels:
- if self.use_conv_shortcut:
- self.conv_shortcut = torch.nn.Conv2d(in_channels,
- out_channels,
- kernel_size=3,
- stride=1,
- padding=1)
- else:
- self.nin_shortcut = torch.nn.Conv2d(in_channels,
- out_channels,
- kernel_size=1,
- stride=1,
- padding=0)
-
- def forward(self, x, temb):
- h = x
- h = self.norm1(h)
- h = nonlinearity(h)
- h = self.conv1(h)
-
- if temb is not None:
- h = h + self.temb_proj(nonlinearity(temb))[:,:,None,None]
-
- h = self.norm2(h)
- h = nonlinearity(h)
- h = self.dropout(h)
- h = self.conv2(h)
-
- if self.in_channels != self.out_channels:
- if self.use_conv_shortcut:
- x = self.conv_shortcut(x)
- else:
- x = self.nin_shortcut(x)
-
- return x+h
-
-
-class LinAttnBlock(LinearAttention):
- """to match AttnBlock usage"""
- def __init__(self, in_channels):
- super().__init__(dim=in_channels, heads=1, dim_head=in_channels)
-
-
-class AttnBlock(nn.Module):
- def __init__(self, in_channels):
- super().__init__()
- self.in_channels = in_channels
-
- self.norm = Normalize(in_channels)
- self.q = torch.nn.Conv2d(in_channels,
- in_channels,
- kernel_size=1,
- stride=1,
- padding=0)
- self.k = torch.nn.Conv2d(in_channels,
- in_channels,
- kernel_size=1,
- stride=1,
- padding=0)
- self.v = torch.nn.Conv2d(in_channels,
- in_channels,
- kernel_size=1,
- stride=1,
- padding=0)
- self.proj_out = torch.nn.Conv2d(in_channels,
- in_channels,
- kernel_size=1,
- stride=1,
- padding=0)
-
-
- def forward(self, x):
- h_ = x
- h_ = self.norm(h_)
- q = self.q(h_)
- k = self.k(h_)
- v = self.v(h_)
-
- # compute attention
- b,c,h,w = q.shape
- q = q.reshape(b,c,h*w)
- q = q.permute(0,2,1) # b,hw,c
- k = k.reshape(b,c,h*w) # b,c,hw
- w_ = torch.bmm(q,k) # b,hw,hw w[b,i,j]=sum_c q[b,i,c]k[b,c,j]
- w_ = w_ * (int(c)**(-0.5))
- w_ = torch.nn.functional.softmax(w_, dim=2)
-
- # attend to values
- v = v.reshape(b,c,h*w)
- w_ = w_.permute(0,2,1) # b,hw,hw (first hw of k, second of q)
- h_ = torch.bmm(v,w_) # b, c,hw (hw of q) h_[b,c,j] = sum_i v[b,c,i] w_[b,i,j]
- h_ = h_.reshape(b,c,h,w)
-
- h_ = self.proj_out(h_)
-
- return x+h_
-
-
-def make_attn(in_channels, attn_type="vanilla"):
- assert attn_type in ["vanilla", "linear", "none"], f'attn_type {attn_type} unknown'
- print(f"making attention of type '{attn_type}' with {in_channels} in_channels")
- if attn_type == "vanilla":
- return AttnBlock(in_channels)
- elif attn_type == "none":
- return nn.Identity(in_channels)
- else:
- return LinAttnBlock(in_channels)
-
-
-class Model(nn.Module):
- def __init__(self, *, ch, out_ch, ch_mult=(1,2,4,8), num_res_blocks,
- attn_resolutions, dropout=0.0, resamp_with_conv=True, in_channels,
- resolution, use_timestep=True, use_linear_attn=False, attn_type="vanilla"):
- super().__init__()
- if use_linear_attn: attn_type = "linear"
- self.ch = ch
- self.temb_ch = self.ch*4
- self.num_resolutions = len(ch_mult)
- self.num_res_blocks = num_res_blocks
- self.resolution = resolution
- self.in_channels = in_channels
-
- self.use_timestep = use_timestep
- if self.use_timestep:
- # timestep embedding
- self.temb = nn.Module()
- self.temb.dense = nn.ModuleList([
- torch.nn.Linear(self.ch,
- self.temb_ch),
- torch.nn.Linear(self.temb_ch,
- self.temb_ch),
- ])
-
- # downsampling
- self.conv_in = torch.nn.Conv2d(in_channels,
- self.ch,
- kernel_size=3,
- stride=1,
- padding=1)
-
- curr_res = resolution
- in_ch_mult = (1,)+tuple(ch_mult)
- self.down = nn.ModuleList()
- for i_level in range(self.num_resolutions):
- block = nn.ModuleList()
- attn = nn.ModuleList()
- block_in = ch*in_ch_mult[i_level]
- block_out = ch*ch_mult[i_level]
- for i_block in range(self.num_res_blocks):
- block.append(ResnetBlock(in_channels=block_in,
- out_channels=block_out,
- temb_channels=self.temb_ch,
- dropout=dropout))
- block_in = block_out
- if curr_res in attn_resolutions:
- attn.append(make_attn(block_in, attn_type=attn_type))
- down = nn.Module()
- down.block = block
- down.attn = attn
- if i_level != self.num_resolutions-1:
- down.downsample = Downsample(block_in, resamp_with_conv)
- curr_res = curr_res // 2
- self.down.append(down)
-
- # middle
- self.mid = nn.Module()
- self.mid.block_1 = ResnetBlock(in_channels=block_in,
- out_channels=block_in,
- temb_channels=self.temb_ch,
- dropout=dropout)
- self.mid.attn_1 = make_attn(block_in, attn_type=attn_type)
- self.mid.block_2 = ResnetBlock(in_channels=block_in,
- out_channels=block_in,
- temb_channels=self.temb_ch,
- dropout=dropout)
-
- # upsampling
- self.up = nn.ModuleList()
- for i_level in reversed(range(self.num_resolutions)):
- block = nn.ModuleList()
- attn = nn.ModuleList()
- block_out = ch*ch_mult[i_level]
- skip_in = ch*ch_mult[i_level]
- for i_block in range(self.num_res_blocks+1):
- if i_block == self.num_res_blocks:
- skip_in = ch*in_ch_mult[i_level]
- block.append(ResnetBlock(in_channels=block_in+skip_in,
- out_channels=block_out,
- temb_channels=self.temb_ch,
- dropout=dropout))
- block_in = block_out
- if curr_res in attn_resolutions:
- attn.append(make_attn(block_in, attn_type=attn_type))
- up = nn.Module()
- up.block = block
- up.attn = attn
- if i_level != 0:
- up.upsample = Upsample(block_in, resamp_with_conv)
- curr_res = curr_res * 2
- self.up.insert(0, up) # prepend to get consistent order
-
- # end
- self.norm_out = Normalize(block_in)
- self.conv_out = torch.nn.Conv2d(block_in,
- out_ch,
- kernel_size=3,
- stride=1,
- padding=1)
-
- def forward(self, x, t=None, context=None):
- #assert x.shape[2] == x.shape[3] == self.resolution
- if context is not None:
- # assume aligned context, cat along channel axis
- x = torch.cat((x, context), dim=1)
- if self.use_timestep:
- # timestep embedding
- assert t is not None
- temb = get_timestep_embedding(t, self.ch)
- temb = self.temb.dense[0](temb)
- temb = nonlinearity(temb)
- temb = self.temb.dense[1](temb)
- else:
- temb = None
-
- # downsampling
- hs = [self.conv_in(x)]
- for i_level in range(self.num_resolutions):
- for i_block in range(self.num_res_blocks):
- h = self.down[i_level].block[i_block](hs[-1], temb)
- if len(self.down[i_level].attn) > 0:
- h = self.down[i_level].attn[i_block](h)
- hs.append(h)
- if i_level != self.num_resolutions-1:
- hs.append(self.down[i_level].downsample(hs[-1]))
-
- # middle
- h = hs[-1]
- h = self.mid.block_1(h, temb)
- h = self.mid.attn_1(h)
- h = self.mid.block_2(h, temb)
-
- # upsampling
- for i_level in reversed(range(self.num_resolutions)):
- for i_block in range(self.num_res_blocks+1):
- h = self.up[i_level].block[i_block](
- torch.cat([h, hs.pop()], dim=1), temb)
- if len(self.up[i_level].attn) > 0:
- h = self.up[i_level].attn[i_block](h)
- if i_level != 0:
- h = self.up[i_level].upsample(h)
-
- # end
- h = self.norm_out(h)
- h = nonlinearity(h)
- h = self.conv_out(h)
- return h
-
- def get_last_layer(self):
- return self.conv_out.weight
-
-
-class Encoder(nn.Module):
- def __init__(self, *, ch, out_ch, ch_mult=(1,2,4,8), num_res_blocks,
- attn_resolutions, dropout=0.0, resamp_with_conv=True, in_channels,
- resolution, z_channels, double_z=True, use_linear_attn=False, attn_type="vanilla",
- **ignore_kwargs):
- super().__init__()
- if use_linear_attn: attn_type = "linear"
- self.ch = ch
- self.temb_ch = 0
- self.num_resolutions = len(ch_mult)
- self.num_res_blocks = num_res_blocks
- self.resolution = resolution
- self.in_channels = in_channels
-
- # downsampling
- self.conv_in = torch.nn.Conv2d(in_channels,
- self.ch,
- kernel_size=3,
- stride=1,
- padding=1)
-
- curr_res = resolution
- in_ch_mult = (1,)+tuple(ch_mult)
- self.in_ch_mult = in_ch_mult
- self.down = nn.ModuleList()
- for i_level in range(self.num_resolutions):
- block = nn.ModuleList()
- attn = nn.ModuleList()
- block_in = ch*in_ch_mult[i_level]
- block_out = ch*ch_mult[i_level]
- for i_block in range(self.num_res_blocks):
- block.append(ResnetBlock(in_channels=block_in,
- out_channels=block_out,
- temb_channels=self.temb_ch,
- dropout=dropout))
- block_in = block_out
- if curr_res in attn_resolutions:
- attn.append(make_attn(block_in, attn_type=attn_type))
- down = nn.Module()
- down.block = block
- down.attn = attn
- if i_level != self.num_resolutions-1:
- down.downsample = Downsample(block_in, resamp_with_conv)
- curr_res = curr_res // 2
- self.down.append(down)
-
- # middle
- self.mid = nn.Module()
- self.mid.block_1 = ResnetBlock(in_channels=block_in,
- out_channels=block_in,
- temb_channels=self.temb_ch,
- dropout=dropout)
- self.mid.attn_1 = make_attn(block_in, attn_type=attn_type)
- self.mid.block_2 = ResnetBlock(in_channels=block_in,
- out_channels=block_in,
- temb_channels=self.temb_ch,
- dropout=dropout)
-
- # end
- self.norm_out = Normalize(block_in)
- self.conv_out = torch.nn.Conv2d(block_in,
- 2*z_channels if double_z else z_channels,
- kernel_size=3,
- stride=1,
- padding=1)
-
- def forward(self, x):
- # timestep embedding
- temb = None
-
- # downsampling
- hs = [self.conv_in(x)]
- for i_level in range(self.num_resolutions):
- for i_block in range(self.num_res_blocks):
- h = self.down[i_level].block[i_block](hs[-1], temb)
- if len(self.down[i_level].attn) > 0:
- h = self.down[i_level].attn[i_block](h)
- hs.append(h)
- if i_level != self.num_resolutions-1:
- hs.append(self.down[i_level].downsample(hs[-1]))
-
- # middle
- h = hs[-1]
- h = self.mid.block_1(h, temb)
- h = self.mid.attn_1(h)
- h = self.mid.block_2(h, temb)
-
- # end
- h = self.norm_out(h)
- h = nonlinearity(h)
- h = self.conv_out(h)
- return h
-
-
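A back-of-the-envelope view of the Encoder geometry above: with len(ch_mult) resolution levels the spatial size shrinks by 2**(levels-1), and double_z doubles the output channels (mean and log-variance of the posterior). The config values below are illustrative only:

    ch_mult = (1, 2, 4, 4)
    resolution = 256
    z_channels = 4
    double_z = True

    levels = len(ch_mult)
    latent_res = resolution // 2 ** (levels - 1)
    out_channels = 2 * z_channels if double_z else z_channels
    print(latent_res, out_channels)  # 32, 8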
-class Decoder(nn.Module):
- def __init__(self, *, ch, out_ch, ch_mult=(1,2,4,8), num_res_blocks,
- attn_resolutions, dropout=0.0, resamp_with_conv=True, in_channels,
- resolution, z_channels, give_pre_end=False, tanh_out=False, use_linear_attn=False,
- attn_type="vanilla", **ignorekwargs):
- super().__init__()
- if use_linear_attn: attn_type = "linear"
- self.ch = ch
- self.temb_ch = 0
- self.num_resolutions = len(ch_mult)
- self.num_res_blocks = num_res_blocks
- self.resolution = resolution
- self.in_channels = in_channels
- self.give_pre_end = give_pre_end
- self.tanh_out = tanh_out
-
- # compute in_ch_mult, block_in and curr_res at lowest res
- in_ch_mult = (1,)+tuple(ch_mult)
- block_in = ch*ch_mult[self.num_resolutions-1]
- curr_res = resolution // 2**(self.num_resolutions-1)
- self.z_shape = (1,z_channels,curr_res,curr_res)
- print("Working with z of shape {} = {} dimensions.".format(
- self.z_shape, np.prod(self.z_shape)))
-
- # z to block_in
- self.conv_in = torch.nn.Conv2d(z_channels,
- block_in,
- kernel_size=3,
- stride=1,
- padding=1)
-
- # middle
- self.mid = nn.Module()
- self.mid.block_1 = ResnetBlock(in_channels=block_in,
- out_channels=block_in,
- temb_channels=self.temb_ch,
- dropout=dropout)
- self.mid.attn_1 = make_attn(block_in, attn_type=attn_type)
- self.mid.block_2 = ResnetBlock(in_channels=block_in,
- out_channels=block_in,
- temb_channels=self.temb_ch,
- dropout=dropout)
-
- # upsampling
- self.up = nn.ModuleList()
- for i_level in reversed(range(self.num_resolutions)):
- block = nn.ModuleList()
- attn = nn.ModuleList()
- block_out = ch*ch_mult[i_level]
- for i_block in range(self.num_res_blocks+1):
- block.append(ResnetBlock(in_channels=block_in,
- out_channels=block_out,
- temb_channels=self.temb_ch,
- dropout=dropout))
- block_in = block_out
- if curr_res in attn_resolutions:
- attn.append(make_attn(block_in, attn_type=attn_type))
- up = nn.Module()
- up.block = block
- up.attn = attn
- if i_level != 0:
- up.upsample = Upsample(block_in, resamp_with_conv)
- curr_res = curr_res * 2
- self.up.insert(0, up) # prepend to get consistent order
-
- # end
- self.norm_out = Normalize(block_in)
- self.conv_out = torch.nn.Conv2d(block_in,
- out_ch,
- kernel_size=3,
- stride=1,
- padding=1)
-
- def forward(self, z):
- #assert z.shape[1:] == self.z_shape[1:]
- self.last_z_shape = z.shape
-
- # timestep embedding
- temb = None
-
- # z to block_in
- h = self.conv_in(z)
-
- # middle
- h = self.mid.block_1(h, temb)
- h = self.mid.attn_1(h)
- h = self.mid.block_2(h, temb)
-
- # upsampling
- for i_level in reversed(range(self.num_resolutions)):
- for i_block in range(self.num_res_blocks+1):
- h = self.up[i_level].block[i_block](h, temb)
- if len(self.up[i_level].attn) > 0:
- h = self.up[i_level].attn[i_block](h)
- if i_level != 0:
- h = self.up[i_level].upsample(h)
-
- # end
- if self.give_pre_end:
- return h
-
- h = self.norm_out(h)
- h = nonlinearity(h)
- h = self.conv_out(h)
- if self.tanh_out:
- h = torch.tanh(h)
- return h
-
-
-class SimpleDecoder(nn.Module):
- def __init__(self, in_channels, out_channels, *args, **kwargs):
- super().__init__()
- self.model = nn.ModuleList([nn.Conv2d(in_channels, in_channels, 1),
- ResnetBlock(in_channels=in_channels,
- out_channels=2 * in_channels,
- temb_channels=0, dropout=0.0),
- ResnetBlock(in_channels=2 * in_channels,
- out_channels=4 * in_channels,
- temb_channels=0, dropout=0.0),
- ResnetBlock(in_channels=4 * in_channels,
- out_channels=2 * in_channels,
- temb_channels=0, dropout=0.0),
- nn.Conv2d(2*in_channels, in_channels, 1),
- Upsample(in_channels, with_conv=True)])
- # end
- self.norm_out = Normalize(in_channels)
- self.conv_out = torch.nn.Conv2d(in_channels,
- out_channels,
- kernel_size=3,
- stride=1,
- padding=1)
-
- def forward(self, x):
- for i, layer in enumerate(self.model):
- if i in [1,2,3]:
- x = layer(x, None)
- else:
- x = layer(x)
-
- h = self.norm_out(x)
- h = nonlinearity(h)
- x = self.conv_out(h)
- return x
-
-
-class UpsampleDecoder(nn.Module):
- def __init__(self, in_channels, out_channels, ch, num_res_blocks, resolution,
- ch_mult=(2,2), dropout=0.0):
- super().__init__()
- # upsampling
- self.temb_ch = 0
- self.num_resolutions = len(ch_mult)
- self.num_res_blocks = num_res_blocks
- block_in = in_channels
- curr_res = resolution // 2 ** (self.num_resolutions - 1)
- self.res_blocks = nn.ModuleList()
- self.upsample_blocks = nn.ModuleList()
- for i_level in range(self.num_resolutions):
- res_block = []
- block_out = ch * ch_mult[i_level]
- for i_block in range(self.num_res_blocks + 1):
- res_block.append(ResnetBlock(in_channels=block_in,
- out_channels=block_out,
- temb_channels=self.temb_ch,
- dropout=dropout))
- block_in = block_out
- self.res_blocks.append(nn.ModuleList(res_block))
- if i_level != self.num_resolutions - 1:
- self.upsample_blocks.append(Upsample(block_in, True))
- curr_res = curr_res * 2
-
- # end
- self.norm_out = Normalize(block_in)
- self.conv_out = torch.nn.Conv2d(block_in,
- out_channels,
- kernel_size=3,
- stride=1,
- padding=1)
-
- def forward(self, x):
- # upsampling
- h = x
- for k, i_level in enumerate(range(self.num_resolutions)):
- for i_block in range(self.num_res_blocks + 1):
- h = self.res_blocks[i_level][i_block](h, None)
- if i_level != self.num_resolutions - 1:
- h = self.upsample_blocks[k](h)
- h = self.norm_out(h)
- h = nonlinearity(h)
- h = self.conv_out(h)
- return h
-
-
-class LatentRescaler(nn.Module):
- def __init__(self, factor, in_channels, mid_channels, out_channels, depth=2):
- super().__init__()
- # residual block, interpolate, residual block
- self.factor = factor
- self.conv_in = nn.Conv2d(in_channels,
- mid_channels,
- kernel_size=3,
- stride=1,
- padding=1)
- self.res_block1 = nn.ModuleList([ResnetBlock(in_channels=mid_channels,
- out_channels=mid_channels,
- temb_channels=0,
- dropout=0.0) for _ in range(depth)])
- self.attn = AttnBlock(mid_channels)
- self.res_block2 = nn.ModuleList([ResnetBlock(in_channels=mid_channels,
- out_channels=mid_channels,
- temb_channels=0,
- dropout=0.0) for _ in range(depth)])
-
- self.conv_out = nn.Conv2d(mid_channels,
- out_channels,
- kernel_size=1,
- )
-
- def forward(self, x):
- x = self.conv_in(x)
- for block in self.res_block1:
- x = block(x, None)
- x = torch.nn.functional.interpolate(x, size=(int(round(x.shape[2]*self.factor)), int(round(x.shape[3]*self.factor))))
- x = self.attn(x)
- for block in self.res_block2:
- x = block(x, None)
- x = self.conv_out(x)
- return x
-
-
-class MergedRescaleEncoder(nn.Module):
- def __init__(self, in_channels, ch, resolution, out_ch, num_res_blocks,
- attn_resolutions, dropout=0.0, resamp_with_conv=True,
- ch_mult=(1,2,4,8), rescale_factor=1.0, rescale_module_depth=1):
- super().__init__()
- intermediate_chn = ch * ch_mult[-1]
- self.encoder = Encoder(in_channels=in_channels, num_res_blocks=num_res_blocks, ch=ch, ch_mult=ch_mult,
- z_channels=intermediate_chn, double_z=False, resolution=resolution,
- attn_resolutions=attn_resolutions, dropout=dropout, resamp_with_conv=resamp_with_conv,
- out_ch=None)
- self.rescaler = LatentRescaler(factor=rescale_factor, in_channels=intermediate_chn,
- mid_channels=intermediate_chn, out_channels=out_ch, depth=rescale_module_depth)
-
- def forward(self, x):
- x = self.encoder(x)
- x = self.rescaler(x)
- return x
-
-
-class MergedRescaleDecoder(nn.Module):
- def __init__(self, z_channels, out_ch, resolution, num_res_blocks, attn_resolutions, ch, ch_mult=(1,2,4,8),
- dropout=0.0, resamp_with_conv=True, rescale_factor=1.0, rescale_module_depth=1):
- super().__init__()
- tmp_chn = z_channels*ch_mult[-1]
- self.decoder = Decoder(out_ch=out_ch, z_channels=tmp_chn, attn_resolutions=attn_resolutions, dropout=dropout,
- resamp_with_conv=resamp_with_conv, in_channels=None, num_res_blocks=num_res_blocks,
- ch_mult=ch_mult, resolution=resolution, ch=ch)
- self.rescaler = LatentRescaler(factor=rescale_factor, in_channels=z_channels, mid_channels=tmp_chn,
- out_channels=tmp_chn, depth=rescale_module_depth)
-
- def forward(self, x):
- x = self.rescaler(x)
- x = self.decoder(x)
- return x
-
-
-class Upsampler(nn.Module):
- def __init__(self, in_size, out_size, in_channels, out_channels, ch_mult=2):
- super().__init__()
- assert out_size >= in_size
- num_blocks = int(np.log2(out_size//in_size))+1
- factor_up = 1.+ (out_size % in_size)
- print(f"Building {self.__class__.__name__} with in_size: {in_size} --> out_size {out_size} and factor {factor_up}")
- self.rescaler = LatentRescaler(factor=factor_up, in_channels=in_channels, mid_channels=2*in_channels,
- out_channels=in_channels)
- self.decoder = Decoder(out_ch=out_channels, resolution=out_size, z_channels=in_channels, num_res_blocks=2,
- attn_resolutions=[], in_channels=None, ch=in_channels,
- ch_mult=[ch_mult for _ in range(num_blocks)])
-
- def forward(self, x):
- x = self.rescaler(x)
- x = self.decoder(x)
- return x
-
-
-class Resize(nn.Module):
- def __init__(self, in_channels=None, learned=False, mode="bilinear"):
- super().__init__()
- self.with_conv = learned
- self.mode = mode
- if self.with_conv:
- print(f"Note: {self.__class__.__name} uses learned downsampling and will ignore the fixed {mode} mode")
- raise NotImplementedError()
- assert in_channels is not None
- # no asymmetric padding in torch conv, must do it ourselves
- self.conv = torch.nn.Conv2d(in_channels,
- in_channels,
- kernel_size=4,
- stride=2,
- padding=1)
-
- def forward(self, x, scale_factor=1.0):
- if scale_factor==1.0:
- return x
- else:
- x = torch.nn.functional.interpolate(x, mode=self.mode, align_corners=False, scale_factor=scale_factor)
- return x
-
-class FirstStagePostProcessor(nn.Module):
-
- def __init__(self, ch_mult:list, in_channels,
- pretrained_model:nn.Module=None,
- reshape=False,
- n_channels=None,
- dropout=0.,
- pretrained_config=None):
- super().__init__()
- if pretrained_config is None:
- assert pretrained_model is not None, 'Either "pretrained_model" or "pretrained_config" must not be None'
- self.pretrained_model = pretrained_model
- else:
- assert pretrained_config is not None, 'Either "pretrained_model" or "pretrained_config" must not be None'
- self.instantiate_pretrained(pretrained_config)
-
- self.do_reshape = reshape
-
- if n_channels is None:
- n_channels = self.pretrained_model.encoder.ch
-
- self.proj_norm = Normalize(in_channels,num_groups=in_channels//2)
- self.proj = nn.Conv2d(in_channels,n_channels,kernel_size=3,
- stride=1,padding=1)
-
- blocks = []
- downs = []
- ch_in = n_channels
- for m in ch_mult:
- blocks.append(ResnetBlock(in_channels=ch_in,out_channels=m*n_channels,dropout=dropout))
- ch_in = m * n_channels
- downs.append(Downsample(ch_in, with_conv=False))
-
- self.model = nn.ModuleList(blocks)
- self.downsampler = nn.ModuleList(downs)
-
-
- def instantiate_pretrained(self, config):
- model = instantiate_from_config(config)
- self.pretrained_model = model.eval()
- # self.pretrained_model.train = False
- for param in self.pretrained_model.parameters():
- param.requires_grad = False
-
-
- @torch.no_grad()
- def encode_with_pretrained(self,x):
- c = self.pretrained_model.encode(x)
- if isinstance(c, DiagonalGaussianDistribution):
- c = c.mode()
- return c
-
- def forward(self,x):
- z_fs = self.encode_with_pretrained(x)
- z = self.proj_norm(z_fs)
- z = self.proj(z)
- z = nonlinearity(z)
-
- for submodel, downmodel in zip(self.model,self.downsampler):
- z = submodel(z,temb=None)
- z = downmodel(z)
-
- if self.do_reshape:
- z = rearrange(z,'b c h w -> b (h w) c')
- return z
-
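Note (annotation between hunks, not part of the deleted file): the Decoder above works on a latent whose spatial size is the image resolution divided by 2**(len(ch_mult)-1). A minimal standalone sketch of that bookkeeping, assuming a typical KL-f8 autoencoder configuration (256px images, ch_mult=(1,2,4,4), z_channels=4), which reproduces the shape reported by Decoder.__init__:

    import numpy as np

    def decoder_z_shape(resolution, ch_mult, z_channels):
        # Mirrors the deleted Decoder.__init__: the latent lives at the lowest
        # resolution, i.e. the input resolution divided by 2**(levels - 1).
        num_resolutions = len(ch_mult)
        curr_res = resolution // 2 ** (num_resolutions - 1)
        return (1, z_channels, curr_res, curr_res)

    # Example (assumed config): 256px, ch_mult=(1,2,4,4), 4 latent channels -> (1, 4, 32, 32)
    shape = decoder_z_shape(256, (1, 2, 4, 4), 4)
    print("Working with z of shape {} = {} dimensions.".format(shape, np.prod(shape)))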
diff --git a/One-2-3-45-master 2/ldm/modules/diffusionmodules/openaimodel.py b/One-2-3-45-master 2/ldm/modules/diffusionmodules/openaimodel.py
deleted file mode 100644
index 6b994cca787464d34f6367edf486974b3542f808..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/modules/diffusionmodules/openaimodel.py
+++ /dev/null
@@ -1,996 +0,0 @@
-from abc import abstractmethod
-from functools import partial
-import math
-from typing import Iterable
-
-import numpy as np
-import torch as th
-import torch.nn as nn
-import torch.nn.functional as F
-
-from ldm.modules.diffusionmodules.util import (
- checkpoint,
- conv_nd,
- linear,
- avg_pool_nd,
- zero_module,
- normalization,
- timestep_embedding,
-)
-from ldm.modules.attention import SpatialTransformer
-from ldm.util import exists
-
-
-# dummy replace
-def convert_module_to_f16(x):
- pass
-
-def convert_module_to_f32(x):
- pass
-
-
-## go
-class AttentionPool2d(nn.Module):
- """
- Adapted from CLIP: https://github.com/openai/CLIP/blob/main/clip/model.py
- """
-
- def __init__(
- self,
- spacial_dim: int,
- embed_dim: int,
- num_heads_channels: int,
- output_dim: int = None,
- ):
- super().__init__()
- self.positional_embedding = nn.Parameter(th.randn(embed_dim, spacial_dim ** 2 + 1) / embed_dim ** 0.5)
- self.qkv_proj = conv_nd(1, embed_dim, 3 * embed_dim, 1)
- self.c_proj = conv_nd(1, embed_dim, output_dim or embed_dim, 1)
- self.num_heads = embed_dim // num_heads_channels
- self.attention = QKVAttention(self.num_heads)
-
- def forward(self, x):
- b, c, *_spatial = x.shape
- x = x.reshape(b, c, -1) # NC(HW)
- x = th.cat([x.mean(dim=-1, keepdim=True), x], dim=-1) # NC(HW+1)
- x = x + self.positional_embedding[None, :, :].to(x.dtype) # NC(HW+1)
- x = self.qkv_proj(x)
- x = self.attention(x)
- x = self.c_proj(x)
- return x[:, :, 0]
-
-
-class TimestepBlock(nn.Module):
- """
- Any module where forward() takes timestep embeddings as a second argument.
- """
-
- @abstractmethod
- def forward(self, x, emb):
- """
- Apply the module to `x` given `emb` timestep embeddings.
- """
-
-
-class TimestepEmbedSequential(nn.Sequential, TimestepBlock):
- """
- A sequential module that passes timestep embeddings to the children that
- support it as an extra input.
- """
-
- def forward(self, x, emb, context=None):
- for layer in self:
- if isinstance(layer, TimestepBlock):
- x = layer(x, emb)
- elif isinstance(layer, SpatialTransformer):
- x = layer(x, context)
- else:
- x = layer(x)
- return x
-
-
-class Upsample(nn.Module):
- """
- An upsampling layer with an optional convolution.
- :param channels: channels in the inputs and outputs.
- :param use_conv: a bool determining if a convolution is applied.
- :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then
- upsampling occurs in the inner-two dimensions.
- """
-
- def __init__(self, channels, use_conv, dims=2, out_channels=None, padding=1):
- super().__init__()
- self.channels = channels
- self.out_channels = out_channels or channels
- self.use_conv = use_conv
- self.dims = dims
- if use_conv:
- self.conv = conv_nd(dims, self.channels, self.out_channels, 3, padding=padding)
-
- def forward(self, x):
- assert x.shape[1] == self.channels
- if self.dims == 3:
- x = F.interpolate(
- x, (x.shape[2], x.shape[3] * 2, x.shape[4] * 2), mode="nearest"
- )
- else:
- x = F.interpolate(x, scale_factor=2, mode="nearest")
- if self.use_conv:
- x = self.conv(x)
- return x
-
-class TransposedUpsample(nn.Module):
- 'Learned 2x upsampling without padding'
- def __init__(self, channels, out_channels=None, ks=5):
- super().__init__()
- self.channels = channels
- self.out_channels = out_channels or channels
-
- self.up = nn.ConvTranspose2d(self.channels,self.out_channels,kernel_size=ks,stride=2)
-
- def forward(self,x):
- return self.up(x)
-
-
-class Downsample(nn.Module):
- """
- A downsampling layer with an optional convolution.
- :param channels: channels in the inputs and outputs.
- :param use_conv: a bool determining if a convolution is applied.
- :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then
- downsampling occurs in the inner-two dimensions.
- """
-
- def __init__(self, channels, use_conv, dims=2, out_channels=None,padding=1):
- super().__init__()
- self.channels = channels
- self.out_channels = out_channels or channels
- self.use_conv = use_conv
- self.dims = dims
- stride = 2 if dims != 3 else (1, 2, 2)
- if use_conv:
- self.op = conv_nd(
- dims, self.channels, self.out_channels, 3, stride=stride, padding=padding
- )
- else:
- assert self.channels == self.out_channels
- self.op = avg_pool_nd(dims, kernel_size=stride, stride=stride)
-
- def forward(self, x):
- assert x.shape[1] == self.channels
- return self.op(x)
-
-
-class ResBlock(TimestepBlock):
- """
- A residual block that can optionally change the number of channels.
- :param channels: the number of input channels.
- :param emb_channels: the number of timestep embedding channels.
- :param dropout: the rate of dropout.
- :param out_channels: if specified, the number of out channels.
- :param use_conv: if True and out_channels is specified, use a spatial
- convolution instead of a smaller 1x1 convolution to change the
- channels in the skip connection.
- :param dims: determines if the signal is 1D, 2D, or 3D.
- :param use_checkpoint: if True, use gradient checkpointing on this module.
- :param up: if True, use this block for upsampling.
- :param down: if True, use this block for downsampling.
- """
-
- def __init__(
- self,
- channels,
- emb_channels,
- dropout,
- out_channels=None,
- use_conv=False,
- use_scale_shift_norm=False,
- dims=2,
- use_checkpoint=False,
- up=False,
- down=False,
- ):
- super().__init__()
- self.channels = channels
- self.emb_channels = emb_channels
- self.dropout = dropout
- self.out_channels = out_channels or channels
- self.use_conv = use_conv
- self.use_checkpoint = use_checkpoint
- self.use_scale_shift_norm = use_scale_shift_norm
-
- self.in_layers = nn.Sequential(
- normalization(channels),
- nn.SiLU(),
- conv_nd(dims, channels, self.out_channels, 3, padding=1),
- )
-
- self.updown = up or down
-
- if up:
- self.h_upd = Upsample(channels, False, dims)
- self.x_upd = Upsample(channels, False, dims)
- elif down:
- self.h_upd = Downsample(channels, False, dims)
- self.x_upd = Downsample(channels, False, dims)
- else:
- self.h_upd = self.x_upd = nn.Identity()
-
- self.emb_layers = nn.Sequential(
- nn.SiLU(),
- linear(
- emb_channels,
- 2 * self.out_channels if use_scale_shift_norm else self.out_channels,
- ),
- )
- self.out_layers = nn.Sequential(
- normalization(self.out_channels),
- nn.SiLU(),
- nn.Dropout(p=dropout),
- zero_module(
- conv_nd(dims, self.out_channels, self.out_channels, 3, padding=1)
- ),
- )
-
- if self.out_channels == channels:
- self.skip_connection = nn.Identity()
- elif use_conv:
- self.skip_connection = conv_nd(
- dims, channels, self.out_channels, 3, padding=1
- )
- else:
- self.skip_connection = conv_nd(dims, channels, self.out_channels, 1)
-
- def forward(self, x, emb):
- """
- Apply the block to a Tensor, conditioned on a timestep embedding.
- :param x: an [N x C x ...] Tensor of features.
- :param emb: an [N x emb_channels] Tensor of timestep embeddings.
- :return: an [N x C x ...] Tensor of outputs.
- """
- return checkpoint(
- self._forward, (x, emb), self.parameters(), self.use_checkpoint
- )
-
-
- def _forward(self, x, emb):
- if self.updown:
- in_rest, in_conv = self.in_layers[:-1], self.in_layers[-1]
- h = in_rest(x)
- h = self.h_upd(h)
- x = self.x_upd(x)
- h = in_conv(h)
- else:
- h = self.in_layers(x)
- emb_out = self.emb_layers(emb).type(h.dtype)
- while len(emb_out.shape) < len(h.shape):
- emb_out = emb_out[..., None]
- if self.use_scale_shift_norm:
- out_norm, out_rest = self.out_layers[0], self.out_layers[1:]
- scale, shift = th.chunk(emb_out, 2, dim=1)
- h = out_norm(h) * (1 + scale) + shift
- h = out_rest(h)
- else:
- h = h + emb_out
- h = self.out_layers(h)
- return self.skip_connection(x) + h
-
-
-class AttentionBlock(nn.Module):
- """
- An attention block that allows spatial positions to attend to each other.
- Originally ported from here, but adapted to the N-d case.
- https://github.com/hojonathanho/diffusion/blob/1e0dceb3b3495bbe19116a5e1b3596cd0706c543/diffusion_tf/models/unet.py#L66.
- """
-
- def __init__(
- self,
- channels,
- num_heads=1,
- num_head_channels=-1,
- use_checkpoint=False,
- use_new_attention_order=False,
- ):
- super().__init__()
- self.channels = channels
- if num_head_channels == -1:
- self.num_heads = num_heads
- else:
- assert (
- channels % num_head_channels == 0
- ), f"q,k,v channels {channels} is not divisible by num_head_channels {num_head_channels}"
- self.num_heads = channels // num_head_channels
- self.use_checkpoint = use_checkpoint
- self.norm = normalization(channels)
- self.qkv = conv_nd(1, channels, channels * 3, 1)
- if use_new_attention_order:
- # split qkv before split heads
- self.attention = QKVAttention(self.num_heads)
- else:
- # split heads before split qkv
- self.attention = QKVAttentionLegacy(self.num_heads)
-
- self.proj_out = zero_module(conv_nd(1, channels, channels, 1))
-
- def forward(self, x):
- return checkpoint(self._forward, (x,), self.parameters(), True) # TODO: check checkpoint usage, is True # TODO: fix the .half call!!!
- #return pt_checkpoint(self._forward, x) # pytorch
-
- def _forward(self, x):
- b, c, *spatial = x.shape
- x = x.reshape(b, c, -1)
- qkv = self.qkv(self.norm(x))
- h = self.attention(qkv)
- h = self.proj_out(h)
- return (x + h).reshape(b, c, *spatial)
-
-
-def count_flops_attn(model, _x, y):
- """
- A counter for the `thop` package to count the operations in an
- attention operation.
- Meant to be used like:
- macs, params = thop.profile(
- model,
- inputs=(inputs, timestamps),
- custom_ops={QKVAttention: QKVAttention.count_flops},
- )
- """
- b, c, *spatial = y[0].shape
- num_spatial = int(np.prod(spatial))
- # We perform two matmuls with the same number of ops.
- # The first computes the weight matrix, the second computes
- # the combination of the value vectors.
- matmul_ops = 2 * b * (num_spatial ** 2) * c
- model.total_ops += th.DoubleTensor([matmul_ops])
-
-
-class QKVAttentionLegacy(nn.Module):
- """
- A module which performs QKV attention. Matches legacy QKVAttention + input/output heads shaping
- """
-
- def __init__(self, n_heads):
- super().__init__()
- self.n_heads = n_heads
-
- def forward(self, qkv):
- """
- Apply QKV attention.
- :param qkv: an [N x (H * 3 * C) x T] tensor of Qs, Ks, and Vs.
- :return: an [N x (H * C) x T] tensor after attention.
- """
- bs, width, length = qkv.shape
- assert width % (3 * self.n_heads) == 0
- ch = width // (3 * self.n_heads)
- q, k, v = qkv.reshape(bs * self.n_heads, ch * 3, length).split(ch, dim=1)
- scale = 1 / math.sqrt(math.sqrt(ch))
- weight = th.einsum(
- "bct,bcs->bts", q * scale, k * scale
- ) # More stable with f16 than dividing afterwards
- weight = th.softmax(weight.float(), dim=-1).type(weight.dtype)
- a = th.einsum("bts,bcs->bct", weight, v)
- return a.reshape(bs, -1, length)
-
- @staticmethod
- def count_flops(model, _x, y):
- return count_flops_attn(model, _x, y)
-
-
-class QKVAttention(nn.Module):
- """
- A module which performs QKV attention and splits in a different order.
- """
-
- def __init__(self, n_heads):
- super().__init__()
- self.n_heads = n_heads
-
- def forward(self, qkv):
- """
- Apply QKV attention.
- :param qkv: an [N x (3 * H * C) x T] tensor of Qs, Ks, and Vs.
- :return: an [N x (H * C) x T] tensor after attention.
- """
- bs, width, length = qkv.shape
- assert width % (3 * self.n_heads) == 0
- ch = width // (3 * self.n_heads)
- q, k, v = qkv.chunk(3, dim=1)
- scale = 1 / math.sqrt(math.sqrt(ch))
- weight = th.einsum(
- "bct,bcs->bts",
- (q * scale).view(bs * self.n_heads, ch, length),
- (k * scale).view(bs * self.n_heads, ch, length),
- ) # More stable with f16 than dividing afterwards
- weight = th.softmax(weight.float(), dim=-1).type(weight.dtype)
- a = th.einsum("bts,bcs->bct", weight, v.reshape(bs * self.n_heads, ch, length))
- return a.reshape(bs, -1, length)
-
- @staticmethod
- def count_flops(model, _x, y):
- return count_flops_attn(model, _x, y)
-
-
-class UNetModel(nn.Module):
- """
- The full UNet model with attention and timestep embedding.
- :param in_channels: channels in the input Tensor.
- :param model_channels: base channel count for the model.
- :param out_channels: channels in the output Tensor.
- :param num_res_blocks: number of residual blocks per downsample.
- :param attention_resolutions: a collection of downsample rates at which
- attention will take place. May be a set, list, or tuple.
- For example, if this contains 4, then at 4x downsampling, attention
- will be used.
- :param dropout: the dropout probability.
- :param channel_mult: channel multiplier for each level of the UNet.
- :param conv_resample: if True, use learned convolutions for upsampling and
- downsampling.
- :param dims: determines if the signal is 1D, 2D, or 3D.
- :param num_classes: if specified (as an int), then this model will be
- class-conditional with `num_classes` classes.
- :param use_checkpoint: use gradient checkpointing to reduce memory usage.
- :param num_heads: the number of attention heads in each attention layer.
- :param num_head_channels: if specified, ignore num_heads and instead use
- a fixed channel width per attention head.
- :param num_heads_upsample: works with num_heads to set a different number
- of heads for upsampling. Deprecated.
- :param use_scale_shift_norm: use a FiLM-like conditioning mechanism.
- :param resblock_updown: use residual blocks for up/downsampling.
- :param use_new_attention_order: use a different attention pattern for potentially
- increased efficiency.
- """
-
- def __init__(
- self,
- image_size,
- in_channels,
- model_channels,
- out_channels,
- num_res_blocks,
- attention_resolutions,
- dropout=0,
- channel_mult=(1, 2, 4, 8),
- conv_resample=True,
- dims=2,
- num_classes=None,
- use_checkpoint=False,
- use_fp16=False,
- num_heads=-1,
- num_head_channels=-1,
- num_heads_upsample=-1,
- use_scale_shift_norm=False,
- resblock_updown=False,
- use_new_attention_order=False,
- use_spatial_transformer=False, # custom transformer support
- transformer_depth=1, # custom transformer support
- context_dim=None, # custom transformer support
- n_embed=None, # custom support for prediction of discrete ids into codebook of first stage vq model
- legacy=True,
- disable_self_attentions=None,
- num_attention_blocks=None
- ):
- super().__init__()
- if use_spatial_transformer:
- assert context_dim is not None, 'Fool!! You forgot to include the dimension of your cross-attention conditioning...'
-
- if context_dim is not None:
- assert use_spatial_transformer, 'Fool!! You forgot to use the spatial transformer for your cross-attention conditioning...'
- from omegaconf.listconfig import ListConfig
- if type(context_dim) == ListConfig:
- context_dim = list(context_dim)
-
- if num_heads_upsample == -1:
- num_heads_upsample = num_heads
-
- if num_heads == -1:
- assert num_head_channels != -1, 'Either num_heads or num_head_channels has to be set'
-
- if num_head_channels == -1:
- assert num_heads != -1, 'Either num_heads or num_head_channels has to be set'
-
- self.image_size = image_size
- self.in_channels = in_channels
- self.model_channels = model_channels
- self.out_channels = out_channels
- if isinstance(num_res_blocks, int):
- self.num_res_blocks = len(channel_mult) * [num_res_blocks]
- else:
- if len(num_res_blocks) != len(channel_mult):
- raise ValueError("provide num_res_blocks either as an int (globally constant) or "
- "as a list/tuple (per-level) with the same length as channel_mult")
- self.num_res_blocks = num_res_blocks
- #self.num_res_blocks = num_res_blocks
- if disable_self_attentions is not None:
- # should be a list of booleans, indicating whether to disable self-attention in TransformerBlocks or not
- assert len(disable_self_attentions) == len(channel_mult)
- if num_attention_blocks is not None:
- assert len(num_attention_blocks) == len(self.num_res_blocks)
- assert all(map(lambda i: self.num_res_blocks[i] >= num_attention_blocks[i], range(len(num_attention_blocks))))
- print(f"Constructor of UNetModel received num_attention_blocks={num_attention_blocks}. "
- f"This option has LESS priority than attention_resolutions {attention_resolutions}, "
- f"i.e., in cases where num_attention_blocks[i] > 0 but 2**i not in attention_resolutions, "
- f"attention will still not be set.") # todo: convert to warning
-
- self.attention_resolutions = attention_resolutions
- self.dropout = dropout
- self.channel_mult = channel_mult
- self.conv_resample = conv_resample
- self.num_classes = num_classes
- self.use_checkpoint = use_checkpoint
- self.dtype = th.float16 if use_fp16 else th.float32
- self.num_heads = num_heads
- self.num_head_channels = num_head_channels
- self.num_heads_upsample = num_heads_upsample
- self.predict_codebook_ids = n_embed is not None
-
- time_embed_dim = model_channels * 4
- self.time_embed = nn.Sequential(
- linear(model_channels, time_embed_dim),
- nn.SiLU(),
- linear(time_embed_dim, time_embed_dim),
- )
-
- if self.num_classes is not None:
- self.label_emb = nn.Embedding(num_classes, time_embed_dim)
-
- self.input_blocks = nn.ModuleList(
- [
- TimestepEmbedSequential(
- conv_nd(dims, in_channels, model_channels, 3, padding=1)
- )
- ]
- )
- self._feature_size = model_channels
- input_block_chans = [model_channels]
- ch = model_channels
- ds = 1
- for level, mult in enumerate(channel_mult):
- for nr in range(self.num_res_blocks[level]):
- layers = [
- ResBlock(
- ch,
- time_embed_dim,
- dropout,
- out_channels=mult * model_channels,
- dims=dims,
- use_checkpoint=use_checkpoint,
- use_scale_shift_norm=use_scale_shift_norm,
- )
- ]
- ch = mult * model_channels
- if ds in attention_resolutions:
- if num_head_channels == -1:
- dim_head = ch // num_heads
- else:
- num_heads = ch // num_head_channels
- dim_head = num_head_channels
- if legacy:
- #num_heads = 1
- dim_head = ch // num_heads if use_spatial_transformer else num_head_channels
- if exists(disable_self_attentions):
- disabled_sa = disable_self_attentions[level]
- else:
- disabled_sa = False
-
- if not exists(num_attention_blocks) or nr < num_attention_blocks[level]:
- layers.append(
- AttentionBlock(
- ch,
- use_checkpoint=use_checkpoint,
- num_heads=num_heads,
- num_head_channels=dim_head,
- use_new_attention_order=use_new_attention_order,
- ) if not use_spatial_transformer else SpatialTransformer(
- ch, num_heads, dim_head, depth=transformer_depth, context_dim=context_dim,
- disable_self_attn=disabled_sa
- )
- )
- self.input_blocks.append(TimestepEmbedSequential(*layers))
- self._feature_size += ch
- input_block_chans.append(ch)
- if level != len(channel_mult) - 1:
- out_ch = ch
- self.input_blocks.append(
- TimestepEmbedSequential(
- ResBlock(
- ch,
- time_embed_dim,
- dropout,
- out_channels=out_ch,
- dims=dims,
- use_checkpoint=use_checkpoint,
- use_scale_shift_norm=use_scale_shift_norm,
- down=True,
- )
- if resblock_updown
- else Downsample(
- ch, conv_resample, dims=dims, out_channels=out_ch
- )
- )
- )
- ch = out_ch
- input_block_chans.append(ch)
- ds *= 2
- self._feature_size += ch
-
- if num_head_channels == -1:
- dim_head = ch // num_heads
- else:
- num_heads = ch // num_head_channels
- dim_head = num_head_channels
- if legacy:
- #num_heads = 1
- dim_head = ch // num_heads if use_spatial_transformer else num_head_channels
- self.middle_block = TimestepEmbedSequential(
- ResBlock(
- ch,
- time_embed_dim,
- dropout,
- dims=dims,
- use_checkpoint=use_checkpoint,
- use_scale_shift_norm=use_scale_shift_norm,
- ),
- AttentionBlock(
- ch,
- use_checkpoint=use_checkpoint,
- num_heads=num_heads,
- num_head_channels=dim_head,
- use_new_attention_order=use_new_attention_order,
- ) if not use_spatial_transformer else SpatialTransformer( # always uses a self-attn
- ch, num_heads, dim_head, depth=transformer_depth, context_dim=context_dim
- ),
- ResBlock(
- ch,
- time_embed_dim,
- dropout,
- dims=dims,
- use_checkpoint=use_checkpoint,
- use_scale_shift_norm=use_scale_shift_norm,
- ),
- )
- self._feature_size += ch
-
- self.output_blocks = nn.ModuleList([])
- for level, mult in list(enumerate(channel_mult))[::-1]:
- for i in range(self.num_res_blocks[level] + 1):
- ich = input_block_chans.pop()
- layers = [
- ResBlock(
- ch + ich,
- time_embed_dim,
- dropout,
- out_channels=model_channels * mult,
- dims=dims,
- use_checkpoint=use_checkpoint,
- use_scale_shift_norm=use_scale_shift_norm,
- )
- ]
- ch = model_channels * mult
- if ds in attention_resolutions:
- if num_head_channels == -1:
- dim_head = ch // num_heads
- else:
- num_heads = ch // num_head_channels
- dim_head = num_head_channels
- if legacy:
- #num_heads = 1
- dim_head = ch // num_heads if use_spatial_transformer else num_head_channels
- if exists(disable_self_attentions):
- disabled_sa = disable_self_attentions[level]
- else:
- disabled_sa = False
-
- if not exists(num_attention_blocks) or i < num_attention_blocks[level]:
- layers.append(
- AttentionBlock(
- ch,
- use_checkpoint=use_checkpoint,
- num_heads=num_heads_upsample,
- num_head_channels=dim_head,
- use_new_attention_order=use_new_attention_order,
- ) if not use_spatial_transformer else SpatialTransformer(
- ch, num_heads, dim_head, depth=transformer_depth, context_dim=context_dim,
- disable_self_attn=disabled_sa
- )
- )
- if level and i == self.num_res_blocks[level]:
- out_ch = ch
- layers.append(
- ResBlock(
- ch,
- time_embed_dim,
- dropout,
- out_channels=out_ch,
- dims=dims,
- use_checkpoint=use_checkpoint,
- use_scale_shift_norm=use_scale_shift_norm,
- up=True,
- )
- if resblock_updown
- else Upsample(ch, conv_resample, dims=dims, out_channels=out_ch)
- )
- ds //= 2
- self.output_blocks.append(TimestepEmbedSequential(*layers))
- self._feature_size += ch
-
- self.out = nn.Sequential(
- normalization(ch),
- nn.SiLU(),
- zero_module(conv_nd(dims, model_channels, out_channels, 3, padding=1)),
- )
- if self.predict_codebook_ids:
- self.id_predictor = nn.Sequential(
- normalization(ch),
- conv_nd(dims, model_channels, n_embed, 1),
- #nn.LogSoftmax(dim=1) # change to cross_entropy and produce non-normalized logits
- )
-
- def convert_to_fp16(self):
- """
- Convert the torso of the model to float16.
- """
- self.input_blocks.apply(convert_module_to_f16)
- self.middle_block.apply(convert_module_to_f16)
- self.output_blocks.apply(convert_module_to_f16)
-
- def convert_to_fp32(self):
- """
- Convert the torso of the model to float32.
- """
- self.input_blocks.apply(convert_module_to_f32)
- self.middle_block.apply(convert_module_to_f32)
- self.output_blocks.apply(convert_module_to_f32)
-
- def forward(self, x, timesteps=None, context=None, y=None,**kwargs):
- """
- Apply the model to an input batch.
- :param x: an [N x C x ...] Tensor of inputs.
- :param timesteps: a 1-D batch of timesteps.
- :param context: conditioning plugged in via crossattn
- :param y: an [N] Tensor of labels, if class-conditional.
- :return: an [N x C x ...] Tensor of outputs.
- """
- assert (y is not None) == (
- self.num_classes is not None
- ), "must specify y if and only if the model is class-conditional"
- hs = []
- t_emb = timestep_embedding(timesteps, self.model_channels, repeat_only=False)
- emb = self.time_embed(t_emb)
-
- if self.num_classes is not None:
- assert y.shape == (x.shape[0],)
- emb = emb + self.label_emb(y)
-
- h = x.type(self.dtype)
- for module in self.input_blocks:
- h = module(h, emb, context)
- hs.append(h)
- h = self.middle_block(h, emb, context)
- for module in self.output_blocks:
- h = th.cat([h, hs.pop()], dim=1)
- h = module(h, emb, context)
- h = h.type(x.dtype)
- if self.predict_codebook_ids:
- return self.id_predictor(h)
- else:
- return self.out(h)
-
-
-class EncoderUNetModel(nn.Module):
- """
- The half UNet model with attention and timestep embedding.
- For usage, see UNet.
- """
-
- def __init__(
- self,
- image_size,
- in_channels,
- model_channels,
- out_channels,
- num_res_blocks,
- attention_resolutions,
- dropout=0,
- channel_mult=(1, 2, 4, 8),
- conv_resample=True,
- dims=2,
- use_checkpoint=False,
- use_fp16=False,
- num_heads=1,
- num_head_channels=-1,
- num_heads_upsample=-1,
- use_scale_shift_norm=False,
- resblock_updown=False,
- use_new_attention_order=False,
- pool="adaptive",
- *args,
- **kwargs
- ):
- super().__init__()
-
- if num_heads_upsample == -1:
- num_heads_upsample = num_heads
-
- self.in_channels = in_channels
- self.model_channels = model_channels
- self.out_channels = out_channels
- self.num_res_blocks = num_res_blocks
- self.attention_resolutions = attention_resolutions
- self.dropout = dropout
- self.channel_mult = channel_mult
- self.conv_resample = conv_resample
- self.use_checkpoint = use_checkpoint
- self.dtype = th.float16 if use_fp16 else th.float32
- self.num_heads = num_heads
- self.num_head_channels = num_head_channels
- self.num_heads_upsample = num_heads_upsample
-
- time_embed_dim = model_channels * 4
- self.time_embed = nn.Sequential(
- linear(model_channels, time_embed_dim),
- nn.SiLU(),
- linear(time_embed_dim, time_embed_dim),
- )
-
- self.input_blocks = nn.ModuleList(
- [
- TimestepEmbedSequential(
- conv_nd(dims, in_channels, model_channels, 3, padding=1)
- )
- ]
- )
- self._feature_size = model_channels
- input_block_chans = [model_channels]
- ch = model_channels
- ds = 1
- for level, mult in enumerate(channel_mult):
- for _ in range(num_res_blocks):
- layers = [
- ResBlock(
- ch,
- time_embed_dim,
- dropout,
- out_channels=mult * model_channels,
- dims=dims,
- use_checkpoint=use_checkpoint,
- use_scale_shift_norm=use_scale_shift_norm,
- )
- ]
- ch = mult * model_channels
- if ds in attention_resolutions:
- layers.append(
- AttentionBlock(
- ch,
- use_checkpoint=use_checkpoint,
- num_heads=num_heads,
- num_head_channels=num_head_channels,
- use_new_attention_order=use_new_attention_order,
- )
- )
- self.input_blocks.append(TimestepEmbedSequential(*layers))
- self._feature_size += ch
- input_block_chans.append(ch)
- if level != len(channel_mult) - 1:
- out_ch = ch
- self.input_blocks.append(
- TimestepEmbedSequential(
- ResBlock(
- ch,
- time_embed_dim,
- dropout,
- out_channels=out_ch,
- dims=dims,
- use_checkpoint=use_checkpoint,
- use_scale_shift_norm=use_scale_shift_norm,
- down=True,
- )
- if resblock_updown
- else Downsample(
- ch, conv_resample, dims=dims, out_channels=out_ch
- )
- )
- )
- ch = out_ch
- input_block_chans.append(ch)
- ds *= 2
- self._feature_size += ch
-
- self.middle_block = TimestepEmbedSequential(
- ResBlock(
- ch,
- time_embed_dim,
- dropout,
- dims=dims,
- use_checkpoint=use_checkpoint,
- use_scale_shift_norm=use_scale_shift_norm,
- ),
- AttentionBlock(
- ch,
- use_checkpoint=use_checkpoint,
- num_heads=num_heads,
- num_head_channels=num_head_channels,
- use_new_attention_order=use_new_attention_order,
- ),
- ResBlock(
- ch,
- time_embed_dim,
- dropout,
- dims=dims,
- use_checkpoint=use_checkpoint,
- use_scale_shift_norm=use_scale_shift_norm,
- ),
- )
- self._feature_size += ch
- self.pool = pool
- if pool == "adaptive":
- self.out = nn.Sequential(
- normalization(ch),
- nn.SiLU(),
- nn.AdaptiveAvgPool2d((1, 1)),
- zero_module(conv_nd(dims, ch, out_channels, 1)),
- nn.Flatten(),
- )
- elif pool == "attention":
- assert num_head_channels != -1
- self.out = nn.Sequential(
- normalization(ch),
- nn.SiLU(),
- AttentionPool2d(
- (image_size // ds), ch, num_head_channels, out_channels
- ),
- )
- elif pool == "spatial":
- self.out = nn.Sequential(
- nn.Linear(self._feature_size, 2048),
- nn.ReLU(),
- nn.Linear(2048, self.out_channels),
- )
- elif pool == "spatial_v2":
- self.out = nn.Sequential(
- nn.Linear(self._feature_size, 2048),
- normalization(2048),
- nn.SiLU(),
- nn.Linear(2048, self.out_channels),
- )
- else:
- raise NotImplementedError(f"Unexpected {pool} pooling")
-
- def convert_to_fp16(self):
- """
- Convert the torso of the model to float16.
- """
- self.input_blocks.apply(convert_module_to_f16)
- self.middle_block.apply(convert_module_to_f16)
-
- def convert_to_fp32(self):
- """
- Convert the torso of the model to float32.
- """
- self.input_blocks.apply(convert_module_to_f32)
- self.middle_block.apply(convert_module_to_f32)
-
- def forward(self, x, timesteps):
- """
- Apply the model to an input batch.
- :param x: an [N x C x ...] Tensor of inputs.
- :param timesteps: a 1-D batch of timesteps.
- :return: an [N x K] Tensor of outputs.
- """
- emb = self.time_embed(timestep_embedding(timesteps, self.model_channels))
-
- results = []
- h = x.type(self.dtype)
- for module in self.input_blocks:
- h = module(h, emb)
- if self.pool.startswith("spatial"):
- results.append(h.type(x.dtype).mean(dim=(2, 3)))
- h = self.middle_block(h, emb)
- if self.pool.startswith("spatial"):
- results.append(h.type(x.dtype).mean(dim=(2, 3)))
- h = th.cat(results, axis=-1)
- return self.out(h)
- else:
- h = h.type(x.dtype)
- return self.out(h)
-
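Note (annotation, not repository code): in the UNetModel constructor above, the running downsample rate ds starts at 1 and doubles after every level except the last, and a level receives attention/transformer blocks only when ds appears in attention_resolutions. A small hypothetical helper that mirrors this bookkeeping, assuming the common LDM settings channel_mult=(1,2,4,4) and attention_resolutions=[4,2,1]:

    def attention_levels(channel_mult, attention_resolutions):
        # Returns (level, downsample_rate, has_attention) per UNet level,
        # following the ds bookkeeping of the deleted UNetModel constructor.
        out, ds = [], 1
        for level, _mult in enumerate(channel_mult):
            out.append((level, ds, ds in attention_resolutions))
            if level != len(channel_mult) - 1:
                ds *= 2
        return out

    # With the assumed config, attention sits at downsample rates 1, 2 and 4
    # (the first three levels) and is skipped at rate 8:
    print(attention_levels((1, 2, 4, 4), [4, 2, 1]))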
diff --git a/One-2-3-45-master 2/ldm/modules/diffusionmodules/util.py b/One-2-3-45-master 2/ldm/modules/diffusionmodules/util.py
deleted file mode 100644
index a952e6c40308c33edd422da0ce6a60f47e73661b..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/modules/diffusionmodules/util.py
+++ /dev/null
@@ -1,267 +0,0 @@
-# adapted from
-# https://github.com/openai/improved-diffusion/blob/main/improved_diffusion/gaussian_diffusion.py
-# and
-# https://github.com/lucidrains/denoising-diffusion-pytorch/blob/7706bdfc6f527f58d33f84b7b522e61e6e3164b3/denoising_diffusion_pytorch/denoising_diffusion_pytorch.py
-# and
-# https://github.com/openai/guided-diffusion/blob/0ba878e517b276c45d1195eb29f6f5f72659a05b/guided_diffusion/nn.py
-#
-# thanks!
-
-
-import os
-import math
-import torch
-import torch.nn as nn
-import numpy as np
-from einops import repeat
-
-from ldm.util import instantiate_from_config
-
-
-def make_beta_schedule(schedule, n_timestep, linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3):
- if schedule == "linear":
- betas = (
- torch.linspace(linear_start ** 0.5, linear_end ** 0.5, n_timestep, dtype=torch.float64) ** 2
- )
-
- elif schedule == "cosine":
- timesteps = (
- torch.arange(n_timestep + 1, dtype=torch.float64) / n_timestep + cosine_s
- )
- alphas = timesteps / (1 + cosine_s) * np.pi / 2
- alphas = torch.cos(alphas).pow(2)
- alphas = alphas / alphas[0]
- betas = 1 - alphas[1:] / alphas[:-1]
- betas = np.clip(betas, a_min=0, a_max=0.999)
-
- elif schedule == "sqrt_linear":
- betas = torch.linspace(linear_start, linear_end, n_timestep, dtype=torch.float64)
- elif schedule == "sqrt":
- betas = torch.linspace(linear_start, linear_end, n_timestep, dtype=torch.float64) ** 0.5
- else:
- raise ValueError(f"schedule '{schedule}' unknown.")
- return betas.numpy()
-
-
-def make_ddim_timesteps(ddim_discr_method, num_ddim_timesteps, num_ddpm_timesteps, verbose=True):
- if ddim_discr_method == 'uniform':
- c = num_ddpm_timesteps // num_ddim_timesteps
- ddim_timesteps = np.asarray(list(range(0, num_ddpm_timesteps, c)))
- elif ddim_discr_method == 'quad':
- ddim_timesteps = ((np.linspace(0, np.sqrt(num_ddpm_timesteps * .8), num_ddim_timesteps)) ** 2).astype(int)
- else:
- raise NotImplementedError(f'There is no ddim discretization method called "{ddim_discr_method}"')
-
- # assert ddim_timesteps.shape[0] == num_ddim_timesteps
- # add one to get the final alpha values right (the ones from first scale to data during sampling)
- steps_out = ddim_timesteps + 1
- if verbose:
- print(f'Selected timesteps for ddim sampler: {steps_out}')
- return steps_out
-
-
-def make_ddim_sampling_parameters(alphacums, ddim_timesteps, eta, verbose=True):
- # select alphas for computing the variance schedule
- alphas = alphacums[ddim_timesteps]
- alphas_prev = np.asarray([alphacums[0]] + alphacums[ddim_timesteps[:-1]].tolist())
-
- # according to the formula provided in https://arxiv.org/abs/2010.02502
- sigmas = eta * np.sqrt((1 - alphas_prev) / (1 - alphas) * (1 - alphas / alphas_prev))
- if verbose:
- print(f'Selected alphas for ddim sampler: a_t: {alphas}; a_(t-1): {alphas_prev}')
- print(f'For the chosen value of eta, which is {eta}, '
- f'this results in the following sigma_t schedule for ddim sampler {sigmas}')
- return sigmas, alphas, alphas_prev
-
-
-def betas_for_alpha_bar(num_diffusion_timesteps, alpha_bar, max_beta=0.999):
- """
- Create a beta schedule that discretizes the given alpha_t_bar function,
- which defines the cumulative product of (1-beta) over time from t = [0,1].
- :param num_diffusion_timesteps: the number of betas to produce.
- :param alpha_bar: a lambda that takes an argument t from 0 to 1 and
- produces the cumulative product of (1-beta) up to that
- part of the diffusion process.
- :param max_beta: the maximum beta to use; use values lower than 1 to
- prevent singularities.
- """
- betas = []
- for i in range(num_diffusion_timesteps):
- t1 = i / num_diffusion_timesteps
- t2 = (i + 1) / num_diffusion_timesteps
- betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta))
- return np.array(betas)
-
-
-def extract_into_tensor(a, t, x_shape):
- b, *_ = t.shape
- out = a.gather(-1, t)
- return out.reshape(b, *((1,) * (len(x_shape) - 1)))
-
-
-def checkpoint(func, inputs, params, flag):
- """
- Evaluate a function without caching intermediate activations, allowing for
- reduced memory at the expense of extra compute in the backward pass.
- :param func: the function to evaluate.
- :param inputs: the argument sequence to pass to `func`.
- :param params: a sequence of parameters `func` depends on but does not
- explicitly take as arguments.
- :param flag: if False, disable gradient checkpointing.
- """
- if flag:
- args = tuple(inputs) + tuple(params)
- return CheckpointFunction.apply(func, len(inputs), *args)
- else:
- return func(*inputs)
-
-
-class CheckpointFunction(torch.autograd.Function):
- @staticmethod
- def forward(ctx, run_function, length, *args):
- ctx.run_function = run_function
- ctx.input_tensors = list(args[:length])
- ctx.input_params = list(args[length:])
-
- with torch.no_grad():
- output_tensors = ctx.run_function(*ctx.input_tensors)
- return output_tensors
-
- @staticmethod
- def backward(ctx, *output_grads):
- ctx.input_tensors = [x.detach().requires_grad_(True) for x in ctx.input_tensors]
- with torch.enable_grad():
- # Fixes a bug where the first op in run_function modifies the
- # Tensor storage in place, which is not allowed for detach()'d
- # Tensors.
- shallow_copies = [x.view_as(x) for x in ctx.input_tensors]
- output_tensors = ctx.run_function(*shallow_copies)
- input_grads = torch.autograd.grad(
- output_tensors,
- ctx.input_tensors + ctx.input_params,
- output_grads,
- allow_unused=True,
- )
- del ctx.input_tensors
- del ctx.input_params
- del output_tensors
- return (None, None) + input_grads
-
-
-def timestep_embedding(timesteps, dim, max_period=10000, repeat_only=False):
- """
- Create sinusoidal timestep embeddings.
- :param timesteps: a 1-D Tensor of N indices, one per batch element.
- These may be fractional.
- :param dim: the dimension of the output.
- :param max_period: controls the minimum frequency of the embeddings.
- :return: an [N x dim] Tensor of positional embeddings.
- """
- if not repeat_only:
- half = dim // 2
- freqs = torch.exp(
- -math.log(max_period) * torch.arange(start=0, end=half, dtype=torch.float32) / half
- ).to(device=timesteps.device)
- args = timesteps[:, None].float() * freqs[None]
- embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1)
- if dim % 2:
- embedding = torch.cat([embedding, torch.zeros_like(embedding[:, :1])], dim=-1)
- else:
- embedding = repeat(timesteps, 'b -> b d', d=dim)
- return embedding
-
-
-def zero_module(module):
- """
- Zero out the parameters of a module and return it.
- """
- for p in module.parameters():
- p.detach().zero_()
- return module
-
-
-def scale_module(module, scale):
- """
- Scale the parameters of a module and return it.
- """
- for p in module.parameters():
- p.detach().mul_(scale)
- return module
-
-
-def mean_flat(tensor):
- """
- Take the mean over all non-batch dimensions.
- """
- return tensor.mean(dim=list(range(1, len(tensor.shape))))
-
-
-def normalization(channels):
- """
- Make a standard normalization layer.
- :param channels: number of input channels.
- :return: an nn.Module for normalization.
- """
- return GroupNorm32(32, channels)
-
-
-# PyTorch 1.7 has SiLU, but we support PyTorch 1.5.
-class SiLU(nn.Module):
- def forward(self, x):
- return x * torch.sigmoid(x)
-
-
-class GroupNorm32(nn.GroupNorm):
- def forward(self, x):
- return super().forward(x.float()).type(x.dtype)
-
-def conv_nd(dims, *args, **kwargs):
- """
- Create a 1D, 2D, or 3D convolution module.
- """
- if dims == 1:
- return nn.Conv1d(*args, **kwargs)
- elif dims == 2:
- return nn.Conv2d(*args, **kwargs)
- elif dims == 3:
- return nn.Conv3d(*args, **kwargs)
- raise ValueError(f"unsupported dimensions: {dims}")
-
-
-def linear(*args, **kwargs):
- """
- Create a linear module.
- """
- return nn.Linear(*args, **kwargs)
-
-
-def avg_pool_nd(dims, *args, **kwargs):
- """
- Create a 1D, 2D, or 3D average pooling module.
- """
- if dims == 1:
- return nn.AvgPool1d(*args, **kwargs)
- elif dims == 2:
- return nn.AvgPool2d(*args, **kwargs)
- elif dims == 3:
- return nn.AvgPool3d(*args, **kwargs)
- raise ValueError(f"unsupported dimensions: {dims}")
-
-
-class HybridConditioner(nn.Module):
-
- def __init__(self, c_concat_config, c_crossattn_config):
- super().__init__()
- self.concat_conditioner = instantiate_from_config(c_concat_config)
- self.crossattn_conditioner = instantiate_from_config(c_crossattn_config)
-
- def forward(self, c_concat, c_crossattn):
- c_concat = self.concat_conditioner(c_concat)
- c_crossattn = self.crossattn_conditioner(c_crossattn)
- return {'c_concat': [c_concat], 'c_crossattn': [c_crossattn]}
-
-
-def noise_like(shape, device, repeat=False):
- repeat_noise = lambda: torch.randn((1, *shape[1:]), device=device).repeat(shape[0], *((1,) * (len(shape) - 1)))
- noise = lambda: torch.randn(shape, device=device)
- return repeat_noise() if repeat else noise()
\ No newline at end of file
diff --git a/One-2-3-45-master 2/ldm/modules/distributions/__init__.py b/One-2-3-45-master 2/ldm/modules/distributions/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/One-2-3-45-master 2/ldm/modules/distributions/distributions.py b/One-2-3-45-master 2/ldm/modules/distributions/distributions.py
deleted file mode 100644
index f2b8ef901130efc171aa69742ca0244d94d3f2e9..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/modules/distributions/distributions.py
+++ /dev/null
@@ -1,92 +0,0 @@
-import torch
-import numpy as np
-
-
-class AbstractDistribution:
- def sample(self):
- raise NotImplementedError()
-
- def mode(self):
- raise NotImplementedError()
-
-
-class DiracDistribution(AbstractDistribution):
- def __init__(self, value):
- self.value = value
-
- def sample(self):
- return self.value
-
- def mode(self):
- return self.value
-
-
-class DiagonalGaussianDistribution(object):
- def __init__(self, parameters, deterministic=False):
- self.parameters = parameters
- self.mean, self.logvar = torch.chunk(parameters, 2, dim=1)
- self.logvar = torch.clamp(self.logvar, -30.0, 20.0)
- self.deterministic = deterministic
- self.std = torch.exp(0.5 * self.logvar)
- self.var = torch.exp(self.logvar)
- if self.deterministic:
- self.var = self.std = torch.zeros_like(self.mean).to(device=self.parameters.device)
-
- def sample(self):
- x = self.mean + self.std * torch.randn(self.mean.shape).to(device=self.parameters.device)
- return x
-
- def kl(self, other=None):
- if self.deterministic:
- return torch.Tensor([0.])
- else:
- if other is None:
- return 0.5 * torch.sum(torch.pow(self.mean, 2)
- + self.var - 1.0 - self.logvar,
- dim=[1, 2, 3])
- else:
- return 0.5 * torch.sum(
- torch.pow(self.mean - other.mean, 2) / other.var
- + self.var / other.var - 1.0 - self.logvar + other.logvar,
- dim=[1, 2, 3])
-
- def nll(self, sample, dims=[1,2,3]):
- if self.deterministic:
- return torch.Tensor([0.])
- logtwopi = np.log(2.0 * np.pi)
- return 0.5 * torch.sum(
- logtwopi + self.logvar + torch.pow(sample - self.mean, 2) / self.var,
- dim=dims)
-
- def mode(self):
- return self.mean
-
-
-def normal_kl(mean1, logvar1, mean2, logvar2):
- """
- source: https://github.com/openai/guided-diffusion/blob/27c20a8fab9cb472df5d6bdd6c8d11c8f430b924/guided_diffusion/losses.py#L12
- Compute the KL divergence between two gaussians.
- Shapes are automatically broadcasted, so batches can be compared to
- scalars, among other use cases.
- """
- tensor = None
- for obj in (mean1, logvar1, mean2, logvar2):
- if isinstance(obj, torch.Tensor):
- tensor = obj
- break
- assert tensor is not None, "at least one argument must be a Tensor"
-
- # Force variances to be Tensors. Broadcasting helps convert scalars to
- # Tensors, but it does not work for torch.exp().
- logvar1, logvar2 = [
- x if isinstance(x, torch.Tensor) else torch.tensor(x).to(tensor)
- for x in (logvar1, logvar2)
- ]
-
- return 0.5 * (
- -1.0
- + logvar2
- - logvar1
- + torch.exp(logvar1 - logvar2)
- + ((mean1 - mean2) ** 2) * torch.exp(-logvar2)
- )
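Note (annotation, not repository code): DiagonalGaussianDistribution above takes an encoder output with mean and log-variance stacked along the channel axis, draws reparameterized samples, and exposes the closed-form KL to a standard normal. A minimal sketch of those two operations on dummy tensors (shapes are assumptions for illustration):

    import torch

    params = torch.randn(2, 8, 32, 32)             # assumed encoder "moments", C=4
    mean, logvar = torch.chunk(params, 2, dim=1)
    logvar = torch.clamp(logvar, -30.0, 20.0)
    std = torch.exp(0.5 * logvar)

    sample = mean + std * torch.randn_like(mean)   # reparameterization trick
    kl = 0.5 * torch.sum(mean.pow(2) + logvar.exp() - 1.0 - logvar, dim=[1, 2, 3])
    print(sample.shape, kl.shape)                  # [2, 4, 32, 32] and [2]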
diff --git a/One-2-3-45-master 2/ldm/modules/ema.py b/One-2-3-45-master 2/ldm/modules/ema.py
deleted file mode 100644
index c8c75af43565f6e140287644aaaefa97dd6e67c5..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/modules/ema.py
+++ /dev/null
@@ -1,76 +0,0 @@
-import torch
-from torch import nn
-
-
-class LitEma(nn.Module):
- def __init__(self, model, decay=0.9999, use_num_upates=True):
- super().__init__()
- if decay < 0.0 or decay > 1.0:
- raise ValueError('Decay must be between 0 and 1')
-
- self.m_name2s_name = {}
- self.register_buffer('decay', torch.tensor(decay, dtype=torch.float32))
- self.register_buffer('num_updates', torch.tensor(0,dtype=torch.int) if use_num_upates
- else torch.tensor(-1,dtype=torch.int))
-
- for name, p in model.named_parameters():
- if p.requires_grad:
- #remove as '.'-character is not allowed in buffers
- s_name = name.replace('.','')
- self.m_name2s_name.update({name:s_name})
- self.register_buffer(s_name,p.clone().detach().data)
-
- self.collected_params = []
-
- def forward(self,model):
- decay = self.decay
-
- if self.num_updates >= 0:
- self.num_updates += 1
- decay = min(self.decay,(1 + self.num_updates) / (10 + self.num_updates))
-
- one_minus_decay = 1.0 - decay
-
- with torch.no_grad():
- m_param = dict(model.named_parameters())
- shadow_params = dict(self.named_buffers())
-
- for key in m_param:
- if m_param[key].requires_grad:
- sname = self.m_name2s_name[key]
- shadow_params[sname] = shadow_params[sname].type_as(m_param[key])
- shadow_params[sname].sub_(one_minus_decay * (shadow_params[sname] - m_param[key]))
- else:
- assert not key in self.m_name2s_name
-
- def copy_to(self, model):
- m_param = dict(model.named_parameters())
- shadow_params = dict(self.named_buffers())
- for key in m_param:
- if m_param[key].requires_grad:
- m_param[key].data.copy_(shadow_params[self.m_name2s_name[key]].data)
- else:
- assert not key in self.m_name2s_name
-
- def store(self, parameters):
- """
- Save the current parameters for restoring later.
- Args:
- parameters: Iterable of `torch.nn.Parameter`; the parameters to be
- temporarily stored.
- """
- self.collected_params = [param.clone() for param in parameters]
-
- def restore(self, parameters):
- """
- Restore the parameters stored with the `store` method.
- Useful to validate the model with EMA parameters without affecting the
- original optimization process. Store the parameters before the
- `copy_to` method. After validation (or model saving), use this to
- restore the former parameters.
- Args:
- parameters: Iterable of `torch.nn.Parameter`; the parameters to be
- updated with the stored parameters.
- """
- for c_param, param in zip(self.collected_params, parameters):
- param.data.copy_(c_param.data)
diff --git a/One-2-3-45-master 2/ldm/modules/encoders/__init__.py b/One-2-3-45-master 2/ldm/modules/encoders/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/One-2-3-45-master 2/ldm/modules/encoders/modules.py b/One-2-3-45-master 2/ldm/modules/encoders/modules.py
deleted file mode 100644
index b1afccfc55d1b8162d6da8c0316082584a4bde34..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/modules/encoders/modules.py
+++ /dev/null
@@ -1,550 +0,0 @@
-import torch
-import torch.nn as nn
-import numpy as np
-from functools import partial
-import kornia
-
-from ldm.modules.x_transformer import Encoder, TransformerWrapper # TODO: can we directly rely on lucidrains code and simply add this as a requirement? --> test
-from ldm.util import default
-import clip
-
-
-class AbstractEncoder(nn.Module):
- def __init__(self):
- super().__init__()
-
- def encode(self, *args, **kwargs):
- raise NotImplementedError
-
-class IdentityEncoder(AbstractEncoder):
-
- def encode(self, x):
- return x
-
-class FaceClipEncoder(AbstractEncoder):
- def __init__(self, augment=True, retreival_key=None):
- super().__init__()
- self.encoder = FrozenCLIPImageEmbedder()
- self.augment = augment
- self.retreival_key = retreival_key
-
- def forward(self, img):
- encodings = []
- with torch.no_grad():
- x_offset = 125
- if self.retreival_key:
- # Assumes retrieved images are packed into the second half of channels
- face = img[:,3:,190:440,x_offset:(512-x_offset)]
- other = img[:,:3,...].clone()
- else:
- face = img[:,:,190:440,x_offset:(512-x_offset)]
- other = img.clone()
-
- if self.augment:
- face = K.RandomHorizontalFlip()(face)
-
- other[:,:,190:440,x_offset:(512-x_offset)] *= 0
- encodings = [
- self.encoder.encode(face),
- self.encoder.encode(other),
- ]
-
- return torch.cat(encodings, dim=1)
-
- def encode(self, img):
- if isinstance(img, list):
- # Uncondition
- return torch.zeros((1, 2, 768), device=self.encoder.model.visual.conv1.weight.device)
-
- return self(img)
-
-class FaceIdClipEncoder(AbstractEncoder):
- def __init__(self):
- super().__init__()
- self.encoder = FrozenCLIPImageEmbedder()
- for p in self.encoder.parameters():
- p.requires_grad = False
- self.id = FrozenFaceEncoder("/home/jpinkney/code/stable-diffusion/model_ir_se50.pth", augment=True)
-
- def forward(self, img):
- encodings = []
- with torch.no_grad():
- face = kornia.geometry.resize(img, (256, 256),
- interpolation='bilinear', align_corners=True)
-
- other = img.clone()
- other[:,:,184:452,122:396] *= 0
- encodings = [
- self.id.encode(face),
- self.encoder.encode(other),
- ]
-
- return torch.cat(encodings, dim=1)
-
- def encode(self, img):
- if isinstance(img, list):
- # Uncondition
- return torch.zeros((1, 2, 768), device=self.encoder.model.visual.conv1.weight.device)
-
- return self(img)
-
-class ClassEmbedder(nn.Module):
- def __init__(self, embed_dim, n_classes=1000, key='class'):
- super().__init__()
- self.key = key
- self.embedding = nn.Embedding(n_classes, embed_dim)
-
- def forward(self, batch, key=None):
- if key is None:
- key = self.key
- # this is for use in crossattn
- c = batch[key][:, None]
- c = self.embedding(c)
- return c
-
-
-class TransformerEmbedder(AbstractEncoder):
- """Some transformer encoder layers"""
- def __init__(self, n_embed, n_layer, vocab_size, max_seq_len=77, device="cuda"):
- super().__init__()
- self.device = device
- self.transformer = TransformerWrapper(num_tokens=vocab_size, max_seq_len=max_seq_len,
- attn_layers=Encoder(dim=n_embed, depth=n_layer))
-
- def forward(self, tokens):
- tokens = tokens.to(self.device) # meh
- z = self.transformer(tokens, return_embeddings=True)
- return z
-
- def encode(self, x):
- return self(x)
-
-
-class BERTTokenizer(AbstractEncoder):
- """ Uses a pretrained BERT tokenizer by huggingface. Vocab size: 30522 (?)"""
- def __init__(self, device="cuda", vq_interface=True, max_length=77):
- super().__init__()
- from transformers import BertTokenizerFast # TODO: add to requirements
- self.tokenizer = BertTokenizerFast.from_pretrained("bert-base-uncased")
- self.device = device
- self.vq_interface = vq_interface
- self.max_length = max_length
-
- def forward(self, text):
- batch_encoding = self.tokenizer(text, truncation=True, max_length=self.max_length, return_length=True,
- return_overflowing_tokens=False, padding="max_length", return_tensors="pt")
- tokens = batch_encoding["input_ids"].to(self.device)
- return tokens
-
- @torch.no_grad()
- def encode(self, text):
- tokens = self(text)
- if not self.vq_interface:
- return tokens
- return None, None, [None, None, tokens]
-
- def decode(self, text):
- return text
-
-
-class BERTEmbedder(AbstractEncoder):
- """Uses the BERT tokenizr model and add some transformer encoder layers"""
- def __init__(self, n_embed, n_layer, vocab_size=30522, max_seq_len=77,
- device="cuda",use_tokenizer=True, embedding_dropout=0.0):
- super().__init__()
- self.use_tknz_fn = use_tokenizer
- if self.use_tknz_fn:
- self.tknz_fn = BERTTokenizer(vq_interface=False, max_length=max_seq_len)
- self.device = device
- self.transformer = TransformerWrapper(num_tokens=vocab_size, max_seq_len=max_seq_len,
- attn_layers=Encoder(dim=n_embed, depth=n_layer),
- emb_dropout=embedding_dropout)
-
- def forward(self, text):
- if self.use_tknz_fn:
- tokens = self.tknz_fn(text)#.to(self.device)
- else:
- tokens = text
- z = self.transformer(tokens, return_embeddings=True)
- return z
-
- def encode(self, text):
- # output of length 77
- return self(text)
-
-
-from transformers import T5Tokenizer, T5EncoderModel, CLIPTokenizer, CLIPTextModel
-
-def disabled_train(self, mode=True):
- """Overwrite model.train with this function to make sure train/eval mode
- does not change anymore."""
- return self
-
-
-class FrozenT5Embedder(AbstractEncoder):
- """Uses the T5 transformer encoder for text"""
- def __init__(self, version="google/t5-v1_1-large", device="cuda", max_length=77): # others are google/t5-v1_1-xl and google/t5-v1_1-xxl
- super().__init__()
- self.tokenizer = T5Tokenizer.from_pretrained(version)
- self.transformer = T5EncoderModel.from_pretrained(version)
- self.device = device
- self.max_length = max_length # TODO: typical value?
- self.freeze()
-
- def freeze(self):
- self.transformer = self.transformer.eval()
- #self.train = disabled_train
- for param in self.parameters():
- param.requires_grad = False
-
- def forward(self, text):
- batch_encoding = self.tokenizer(text, truncation=True, max_length=self.max_length, return_length=True,
- return_overflowing_tokens=False, padding="max_length", return_tensors="pt")
- tokens = batch_encoding["input_ids"].to(self.device)
- outputs = self.transformer(input_ids=tokens)
-
- z = outputs.last_hidden_state
- return z
-
- def encode(self, text):
- return self(text)
-
-from ldm.thirdp.psp.id_loss import IDFeatures
-import kornia.augmentation as K
-
-class FrozenFaceEncoder(AbstractEncoder):
- def __init__(self, model_path, augment=False):
- super().__init__()
- self.loss_fn = IDFeatures(model_path)
- # face encoder is frozen
- for p in self.loss_fn.parameters():
- p.requires_grad = False
- # Mapper is trainable
- self.mapper = torch.nn.Linear(512, 768)
- p = 0.25
- if augment:
- self.augment = K.AugmentationSequential(
- K.RandomHorizontalFlip(p=0.5),
- K.RandomEqualize(p=p),
- # K.RandomPlanckianJitter(p=p),
- # K.RandomPlasmaBrightness(p=p),
- # K.RandomPlasmaContrast(p=p),
- # K.ColorJiggle(0.02, 0.2, 0.2, p=p),
- )
- else:
- self.augment = False
-
- def forward(self, img):
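-        # A list input marks an unconditional sample and yields a zero token; otherwise the
-        # frozen ID network extracts a 512-d face feature that the trainable linear mapper
-        # lifts to a single 768-d conditioning token.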
- if isinstance(img, list):
- # Uncondition
- return torch.zeros((1, 1, 768), device=self.mapper.weight.device)
-
-        if self.augment:  # self.augment is False when augmentation is disabled
- # Transforms require 0-1
- img = self.augment((img + 1)/2)
- img = 2*img - 1
-
- feat = self.loss_fn(img, crop=True)
- feat = self.mapper(feat.unsqueeze(1))
- return feat
-
- def encode(self, img):
- return self(img)
-
-class FrozenCLIPEmbedder(AbstractEncoder):
- """Uses the CLIP transformer encoder for text (from huggingface)"""
- def __init__(self, version="openai/clip-vit-large-patch14", device="cuda", max_length=77): # clip-vit-base-patch32
- super().__init__()
- self.tokenizer = CLIPTokenizer.from_pretrained(version)
- self.transformer = CLIPTextModel.from_pretrained(version)
- self.device = device
- self.max_length = max_length # TODO: typical value?
- self.freeze()
-
- def freeze(self):
- self.transformer = self.transformer.eval()
- #self.train = disabled_train
- for param in self.parameters():
- param.requires_grad = False
-
- def forward(self, text):
- batch_encoding = self.tokenizer(text, truncation=True, max_length=self.max_length, return_length=True,
- return_overflowing_tokens=False, padding="max_length", return_tensors="pt")
- tokens = batch_encoding["input_ids"].to(self.device)
- outputs = self.transformer(input_ids=tokens)
-
- z = outputs.last_hidden_state
- return z
-
- def encode(self, text):
- return self(text)
-
-import torch.nn.functional as F
-from transformers import CLIPVisionModel
-class ClipImageProjector(AbstractEncoder):
- """
- Uses the CLIP image encoder.
- """
- def __init__(self, version="openai/clip-vit-large-patch14", max_length=77): # clip-vit-base-patch32
- super().__init__()
- self.model = CLIPVisionModel.from_pretrained(version)
- self.model.train()
- self.max_length = max_length # TODO: typical value?
- self.antialias = True
- self.mapper = torch.nn.Linear(1024, 768)
- self.register_buffer('mean', torch.Tensor([0.48145466, 0.4578275, 0.40821073]), persistent=False)
- self.register_buffer('std', torch.Tensor([0.26862954, 0.26130258, 0.27577711]), persistent=False)
- null_cond = self.get_null_cond(version, max_length)
- self.register_buffer('null_cond', null_cond)
-
- @torch.no_grad()
- def get_null_cond(self, version, max_length):
- device = self.mean.device
- embedder = FrozenCLIPEmbedder(version=version, device=device, max_length=max_length)
- null_cond = embedder([""])
- return null_cond
-
- def preprocess(self, x):
- # Expects inputs in the range -1, 1
- x = kornia.geometry.resize(x, (224, 224),
- interpolation='bicubic',align_corners=True,
- antialias=self.antialias)
- x = (x + 1.) / 2.
- # renormalize according to clip
- x = kornia.enhance.normalize(x, self.mean, self.std)
- return x
-
- def forward(self, x):
- if isinstance(x, list):
- return self.null_cond
- # x is assumed to be in range [-1,1]
- x = self.preprocess(x)
- outputs = self.model(pixel_values=x)
- last_hidden_state = outputs.last_hidden_state
- last_hidden_state = self.mapper(last_hidden_state)
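-        # Zero-pad the projected tokens along the sequence dimension up to max_length so the
-        # image conditioning has the same length as the text conditioning.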
- return F.pad(last_hidden_state, [0,0, 0,self.max_length-last_hidden_state.shape[1], 0,0])
-
- def encode(self, im):
- return self(im)
-
-class ProjectedFrozenCLIPEmbedder(AbstractEncoder):
- def __init__(self, version="openai/clip-vit-large-patch14", device="cuda", max_length=77): # clip-vit-base-patch32
- super().__init__()
- self.embedder = FrozenCLIPEmbedder(version=version, device=device, max_length=max_length)
- self.projection = torch.nn.Linear(768, 768)
-
- def forward(self, text):
- z = self.embedder(text)
- return self.projection(z)
-
- def encode(self, text):
- return self(text)
-
-class FrozenCLIPImageEmbedder(AbstractEncoder):
- """
- Uses the CLIP image encoder.
-    Not actually frozen; set cond_stage_trainable=False in the config if you want it frozen.
- """
- def __init__(
- self,
- model='ViT-L/14',
- jit=False,
- device='cpu',
- antialias=False,
- ):
- super().__init__()
- self.model, _ = clip.load(name=model, device=device, jit=jit)
- # We don't use the text part so delete it
- del self.model.transformer
- self.antialias = antialias
- self.register_buffer('mean', torch.Tensor([0.48145466, 0.4578275, 0.40821073]), persistent=False)
- self.register_buffer('std', torch.Tensor([0.26862954, 0.26130258, 0.27577711]), persistent=False)
-
- def preprocess(self, x):
- # Expects inputs in the range -1, 1
- x = kornia.geometry.resize(x, (224, 224),
- interpolation='bicubic',align_corners=True,
- antialias=self.antialias)
- x = (x + 1.) / 2.
- # renormalize according to clip
- x = kornia.enhance.normalize(x, self.mean, self.std)
- return x
-
- def forward(self, x):
- # x is assumed to be in range [-1,1]
- if isinstance(x, list):
- # [""] denotes condition dropout for ucg
- device = self.model.visual.conv1.weight.device
- return torch.zeros(1, 768, device=device)
- return self.model.encode_image(self.preprocess(x)).float()
-
- def encode(self, im):
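-        # Add a sequence dimension so the image embedding acts as a one-token context: [B, 1, 768].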
- return self(im).unsqueeze(1)
-
-from torchvision import transforms
-import random
-
-class FrozenCLIPImageMutliEmbedder(AbstractEncoder):
- """
- Uses the CLIP image encoder.
-    Not actually frozen; set cond_stage_trainable=False in the config if you want it frozen.
- """
- def __init__(
- self,
- model='ViT-L/14',
- jit=False,
- device='cpu',
- antialias=True,
- max_crops=5,
- ):
- super().__init__()
- self.model, _ = clip.load(name=model, device=device, jit=jit)
- # We don't use the text part so delete it
- del self.model.transformer
- self.antialias = antialias
- self.register_buffer('mean', torch.Tensor([0.48145466, 0.4578275, 0.40821073]), persistent=False)
- self.register_buffer('std', torch.Tensor([0.26862954, 0.26130258, 0.27577711]), persistent=False)
- self.max_crops = max_crops
-
- def preprocess(self, x):
-
- # Expects inputs in the range -1, 1
- randcrop = transforms.RandomResizedCrop(224, scale=(0.085, 1.0), ratio=(1,1))
- max_crops = self.max_crops
- patches = []
- crops = [randcrop(x) for _ in range(max_crops)]
- patches.extend(crops)
- x = torch.cat(patches, dim=0)
- x = (x + 1.) / 2.
- # renormalize according to clip
- x = kornia.enhance.normalize(x, self.mean, self.std)
- return x
-
- def forward(self, x):
- # x is assumed to be in range [-1,1]
- if isinstance(x, list):
- # [""] denotes condition dropout for ucg
- device = self.model.visual.conv1.weight.device
- return torch.zeros(1, self.max_crops, 768, device=device)
- batch_tokens = []
- for im in x:
- patches = self.preprocess(im.unsqueeze(0))
- tokens = self.model.encode_image(patches).float()
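-            # Zero each crop embedding with 10% probability (per-crop conditioning dropout).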
- for t in tokens:
- if random.random() < 0.1:
- t *= 0
- batch_tokens.append(tokens.unsqueeze(0))
-
- return torch.cat(batch_tokens, dim=0)
-
- def encode(self, im):
- return self(im)
-
-class SpatialRescaler(nn.Module):
- def __init__(self,
- n_stages=1,
- method='bilinear',
- multiplier=0.5,
- in_channels=3,
- out_channels=None,
- bias=False):
- super().__init__()
- self.n_stages = n_stages
- assert self.n_stages >= 0
- assert method in ['nearest','linear','bilinear','trilinear','bicubic','area']
- self.multiplier = multiplier
- self.interpolator = partial(torch.nn.functional.interpolate, mode=method)
- self.remap_output = out_channels is not None
- if self.remap_output:
- print(f'Spatial Rescaler mapping from {in_channels} to {out_channels} channels after resizing.')
- self.channel_mapper = nn.Conv2d(in_channels,out_channels,1,bias=bias)
-
- def forward(self,x):
- for stage in range(self.n_stages):
- x = self.interpolator(x, scale_factor=self.multiplier)
-
- if self.remap_output:
- x = self.channel_mapper(x)
- return x
-
- def encode(self, x):
- return self(x)
-
-
-from ldm.util import instantiate_from_config
-from ldm.modules.diffusionmodules.util import make_beta_schedule, extract_into_tensor, noise_like
-
-
-class LowScaleEncoder(nn.Module):
- def __init__(self, model_config, linear_start, linear_end, timesteps=1000, max_noise_level=250, output_size=64,
- scale_factor=1.0):
- super().__init__()
- self.max_noise_level = max_noise_level
- self.model = instantiate_from_config(model_config)
- self.augmentation_schedule = self.register_schedule(timesteps=timesteps, linear_start=linear_start,
- linear_end=linear_end)
- self.out_size = output_size
- self.scale_factor = scale_factor
-
- def register_schedule(self, beta_schedule="linear", timesteps=1000,
- linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3):
- betas = make_beta_schedule(beta_schedule, timesteps, linear_start=linear_start, linear_end=linear_end,
- cosine_s=cosine_s)
- alphas = 1. - betas
- alphas_cumprod = np.cumprod(alphas, axis=0)
- alphas_cumprod_prev = np.append(1., alphas_cumprod[:-1])
-
- timesteps, = betas.shape
- self.num_timesteps = int(timesteps)
- self.linear_start = linear_start
- self.linear_end = linear_end
- assert alphas_cumprod.shape[0] == self.num_timesteps, 'alphas have to be defined for each timestep'
-
- to_torch = partial(torch.tensor, dtype=torch.float32)
-
- self.register_buffer('betas', to_torch(betas))
- self.register_buffer('alphas_cumprod', to_torch(alphas_cumprod))
- self.register_buffer('alphas_cumprod_prev', to_torch(alphas_cumprod_prev))
-
- # calculations for diffusion q(x_t | x_{t-1}) and others
- self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod)))
- self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. - alphas_cumprod)))
- self.register_buffer('log_one_minus_alphas_cumprod', to_torch(np.log(1. - alphas_cumprod)))
- self.register_buffer('sqrt_recip_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod)))
- self.register_buffer('sqrt_recipm1_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod - 1)))
-
- def q_sample(self, x_start, t, noise=None):
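-        # Closed-form forward diffusion sample: x_t = sqrt(alpha_bar_t) * x_0 + sqrt(1 - alpha_bar_t) * noise.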
- noise = default(noise, lambda: torch.randn_like(x_start))
- return (extract_into_tensor(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start +
- extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t, x_start.shape) * noise)
-
- def forward(self, x):
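-        # Encode to a latent, rescale it, corrupt it with a randomly drawn number of diffusion
-        # steps, and optionally resize; both the noisy latent and its noise level are returned.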
- z = self.model.encode(x).sample()
- z = z * self.scale_factor
- noise_level = torch.randint(0, self.max_noise_level, (x.shape[0],), device=x.device).long()
- z = self.q_sample(z, noise_level)
- if self.out_size is not None:
- z = torch.nn.functional.interpolate(z, size=self.out_size, mode="nearest") # TODO: experiment with mode
- # z = z.repeat_interleave(2, -2).repeat_interleave(2, -1)
- return z, noise_level
-
- def decode(self, z):
- z = z / self.scale_factor
- return self.model.decode(z)
-
-
-if __name__ == "__main__":
- from ldm.util import count_params
- sentences = ["a hedgehog drinking a whiskey", "der mond ist aufgegangen", "Ein Satz mit vielen Sonderzeichen: äöü ß ?! : 'xx-y/@s'"]
- model = FrozenT5Embedder(version="google/t5-v1_1-xl").cuda()
- count_params(model, True)
- z = model(sentences)
- print(z.shape)
-
- model = FrozenCLIPEmbedder().cuda()
- count_params(model, True)
- z = model(sentences)
- print(z.shape)
-
- print("done.")
diff --git a/One-2-3-45-master 2/ldm/modules/evaluate/adm_evaluator.py b/One-2-3-45-master 2/ldm/modules/evaluate/adm_evaluator.py
deleted file mode 100644
index 508cddf206e9aa8b2fa1de32e69a7b78acee13c0..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/modules/evaluate/adm_evaluator.py
+++ /dev/null
@@ -1,676 +0,0 @@
-import argparse
-import io
-import os
-import random
-import warnings
-import zipfile
-from abc import ABC, abstractmethod
-from contextlib import contextmanager
-from functools import partial
-from multiprocessing import cpu_count
-from multiprocessing.pool import ThreadPool
-from typing import Iterable, Optional, Tuple
-import yaml
-
-import numpy as np
-import requests
-import tensorflow.compat.v1 as tf
-from scipy import linalg
-from tqdm.auto import tqdm
-
-INCEPTION_V3_URL = "https://openaipublic.blob.core.windows.net/diffusion/jul-2021/ref_batches/classify_image_graph_def.pb"
-INCEPTION_V3_PATH = "classify_image_graph_def.pb"
-
-FID_POOL_NAME = "pool_3:0"
-FID_SPATIAL_NAME = "mixed_6/conv:0"
-
-REQUIREMENTS = f"This script has the following requirements: \n" \
- 'tensorflow-gpu>=2.0' + "\n" + 'scipy' + "\n" + "requests" + "\n" + "tqdm"
-
-
-def main():
- parser = argparse.ArgumentParser()
- parser.add_argument("--ref_batch", help="path to reference batch npz file")
- parser.add_argument("--sample_batch", help="path to sample batch npz file")
- args = parser.parse_args()
-
- config = tf.ConfigProto(
- allow_soft_placement=True # allows DecodeJpeg to run on CPU in Inception graph
- )
- config.gpu_options.allow_growth = True
- evaluator = Evaluator(tf.Session(config=config))
-
- print("warming up TensorFlow...")
- # This will cause TF to print a bunch of verbose stuff now rather
- # than after the next print(), to help prevent confusion.
- evaluator.warmup()
-
- print("computing reference batch activations...")
- ref_acts = evaluator.read_activations(args.ref_batch)
- print("computing/reading reference batch statistics...")
- ref_stats, ref_stats_spatial = evaluator.read_statistics(args.ref_batch, ref_acts)
-
- print("computing sample batch activations...")
- sample_acts = evaluator.read_activations(args.sample_batch)
- print("computing/reading sample batch statistics...")
- sample_stats, sample_stats_spatial = evaluator.read_statistics(args.sample_batch, sample_acts)
-
- print("Computing evaluations...")
- is_ = evaluator.compute_inception_score(sample_acts[0])
- print("Inception Score:", is_)
- fid = sample_stats.frechet_distance(ref_stats)
- print("FID:", fid)
- sfid = sample_stats_spatial.frechet_distance(ref_stats_spatial)
- print("sFID:", sfid)
- prec, recall = evaluator.compute_prec_recall(ref_acts[0], sample_acts[0])
- print("Precision:", prec)
- print("Recall:", recall)
-
- savepath = '/'.join(args.sample_batch.split('/')[:-1])
- results_file = os.path.join(savepath,'evaluation_metrics.yaml')
- print(f'Saving evaluation results to "{results_file}"')
-
- results = {
- 'IS': is_,
- 'FID': fid,
- 'sFID': sfid,
- 'Precision:':prec,
- 'Recall': recall
- }
-
- with open(results_file, 'w') as f:
- yaml.dump(results, f, default_flow_style=False)
-
-class InvalidFIDException(Exception):
- pass
-
-
-class FIDStatistics:
- def __init__(self, mu: np.ndarray, sigma: np.ndarray):
- self.mu = mu
- self.sigma = sigma
-
- def frechet_distance(self, other, eps=1e-6):
- """
- Compute the Frechet distance between two sets of statistics.
- """
- # https://github.com/bioinf-jku/TTUR/blob/73ab375cdf952a12686d9aa7978567771084da42/fid.py#L132
- mu1, sigma1 = self.mu, self.sigma
- mu2, sigma2 = other.mu, other.sigma
-
- mu1 = np.atleast_1d(mu1)
- mu2 = np.atleast_1d(mu2)
-
- sigma1 = np.atleast_2d(sigma1)
- sigma2 = np.atleast_2d(sigma2)
-
- assert (
- mu1.shape == mu2.shape
- ), f"Training and test mean vectors have different lengths: {mu1.shape}, {mu2.shape}"
- assert (
- sigma1.shape == sigma2.shape
- ), f"Training and test covariances have different dimensions: {sigma1.shape}, {sigma2.shape}"
-
- diff = mu1 - mu2
-
- # product might be almost singular
- covmean, _ = linalg.sqrtm(sigma1.dot(sigma2), disp=False)
- if not np.isfinite(covmean).all():
- msg = (
- "fid calculation produces singular product; adding %s to diagonal of cov estimates"
- % eps
- )
- warnings.warn(msg)
- offset = np.eye(sigma1.shape[0]) * eps
- covmean = linalg.sqrtm((sigma1 + offset).dot(sigma2 + offset))
-
- # numerical error might give slight imaginary component
- if np.iscomplexobj(covmean):
- if not np.allclose(np.diagonal(covmean).imag, 0, atol=1e-3):
- m = np.max(np.abs(covmean.imag))
- raise ValueError("Imaginary component {}".format(m))
- covmean = covmean.real
-
- tr_covmean = np.trace(covmean)
-
- return diff.dot(diff) + np.trace(sigma1) + np.trace(sigma2) - 2 * tr_covmean
-
-
-class Evaluator:
- def __init__(
- self,
- session,
- batch_size=64,
- softmax_batch_size=512,
- ):
- self.sess = session
- self.batch_size = batch_size
- self.softmax_batch_size = softmax_batch_size
- self.manifold_estimator = ManifoldEstimator(session)
- with self.sess.graph.as_default():
- self.image_input = tf.placeholder(tf.float32, shape=[None, None, None, 3])
- self.softmax_input = tf.placeholder(tf.float32, shape=[None, 2048])
- self.pool_features, self.spatial_features = _create_feature_graph(self.image_input)
- self.softmax = _create_softmax_graph(self.softmax_input)
-
- def warmup(self):
- self.compute_activations(np.zeros([1, 8, 64, 64, 3]))
-
- def read_activations(self, npz_path: str) -> Tuple[np.ndarray, np.ndarray]:
- with open_npz_array(npz_path, "arr_0") as reader:
- return self.compute_activations(reader.read_batches(self.batch_size))
-
- def compute_activations(self, batches: Iterable[np.ndarray],silent=False) -> Tuple[np.ndarray, np.ndarray]:
- """
- Compute image features for downstream evals.
-
-        :param batches: an iterator over NHWC numpy arrays in [0, 255].
- :return: a tuple of numpy arrays of shape [N x X], where X is a feature
- dimension. The tuple is (pool_3, spatial).
- """
- preds = []
- spatial_preds = []
- it = batches if silent else tqdm(batches)
- for batch in it:
- batch = batch.astype(np.float32)
- pred, spatial_pred = self.sess.run(
- [self.pool_features, self.spatial_features], {self.image_input: batch}
- )
- preds.append(pred.reshape([pred.shape[0], -1]))
- spatial_preds.append(spatial_pred.reshape([spatial_pred.shape[0], -1]))
- return (
- np.concatenate(preds, axis=0),
- np.concatenate(spatial_preds, axis=0),
- )
-
- def read_statistics(
- self, npz_path: str, activations: Tuple[np.ndarray, np.ndarray]
- ) -> Tuple[FIDStatistics, FIDStatistics]:
- obj = np.load(npz_path)
- if "mu" in list(obj.keys()):
- return FIDStatistics(obj["mu"], obj["sigma"]), FIDStatistics(
- obj["mu_s"], obj["sigma_s"]
- )
- return tuple(self.compute_statistics(x) for x in activations)
-
- def compute_statistics(self, activations: np.ndarray) -> FIDStatistics:
- mu = np.mean(activations, axis=0)
- sigma = np.cov(activations, rowvar=False)
- return FIDStatistics(mu, sigma)
-
- def compute_inception_score(self, activations: np.ndarray, split_size: int = 5000) -> float:
- softmax_out = []
- for i in range(0, len(activations), self.softmax_batch_size):
- acts = activations[i : i + self.softmax_batch_size]
- softmax_out.append(self.sess.run(self.softmax, feed_dict={self.softmax_input: acts}))
- preds = np.concatenate(softmax_out, axis=0)
- # https://github.com/openai/improved-gan/blob/4f5d1ec5c16a7eceb206f42bfc652693601e1d5c/inception_score/model.py#L46
- scores = []
- for i in range(0, len(preds), split_size):
- part = preds[i : i + split_size]
- kl = part * (np.log(part) - np.log(np.expand_dims(np.mean(part, 0), 0)))
- kl = np.mean(np.sum(kl, 1))
- scores.append(np.exp(kl))
- return float(np.mean(scores))
-
- def compute_prec_recall(
- self, activations_ref: np.ndarray, activations_sample: np.ndarray
- ) -> Tuple[float, float]:
- radii_1 = self.manifold_estimator.manifold_radii(activations_ref)
- radii_2 = self.manifold_estimator.manifold_radii(activations_sample)
- pr = self.manifold_estimator.evaluate_pr(
- activations_ref, radii_1, activations_sample, radii_2
- )
- return (float(pr[0][0]), float(pr[1][0]))
-
-
-class ManifoldEstimator:
- """
- A helper for comparing manifolds of feature vectors.
-
- Adapted from https://github.com/kynkaat/improved-precision-and-recall-metric/blob/f60f25e5ad933a79135c783fcda53de30f42c9b9/precision_recall.py#L57
- """
-
- def __init__(
- self,
- session,
- row_batch_size=10000,
- col_batch_size=10000,
- nhood_sizes=(3,),
- clamp_to_percentile=None,
- eps=1e-5,
- ):
- """
- Estimate the manifold of given feature vectors.
-
- :param session: the TensorFlow session.
- :param row_batch_size: row batch size to compute pairwise distances
- (parameter to trade-off between memory usage and performance).
- :param col_batch_size: column batch size to compute pairwise distances.
- :param nhood_sizes: number of neighbors used to estimate the manifold.
- :param clamp_to_percentile: prune hyperspheres that have radius larger than
- the given percentile.
- :param eps: small number for numerical stability.
- """
- self.distance_block = DistanceBlock(session)
- self.row_batch_size = row_batch_size
- self.col_batch_size = col_batch_size
- self.nhood_sizes = nhood_sizes
- self.num_nhoods = len(nhood_sizes)
- self.clamp_to_percentile = clamp_to_percentile
- self.eps = eps
-
- def warmup(self):
- feats, radii = (
- np.zeros([1, 2048], dtype=np.float32),
- np.zeros([1, 1], dtype=np.float32),
- )
- self.evaluate_pr(feats, radii, feats, radii)
-
- def manifold_radii(self, features: np.ndarray) -> np.ndarray:
- num_images = len(features)
-
- # Estimate manifold of features by calculating distances to k-NN of each sample.
- radii = np.zeros([num_images, self.num_nhoods], dtype=np.float32)
- distance_batch = np.zeros([self.row_batch_size, num_images], dtype=np.float32)
- seq = np.arange(max(self.nhood_sizes) + 1, dtype=np.int32)
-
- for begin1 in range(0, num_images, self.row_batch_size):
- end1 = min(begin1 + self.row_batch_size, num_images)
- row_batch = features[begin1:end1]
-
- for begin2 in range(0, num_images, self.col_batch_size):
- end2 = min(begin2 + self.col_batch_size, num_images)
- col_batch = features[begin2:end2]
-
- # Compute distances between batches.
- distance_batch[
- 0 : end1 - begin1, begin2:end2
- ] = self.distance_block.pairwise_distances(row_batch, col_batch)
-
- # Find the k-nearest neighbor from the current batch.
- radii[begin1:end1, :] = np.concatenate(
- [
- x[:, self.nhood_sizes]
- for x in _numpy_partition(distance_batch[0 : end1 - begin1, :], seq, axis=1)
- ],
- axis=0,
- )
-
- if self.clamp_to_percentile is not None:
- max_distances = np.percentile(radii, self.clamp_to_percentile, axis=0)
- radii[radii > max_distances] = 0
- return radii
-
- def evaluate(self, features: np.ndarray, radii: np.ndarray, eval_features: np.ndarray):
- """
-        Evaluate whether new feature vectors lie on the estimated manifold.
- """
- num_eval_images = eval_features.shape[0]
- num_ref_images = radii.shape[0]
- distance_batch = np.zeros([self.row_batch_size, num_ref_images], dtype=np.float32)
- batch_predictions = np.zeros([num_eval_images, self.num_nhoods], dtype=np.int32)
- max_realism_score = np.zeros([num_eval_images], dtype=np.float32)
- nearest_indices = np.zeros([num_eval_images], dtype=np.int32)
-
- for begin1 in range(0, num_eval_images, self.row_batch_size):
- end1 = min(begin1 + self.row_batch_size, num_eval_images)
- feature_batch = eval_features[begin1:end1]
-
- for begin2 in range(0, num_ref_images, self.col_batch_size):
- end2 = min(begin2 + self.col_batch_size, num_ref_images)
- ref_batch = features[begin2:end2]
-
- distance_batch[
- 0 : end1 - begin1, begin2:end2
- ] = self.distance_block.pairwise_distances(feature_batch, ref_batch)
-
- # From the minibatch of new feature vectors, determine if they are in the estimated manifold.
- # If a feature vector is inside a hypersphere of some reference sample, then
-                # the new sample lies on the estimated manifold.
- # The radii of the hyperspheres are determined from distances of neighborhood size k.
- samples_in_manifold = distance_batch[0 : end1 - begin1, :, None] <= radii
- batch_predictions[begin1:end1] = np.any(samples_in_manifold, axis=1).astype(np.int32)
-
- max_realism_score[begin1:end1] = np.max(
- radii[:, 0] / (distance_batch[0 : end1 - begin1, :] + self.eps), axis=1
- )
- nearest_indices[begin1:end1] = np.argmin(distance_batch[0 : end1 - begin1, :], axis=1)
-
- return {
- "fraction": float(np.mean(batch_predictions)),
- "batch_predictions": batch_predictions,
- "max_realisim_score": max_realism_score,
- "nearest_indices": nearest_indices,
- }
-
- def evaluate_pr(
- self,
- features_1: np.ndarray,
- radii_1: np.ndarray,
- features_2: np.ndarray,
- radii_2: np.ndarray,
- ) -> Tuple[np.ndarray, np.ndarray]:
- """
- Evaluate precision and recall efficiently.
-
- :param features_1: [N1 x D] feature vectors for reference batch.
- :param radii_1: [N1 x K1] radii for reference vectors.
- :param features_2: [N2 x D] feature vectors for the other batch.
-        :param radii_2: [N2 x K2] radii for the other batch's vectors.
- :return: a tuple of arrays for (precision, recall):
- - precision: an np.ndarray of length K1
- - recall: an np.ndarray of length K2
- """
-        features_1_status = np.zeros([len(features_1), radii_2.shape[1]], dtype=bool)
-        features_2_status = np.zeros([len(features_2), radii_1.shape[1]], dtype=bool)
- for begin_1 in range(0, len(features_1), self.row_batch_size):
- end_1 = begin_1 + self.row_batch_size
- batch_1 = features_1[begin_1:end_1]
- for begin_2 in range(0, len(features_2), self.col_batch_size):
- end_2 = begin_2 + self.col_batch_size
- batch_2 = features_2[begin_2:end_2]
- batch_1_in, batch_2_in = self.distance_block.less_thans(
- batch_1, radii_1[begin_1:end_1], batch_2, radii_2[begin_2:end_2]
- )
- features_1_status[begin_1:end_1] |= batch_1_in
- features_2_status[begin_2:end_2] |= batch_2_in
- return (
- np.mean(features_2_status.astype(np.float64), axis=0),
- np.mean(features_1_status.astype(np.float64), axis=0),
- )
-
-
-class DistanceBlock:
- """
- Calculate pairwise distances between vectors.
-
- Adapted from https://github.com/kynkaat/improved-precision-and-recall-metric/blob/f60f25e5ad933a79135c783fcda53de30f42c9b9/precision_recall.py#L34
- """
-
- def __init__(self, session):
- self.session = session
-
- # Initialize TF graph to calculate pairwise distances.
- with session.graph.as_default():
- self._features_batch1 = tf.placeholder(tf.float32, shape=[None, None])
- self._features_batch2 = tf.placeholder(tf.float32, shape=[None, None])
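-            # Pairwise distances are computed in float16 for speed; tf.cond falls back to a
-            # float32 computation whenever any float16 entry becomes non-finite.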
- distance_block_16 = _batch_pairwise_distances(
- tf.cast(self._features_batch1, tf.float16),
- tf.cast(self._features_batch2, tf.float16),
- )
- self.distance_block = tf.cond(
- tf.reduce_all(tf.math.is_finite(distance_block_16)),
- lambda: tf.cast(distance_block_16, tf.float32),
- lambda: _batch_pairwise_distances(self._features_batch1, self._features_batch2),
- )
-
- # Extra logic for less thans.
- self._radii1 = tf.placeholder(tf.float32, shape=[None, None])
- self._radii2 = tf.placeholder(tf.float32, shape=[None, None])
- dist32 = tf.cast(self.distance_block, tf.float32)[..., None]
- self._batch_1_in = tf.math.reduce_any(dist32 <= self._radii2, axis=1)
- self._batch_2_in = tf.math.reduce_any(dist32 <= self._radii1[:, None], axis=0)
-
- def pairwise_distances(self, U, V):
- """
- Evaluate pairwise distances between two batches of feature vectors.
- """
- return self.session.run(
- self.distance_block,
- feed_dict={self._features_batch1: U, self._features_batch2: V},
- )
-
- def less_thans(self, batch_1, radii_1, batch_2, radii_2):
- return self.session.run(
- [self._batch_1_in, self._batch_2_in],
- feed_dict={
- self._features_batch1: batch_1,
- self._features_batch2: batch_2,
- self._radii1: radii_1,
- self._radii2: radii_2,
- },
- )
-
-
-def _batch_pairwise_distances(U, V):
- """
- Compute pairwise distances between two batches of feature vectors.
- """
- with tf.variable_scope("pairwise_dist_block"):
- # Squared norms of each row in U and V.
- norm_u = tf.reduce_sum(tf.square(U), 1)
- norm_v = tf.reduce_sum(tf.square(V), 1)
-
-        # norm_u as a column vector and norm_v as a row vector.
- norm_u = tf.reshape(norm_u, [-1, 1])
- norm_v = tf.reshape(norm_v, [1, -1])
-
- # Pairwise squared Euclidean distances.
- D = tf.maximum(norm_u - 2 * tf.matmul(U, V, False, True) + norm_v, 0.0)
-
- return D
-
-
-class NpzArrayReader(ABC):
- @abstractmethod
- def read_batch(self, batch_size: int) -> Optional[np.ndarray]:
- pass
-
- @abstractmethod
- def remaining(self) -> int:
- pass
-
- def read_batches(self, batch_size: int) -> Iterable[np.ndarray]:
- def gen_fn():
- while True:
- batch = self.read_batch(batch_size)
- if batch is None:
- break
- yield batch
-
- rem = self.remaining()
- num_batches = rem // batch_size + int(rem % batch_size != 0)
- return BatchIterator(gen_fn, num_batches)
-
-
-class BatchIterator:
- def __init__(self, gen_fn, length):
- self.gen_fn = gen_fn
- self.length = length
-
- def __len__(self):
- return self.length
-
- def __iter__(self):
- return self.gen_fn()
-
-
-class StreamingNpzArrayReader(NpzArrayReader):
- def __init__(self, arr_f, shape, dtype):
- self.arr_f = arr_f
- self.shape = shape
- self.dtype = dtype
- self.idx = 0
-
- def read_batch(self, batch_size: int) -> Optional[np.ndarray]:
- if self.idx >= self.shape[0]:
- return None
-
- bs = min(batch_size, self.shape[0] - self.idx)
- self.idx += bs
-
- if self.dtype.itemsize == 0:
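-            # Dtypes with itemsize 0 carry no data, so return an (uninitialised) array of the
-            # right shape without reading any bytes.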
- return np.ndarray([bs, *self.shape[1:]], dtype=self.dtype)
-
- read_count = bs * np.prod(self.shape[1:])
- read_size = int(read_count * self.dtype.itemsize)
- data = _read_bytes(self.arr_f, read_size, "array data")
- return np.frombuffer(data, dtype=self.dtype).reshape([bs, *self.shape[1:]])
-
- def remaining(self) -> int:
- return max(0, self.shape[0] - self.idx)
-
-
-class MemoryNpzArrayReader(NpzArrayReader):
- def __init__(self, arr):
- self.arr = arr
- self.idx = 0
-
- @classmethod
- def load(cls, path: str, arr_name: str):
- with open(path, "rb") as f:
- arr = np.load(f)[arr_name]
- return cls(arr)
-
- def read_batch(self, batch_size: int) -> Optional[np.ndarray]:
- if self.idx >= self.arr.shape[0]:
- return None
-
- res = self.arr[self.idx : self.idx + batch_size]
- self.idx += batch_size
- return res
-
- def remaining(self) -> int:
- return max(0, self.arr.shape[0] - self.idx)
-
-
-@contextmanager
-def open_npz_array(path: str, arr_name: str) -> NpzArrayReader:
- with _open_npy_file(path, arr_name) as arr_f:
- version = np.lib.format.read_magic(arr_f)
- if version == (1, 0):
- header = np.lib.format.read_array_header_1_0(arr_f)
- elif version == (2, 0):
- header = np.lib.format.read_array_header_2_0(arr_f)
- else:
- yield MemoryNpzArrayReader.load(path, arr_name)
- return
- shape, fortran, dtype = header
- if fortran or dtype.hasobject:
- yield MemoryNpzArrayReader.load(path, arr_name)
- else:
- yield StreamingNpzArrayReader(arr_f, shape, dtype)
-
-
-def _read_bytes(fp, size, error_template="ran out of data"):
- """
- Copied from: https://github.com/numpy/numpy/blob/fb215c76967739268de71aa4bda55dd1b062bc2e/numpy/lib/format.py#L788-L886
-
- Read from file-like object until size bytes are read.
-    Raises ValueError if EOF is encountered before size bytes are read.
- Non-blocking objects only supported if they derive from io objects.
- Required as e.g. ZipExtFile in python 2.6 can return less data than
- requested.
- """
- data = bytes()
- while True:
- # io files (default in python3) return None or raise on
- # would-block, python2 file will truncate, probably nothing can be
- # done about that. note that regular files can't be non-blocking
- try:
- r = fp.read(size - len(data))
- data += r
- if len(r) == 0 or len(data) == size:
- break
- except io.BlockingIOError:
- pass
- if len(data) != size:
- msg = "EOF: reading %s, expected %d bytes got %d"
- raise ValueError(msg % (error_template, size, len(data)))
- else:
- return data
-
-
-@contextmanager
-def _open_npy_file(path: str, arr_name: str):
- with open(path, "rb") as f:
- with zipfile.ZipFile(f, "r") as zip_f:
- if f"{arr_name}.npy" not in zip_f.namelist():
- raise ValueError(f"missing {arr_name} in npz file")
- with zip_f.open(f"{arr_name}.npy", "r") as arr_f:
- yield arr_f
-
-
-def _download_inception_model():
- if os.path.exists(INCEPTION_V3_PATH):
- return
- print("downloading InceptionV3 model...")
- with requests.get(INCEPTION_V3_URL, stream=True) as r:
- r.raise_for_status()
- tmp_path = INCEPTION_V3_PATH + ".tmp"
- with open(tmp_path, "wb") as f:
- for chunk in tqdm(r.iter_content(chunk_size=8192)):
- f.write(chunk)
- os.rename(tmp_path, INCEPTION_V3_PATH)
-
-
-def _create_feature_graph(input_batch):
- _download_inception_model()
- prefix = f"{random.randrange(2**32)}_{random.randrange(2**32)}"
- with open(INCEPTION_V3_PATH, "rb") as f:
- graph_def = tf.GraphDef()
- graph_def.ParseFromString(f.read())
- pool3, spatial = tf.import_graph_def(
- graph_def,
- input_map={f"ExpandDims:0": input_batch},
- return_elements=[FID_POOL_NAME, FID_SPATIAL_NAME],
- name=prefix,
- )
- _update_shapes(pool3)
- spatial = spatial[..., :7]
- return pool3, spatial
-
-
-def _create_softmax_graph(input_batch):
- _download_inception_model()
- prefix = f"{random.randrange(2**32)}_{random.randrange(2**32)}"
- with open(INCEPTION_V3_PATH, "rb") as f:
- graph_def = tf.GraphDef()
- graph_def.ParseFromString(f.read())
- (matmul,) = tf.import_graph_def(
- graph_def, return_elements=[f"softmax/logits/MatMul"], name=prefix
- )
- w = matmul.inputs[1]
- logits = tf.matmul(input_batch, w)
- return tf.nn.softmax(logits)
-
-
-def _update_shapes(pool3):
- # https://github.com/bioinf-jku/TTUR/blob/73ab375cdf952a12686d9aa7978567771084da42/fid.py#L50-L63
- ops = pool3.graph.get_operations()
- for op in ops:
- for o in op.outputs:
- shape = o.get_shape()
- if shape._dims is not None: # pylint: disable=protected-access
- # shape = [s.value for s in shape] TF 1.x
- shape = [s for s in shape] # TF 2.x
- new_shape = []
- for j, s in enumerate(shape):
- if s == 1 and j == 0:
- new_shape.append(None)
- else:
- new_shape.append(s)
- o.__dict__["_shape_val"] = tf.TensorShape(new_shape)
- return pool3
-
-
-def _numpy_partition(arr, kth, **kwargs):
- num_workers = min(cpu_count(), len(arr))
- chunk_size = len(arr) // num_workers
- extra = len(arr) % num_workers
-
- start_idx = 0
- batches = []
- for i in range(num_workers):
- size = chunk_size + (1 if i < extra else 0)
- batches.append(arr[start_idx : start_idx + size])
- start_idx += size
-
- with ThreadPool(num_workers) as pool:
- return list(pool.map(partial(np.partition, kth=kth, **kwargs), batches))
-
-
-if __name__ == "__main__":
- print(REQUIREMENTS)
- main()
diff --git a/One-2-3-45-master 2/ldm/modules/evaluate/evaluate_perceptualsim.py b/One-2-3-45-master 2/ldm/modules/evaluate/evaluate_perceptualsim.py
deleted file mode 100644
index c85fef967b60b90e3001b0cc29aa70b1a80ed36f..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/modules/evaluate/evaluate_perceptualsim.py
+++ /dev/null
@@ -1,630 +0,0 @@
-import argparse
-import glob
-import os
-from tqdm import tqdm
-from collections import namedtuple
-
-import numpy as np
-import torch
-import torchvision.transforms as transforms
-from torchvision import models
-from PIL import Image
-
-from ldm.modules.evaluate.ssim import ssim
-
-
-transform = transforms.Compose([transforms.ToTensor()])
-
-def normalize_tensor(in_feat, eps=1e-10):
- norm_factor = torch.sqrt(torch.sum(in_feat ** 2, dim=1)).view(
- in_feat.size()[0], 1, in_feat.size()[2], in_feat.size()[3]
- )
- return in_feat / (norm_factor.expand_as(in_feat) + eps)
-
-
-def cos_sim(in0, in1):
- in0_norm = normalize_tensor(in0)
- in1_norm = normalize_tensor(in1)
- N = in0.size()[0]
- X = in0.size()[2]
- Y = in0.size()[3]
-
- return torch.mean(
- torch.mean(
- torch.sum(in0_norm * in1_norm, dim=1).view(N, 1, X, Y), dim=2
- ).view(N, 1, 1, Y),
- dim=3,
- ).view(N)
-
-
-class squeezenet(torch.nn.Module):
- def __init__(self, requires_grad=False, pretrained=True):
- super(squeezenet, self).__init__()
- pretrained_features = models.squeezenet1_1(
- pretrained=pretrained
- ).features
- self.slice1 = torch.nn.Sequential()
- self.slice2 = torch.nn.Sequential()
- self.slice3 = torch.nn.Sequential()
- self.slice4 = torch.nn.Sequential()
- self.slice5 = torch.nn.Sequential()
- self.slice6 = torch.nn.Sequential()
- self.slice7 = torch.nn.Sequential()
- self.N_slices = 7
- for x in range(2):
- self.slice1.add_module(str(x), pretrained_features[x])
- for x in range(2, 5):
- self.slice2.add_module(str(x), pretrained_features[x])
- for x in range(5, 8):
- self.slice3.add_module(str(x), pretrained_features[x])
- for x in range(8, 10):
- self.slice4.add_module(str(x), pretrained_features[x])
- for x in range(10, 11):
- self.slice5.add_module(str(x), pretrained_features[x])
- for x in range(11, 12):
- self.slice6.add_module(str(x), pretrained_features[x])
- for x in range(12, 13):
- self.slice7.add_module(str(x), pretrained_features[x])
- if not requires_grad:
- for param in self.parameters():
- param.requires_grad = False
-
- def forward(self, X):
- h = self.slice1(X)
- h_relu1 = h
- h = self.slice2(h)
- h_relu2 = h
- h = self.slice3(h)
- h_relu3 = h
- h = self.slice4(h)
- h_relu4 = h
- h = self.slice5(h)
- h_relu5 = h
- h = self.slice6(h)
- h_relu6 = h
- h = self.slice7(h)
- h_relu7 = h
- vgg_outputs = namedtuple(
- "SqueezeOutputs",
- ["relu1", "relu2", "relu3", "relu4", "relu5", "relu6", "relu7"],
- )
- out = vgg_outputs(
- h_relu1, h_relu2, h_relu3, h_relu4, h_relu5, h_relu6, h_relu7
- )
-
- return out
-
-
-class alexnet(torch.nn.Module):
- def __init__(self, requires_grad=False, pretrained=True):
- super(alexnet, self).__init__()
- alexnet_pretrained_features = models.alexnet(
- pretrained=pretrained
- ).features
- self.slice1 = torch.nn.Sequential()
- self.slice2 = torch.nn.Sequential()
- self.slice3 = torch.nn.Sequential()
- self.slice4 = torch.nn.Sequential()
- self.slice5 = torch.nn.Sequential()
- self.N_slices = 5
- for x in range(2):
- self.slice1.add_module(str(x), alexnet_pretrained_features[x])
- for x in range(2, 5):
- self.slice2.add_module(str(x), alexnet_pretrained_features[x])
- for x in range(5, 8):
- self.slice3.add_module(str(x), alexnet_pretrained_features[x])
- for x in range(8, 10):
- self.slice4.add_module(str(x), alexnet_pretrained_features[x])
- for x in range(10, 12):
- self.slice5.add_module(str(x), alexnet_pretrained_features[x])
- if not requires_grad:
- for param in self.parameters():
- param.requires_grad = False
-
- def forward(self, X):
- h = self.slice1(X)
- h_relu1 = h
- h = self.slice2(h)
- h_relu2 = h
- h = self.slice3(h)
- h_relu3 = h
- h = self.slice4(h)
- h_relu4 = h
- h = self.slice5(h)
- h_relu5 = h
- alexnet_outputs = namedtuple(
- "AlexnetOutputs", ["relu1", "relu2", "relu3", "relu4", "relu5"]
- )
- out = alexnet_outputs(h_relu1, h_relu2, h_relu3, h_relu4, h_relu5)
-
- return out
-
-
-class vgg16(torch.nn.Module):
- def __init__(self, requires_grad=False, pretrained=True):
- super(vgg16, self).__init__()
- vgg_pretrained_features = models.vgg16(pretrained=pretrained).features
- self.slice1 = torch.nn.Sequential()
- self.slice2 = torch.nn.Sequential()
- self.slice3 = torch.nn.Sequential()
- self.slice4 = torch.nn.Sequential()
- self.slice5 = torch.nn.Sequential()
- self.N_slices = 5
- for x in range(4):
- self.slice1.add_module(str(x), vgg_pretrained_features[x])
- for x in range(4, 9):
- self.slice2.add_module(str(x), vgg_pretrained_features[x])
- for x in range(9, 16):
- self.slice3.add_module(str(x), vgg_pretrained_features[x])
- for x in range(16, 23):
- self.slice4.add_module(str(x), vgg_pretrained_features[x])
- for x in range(23, 30):
- self.slice5.add_module(str(x), vgg_pretrained_features[x])
- if not requires_grad:
- for param in self.parameters():
- param.requires_grad = False
-
- def forward(self, X):
- h = self.slice1(X)
- h_relu1_2 = h
- h = self.slice2(h)
- h_relu2_2 = h
- h = self.slice3(h)
- h_relu3_3 = h
- h = self.slice4(h)
- h_relu4_3 = h
- h = self.slice5(h)
- h_relu5_3 = h
- vgg_outputs = namedtuple(
- "VggOutputs",
- ["relu1_2", "relu2_2", "relu3_3", "relu4_3", "relu5_3"],
- )
- out = vgg_outputs(h_relu1_2, h_relu2_2, h_relu3_3, h_relu4_3, h_relu5_3)
-
- return out
-
-
-class resnet(torch.nn.Module):
- def __init__(self, requires_grad=False, pretrained=True, num=18):
- super(resnet, self).__init__()
- if num == 18:
- self.net = models.resnet18(pretrained=pretrained)
- elif num == 34:
- self.net = models.resnet34(pretrained=pretrained)
- elif num == 50:
- self.net = models.resnet50(pretrained=pretrained)
- elif num == 101:
- self.net = models.resnet101(pretrained=pretrained)
- elif num == 152:
- self.net = models.resnet152(pretrained=pretrained)
- self.N_slices = 5
-
- self.conv1 = self.net.conv1
- self.bn1 = self.net.bn1
- self.relu = self.net.relu
- self.maxpool = self.net.maxpool
- self.layer1 = self.net.layer1
- self.layer2 = self.net.layer2
- self.layer3 = self.net.layer3
- self.layer4 = self.net.layer4
-
- def forward(self, X):
- h = self.conv1(X)
- h = self.bn1(h)
- h = self.relu(h)
- h_relu1 = h
- h = self.maxpool(h)
- h = self.layer1(h)
- h_conv2 = h
- h = self.layer2(h)
- h_conv3 = h
- h = self.layer3(h)
- h_conv4 = h
- h = self.layer4(h)
- h_conv5 = h
-
- outputs = namedtuple(
- "Outputs", ["relu1", "conv2", "conv3", "conv4", "conv5"]
- )
- out = outputs(h_relu1, h_conv2, h_conv3, h_conv4, h_conv5)
-
- return out
-
-# Off-the-shelf deep network
-class PNet(torch.nn.Module):
- """Pre-trained network with all channels equally weighted by default"""
-
- def __init__(self, pnet_type="vgg", pnet_rand=False, use_gpu=True):
- super(PNet, self).__init__()
-
- self.use_gpu = use_gpu
-
- self.pnet_type = pnet_type
- self.pnet_rand = pnet_rand
-
- self.shift = torch.Tensor([-0.030, -0.088, -0.188]).view(1, 3, 1, 1)
- self.scale = torch.Tensor([0.458, 0.448, 0.450]).view(1, 3, 1, 1)
-
- if self.pnet_type in ["vgg", "vgg16"]:
- self.net = vgg16(pretrained=not self.pnet_rand, requires_grad=False)
- elif self.pnet_type == "alex":
- self.net = alexnet(
- pretrained=not self.pnet_rand, requires_grad=False
- )
- elif self.pnet_type[:-2] == "resnet":
- self.net = resnet(
- pretrained=not self.pnet_rand,
- requires_grad=False,
- num=int(self.pnet_type[-2:]),
- )
- elif self.pnet_type == "squeeze":
- self.net = squeezenet(
- pretrained=not self.pnet_rand, requires_grad=False
- )
-
- self.L = self.net.N_slices
-
- if use_gpu:
- self.net.cuda()
- self.shift = self.shift.cuda()
- self.scale = self.scale.cuda()
-
- def forward(self, in0, in1, retPerLayer=False):
- in0_sc = (in0 - self.shift.expand_as(in0)) / self.scale.expand_as(in0)
- in1_sc = (in1 - self.shift.expand_as(in0)) / self.scale.expand_as(in0)
-
- outs0 = self.net.forward(in0_sc)
- outs1 = self.net.forward(in1_sc)
-
- if retPerLayer:
- all_scores = []
- for (kk, out0) in enumerate(outs0):
- cur_score = 1.0 - cos_sim(outs0[kk], outs1[kk])
- if kk == 0:
- val = 1.0 * cur_score
- else:
- val = val + cur_score
- if retPerLayer:
- all_scores += [cur_score]
-
- if retPerLayer:
- return (val, all_scores)
- else:
- return val
-
-
-
-
-# The SSIM metric
-def ssim_metric(img1, img2, mask=None):
- return ssim(img1, img2, mask=mask, size_average=False)
-
-
-# The PSNR metric
-def psnr(img1, img2, mask=None,reshape=False):
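-    # Assumes images scaled to [0, 1], so PSNR = 10 * log10(1 / MSE); with a mask, the squared
-    # error is averaged only over the masked pixels (across the 3 colour channels).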
- b = img1.size(0)
- if not (mask is None):
- b = img1.size(0)
- mse_err = (img1 - img2).pow(2) * mask
- if reshape:
- mse_err = mse_err.reshape(b, -1).sum(dim=1) / (
- 3 * mask.reshape(b, -1).sum(dim=1).clamp(min=1)
- )
- else:
- mse_err = mse_err.view(b, -1).sum(dim=1) / (
- 3 * mask.view(b, -1).sum(dim=1).clamp(min=1)
- )
- else:
- if reshape:
- mse_err = (img1 - img2).pow(2).reshape(b, -1).mean(dim=1)
- else:
- mse_err = (img1 - img2).pow(2).view(b, -1).mean(dim=1)
-
- psnr = 10 * (1 / mse_err).log10()
- return psnr
-
-
-# The perceptual similarity metric
-def perceptual_sim(img1, img2, vgg16):
- # First extract features
- dist = vgg16(img1 * 2 - 1, img2 * 2 - 1)
-
- return dist
-
-def load_img(img_name, size=None):
- try:
- img = Image.open(img_name)
-
- if type(size) == int:
- img = img.resize((size, size))
- elif size is not None:
- img = img.resize((size[1], size[0]))
-
- img = transform(img).cuda()
- img = img.unsqueeze(0)
- except Exception as e:
- print("Failed at loading %s " % img_name)
- print(e)
- img = torch.zeros(1, 3, 256, 256).cuda()
- raise
- return img
-
-
-def compute_perceptual_similarity(folder, pred_img, tgt_img, take_every_other):
-
- # Load VGG16 for feature similarity
- vgg16 = PNet().to("cuda")
- vgg16.eval()
- vgg16.cuda()
-
- values_percsim = []
- values_ssim = []
- values_psnr = []
- folders = os.listdir(folder)
- for i, f in tqdm(enumerate(sorted(folders))):
- pred_imgs = glob.glob(folder + f + "/" + pred_img)
- tgt_imgs = glob.glob(folder + f + "/" + tgt_img)
- assert len(tgt_imgs) == 1
-
- perc_sim = 10000
- ssim_sim = -10
- psnr_sim = -10
- for p_img in pred_imgs:
- t_img = load_img(tgt_imgs[0])
- p_img = load_img(p_img, size=t_img.shape[2:])
- t_perc_sim = perceptual_sim(p_img, t_img, vgg16).item()
- perc_sim = min(perc_sim, t_perc_sim)
-
- ssim_sim = max(ssim_sim, ssim_metric(p_img, t_img).item())
- psnr_sim = max(psnr_sim, psnr(p_img, t_img).item())
-
- values_percsim += [perc_sim]
- values_ssim += [ssim_sim]
- values_psnr += [psnr_sim]
-
- if take_every_other:
- n_valuespercsim = []
- n_valuesssim = []
- n_valuespsnr = []
- for i in range(0, len(values_percsim) // 2):
- n_valuespercsim += [
- min(values_percsim[2 * i], values_percsim[2 * i + 1])
- ]
- n_valuespsnr += [max(values_psnr[2 * i], values_psnr[2 * i + 1])]
- n_valuesssim += [max(values_ssim[2 * i], values_ssim[2 * i + 1])]
-
- values_percsim = n_valuespercsim
- values_ssim = n_valuesssim
- values_psnr = n_valuespsnr
-
- avg_percsim = np.mean(np.array(values_percsim))
- std_percsim = np.std(np.array(values_percsim))
-
- avg_psnr = np.mean(np.array(values_psnr))
- std_psnr = np.std(np.array(values_psnr))
-
- avg_ssim = np.mean(np.array(values_ssim))
- std_ssim = np.std(np.array(values_ssim))
-
- return {
- "Perceptual similarity": (avg_percsim, std_percsim),
- "PSNR": (avg_psnr, std_psnr),
- "SSIM": (avg_ssim, std_ssim),
- }
-
-
-def compute_perceptual_similarity_from_list(pred_imgs_list, tgt_imgs_list,
- take_every_other,
- simple_format=True):
-
- # Load VGG16 for feature similarity
- vgg16 = PNet().to("cuda")
- vgg16.eval()
- vgg16.cuda()
-
- values_percsim = []
- values_ssim = []
- values_psnr = []
- equal_count = 0
- ambig_count = 0
- for i, tgt_img in enumerate(tqdm(tgt_imgs_list)):
- pred_imgs = pred_imgs_list[i]
- tgt_imgs = [tgt_img]
- assert len(tgt_imgs) == 1
-
- if type(pred_imgs) != list:
- pred_imgs = [pred_imgs]
-
- perc_sim = 10000
- ssim_sim = -10
- psnr_sim = -10
- assert len(pred_imgs)>0
- for p_img in pred_imgs:
- t_img = load_img(tgt_imgs[0])
- p_img = load_img(p_img, size=t_img.shape[2:])
- t_perc_sim = perceptual_sim(p_img, t_img, vgg16).item()
- perc_sim = min(perc_sim, t_perc_sim)
-
- ssim_sim = max(ssim_sim, ssim_metric(p_img, t_img).item())
- psnr_sim = max(psnr_sim, psnr(p_img, t_img).item())
-
- values_percsim += [perc_sim]
- values_ssim += [ssim_sim]
- if psnr_sim != np.float("inf"):
- values_psnr += [psnr_sim]
- else:
- if torch.allclose(p_img, t_img):
- equal_count += 1
- print("{} equal src and wrp images.".format(equal_count))
- else:
- ambig_count += 1
- print("{} ambiguous src and wrp images.".format(ambig_count))
-
- if take_every_other:
- n_valuespercsim = []
- n_valuesssim = []
- n_valuespsnr = []
- for i in range(0, len(values_percsim) // 2):
- n_valuespercsim += [
- min(values_percsim[2 * i], values_percsim[2 * i + 1])
- ]
- n_valuespsnr += [max(values_psnr[2 * i], values_psnr[2 * i + 1])]
- n_valuesssim += [max(values_ssim[2 * i], values_ssim[2 * i + 1])]
-
- values_percsim = n_valuespercsim
- values_ssim = n_valuesssim
- values_psnr = n_valuespsnr
-
- avg_percsim = np.mean(np.array(values_percsim))
- std_percsim = np.std(np.array(values_percsim))
-
- avg_psnr = np.mean(np.array(values_psnr))
- std_psnr = np.std(np.array(values_psnr))
-
- avg_ssim = np.mean(np.array(values_ssim))
- std_ssim = np.std(np.array(values_ssim))
-
- if simple_format:
- # just to make yaml formatting readable
- return {
- "Perceptual similarity": [float(avg_percsim), float(std_percsim)],
- "PSNR": [float(avg_psnr), float(std_psnr)],
- "SSIM": [float(avg_ssim), float(std_ssim)],
- }
- else:
- return {
- "Perceptual similarity": (avg_percsim, std_percsim),
- "PSNR": (avg_psnr, std_psnr),
- "SSIM": (avg_ssim, std_ssim),
- }
-
-
-def compute_perceptual_similarity_from_list_topk(pred_imgs_list, tgt_imgs_list,
- take_every_other, resize=False):
-
- # Load VGG16 for feature similarity
- vgg16 = PNet().to("cuda")
- vgg16.eval()
- vgg16.cuda()
-
- values_percsim = []
- values_ssim = []
- values_psnr = []
- individual_percsim = []
- individual_ssim = []
- individual_psnr = []
- for i, tgt_img in enumerate(tqdm(tgt_imgs_list)):
- pred_imgs = pred_imgs_list[i]
- tgt_imgs = [tgt_img]
- assert len(tgt_imgs) == 1
-
- if type(pred_imgs) != list:
- assert False
- pred_imgs = [pred_imgs]
-
- perc_sim = 10000
- ssim_sim = -10
- psnr_sim = -10
- sample_percsim = list()
- sample_ssim = list()
- sample_psnr = list()
- for p_img in pred_imgs:
- if resize:
- t_img = load_img(tgt_imgs[0], size=(256,256))
- else:
- t_img = load_img(tgt_imgs[0])
- p_img = load_img(p_img, size=t_img.shape[2:])
-
- t_perc_sim = perceptual_sim(p_img, t_img, vgg16).item()
- sample_percsim.append(t_perc_sim)
- perc_sim = min(perc_sim, t_perc_sim)
-
- t_ssim = ssim_metric(p_img, t_img).item()
- sample_ssim.append(t_ssim)
- ssim_sim = max(ssim_sim, t_ssim)
-
- t_psnr = psnr(p_img, t_img).item()
- sample_psnr.append(t_psnr)
- psnr_sim = max(psnr_sim, t_psnr)
-
- values_percsim += [perc_sim]
- values_ssim += [ssim_sim]
- values_psnr += [psnr_sim]
- individual_percsim.append(sample_percsim)
- individual_ssim.append(sample_ssim)
- individual_psnr.append(sample_psnr)
-
- if take_every_other:
- assert False, "Do this later, after specifying topk to get proper results"
- n_valuespercsim = []
- n_valuesssim = []
- n_valuespsnr = []
- for i in range(0, len(values_percsim) // 2):
- n_valuespercsim += [
- min(values_percsim[2 * i], values_percsim[2 * i + 1])
- ]
- n_valuespsnr += [max(values_psnr[2 * i], values_psnr[2 * i + 1])]
- n_valuesssim += [max(values_ssim[2 * i], values_ssim[2 * i + 1])]
-
- values_percsim = n_valuespercsim
- values_ssim = n_valuesssim
- values_psnr = n_valuespsnr
-
- avg_percsim = np.mean(np.array(values_percsim))
- std_percsim = np.std(np.array(values_percsim))
-
- avg_psnr = np.mean(np.array(values_psnr))
- std_psnr = np.std(np.array(values_psnr))
-
- avg_ssim = np.mean(np.array(values_ssim))
- std_ssim = np.std(np.array(values_ssim))
-
- individual_percsim = np.array(individual_percsim)
- individual_psnr = np.array(individual_psnr)
- individual_ssim = np.array(individual_ssim)
-
- return {
- "avg_of_best": {
- "Perceptual similarity": [float(avg_percsim), float(std_percsim)],
- "PSNR": [float(avg_psnr), float(std_psnr)],
- "SSIM": [float(avg_ssim), float(std_ssim)],
- },
- "individual": {
- "PSIM": individual_percsim,
- "PSNR": individual_psnr,
- "SSIM": individual_ssim,
- }
- }
-
-
-if __name__ == "__main__":
- args = argparse.ArgumentParser()
- args.add_argument("--folder", type=str, default="")
- args.add_argument("--pred_image", type=str, default="")
- args.add_argument("--target_image", type=str, default="")
- args.add_argument("--take_every_other", action="store_true", default=False)
- args.add_argument("--output_file", type=str, default="")
-
- opts = args.parse_args()
-
- folder = opts.folder
- pred_img = opts.pred_image
- tgt_img = opts.target_image
-
- results = compute_perceptual_similarity(
- folder, pred_img, tgt_img, opts.take_every_other
- )
-
- f = open(opts.output_file, 'w')
- for key in results:
- print("%s for %s: \n" % (key, opts.folder))
- print(
- "\t {:0.4f} | {:0.4f} \n".format(results[key][0], results[key][1])
- )
-
- f.write("%s for %s: \n" % (key, opts.folder))
- f.write(
- "\t {:0.4f} | {:0.4f} \n".format(results[key][0], results[key][1])
- )
-
- f.close()
diff --git a/One-2-3-45-master 2/ldm/modules/evaluate/frechet_video_distance.py b/One-2-3-45-master 2/ldm/modules/evaluate/frechet_video_distance.py
deleted file mode 100644
index d9e13c41505d9895016cdda1a1fd59aec33ab4d0..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/modules/evaluate/frechet_video_distance.py
+++ /dev/null
@@ -1,147 +0,0 @@
-# coding=utf-8
-# Copyright 2022 The Google Research Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Lint as: python2, python3
-"""Minimal Reference implementation for the Frechet Video Distance (FVD).
-
-FVD is a metric for the quality of video generation models. It is inspired by
-the FID (Frechet Inception Distance) used for images, but uses a different
-embedding to be better suitable for videos.
-"""
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-
-import six
-import tensorflow.compat.v1 as tf
-import tensorflow_gan as tfgan
-import tensorflow_hub as hub
-
-
-def preprocess(videos, target_resolution):
- """Runs some preprocessing on the videos for I3D model.
-
- Args:
- videos: [batch_size, num_frames, height, width, depth] The videos to be
-      preprocessed. We don't care about the specific dtype of the videos; it can
- be anything that tf.image.resize_bilinear accepts. Values are expected to
- be in the range 0-255.
- target_resolution: (width, height): target video resolution
-
- Returns:
- videos: [batch_size, num_frames, height, width, depth]
- """
- videos_shape = list(videos.shape)
- all_frames = tf.reshape(videos, [-1] + videos_shape[-3:])
- resized_videos = tf.image.resize_bilinear(all_frames, size=target_resolution)
- target_shape = [videos_shape[0], -1] + list(target_resolution) + [3]
- output_videos = tf.reshape(resized_videos, target_shape)
- scaled_videos = 2. * tf.cast(output_videos, tf.float32) / 255. - 1
- return scaled_videos
-
-
-def _is_in_graph(tensor_name):
- """Checks whether a given tensor does exists in the graph."""
- try:
- tf.get_default_graph().get_tensor_by_name(tensor_name)
- except KeyError:
- return False
- return True
-
-
-def create_id3_embedding(videos,warmup=False,batch_size=16):
- """Embeds the given videos using the Inflated 3D Convolution ne twork.
-
- Downloads the graph of the I3D from tf.hub and adds it to the graph on the
- first call.
-
- Args:
- videos: [batch_size, num_frames, height=224, width=224, depth=3].
- Expected range is [-1, 1].
-
- Returns:
- embedding: [batch_size, embedding_size]. embedding_size depends
- on the model used.
-
- Raises:
- ValueError: when a provided embedding_layer is not supported.
- """
-
- # batch_size = 16
- module_spec = "https://tfhub.dev/deepmind/i3d-kinetics-400/1"
-
-
- # Making sure that we import the graph separately for
- # each different input video tensor.
- module_name = "fvd_kinetics-400_id3_module_" + six.ensure_str(
- videos.name).replace(":", "_")
-
-
-
- assert_ops = [
- tf.Assert(
- tf.reduce_max(videos) <= 1.001,
- ["max value in frame is > 1", videos]),
- tf.Assert(
- tf.reduce_min(videos) >= -1.001,
- ["min value in frame is < -1", videos]),
- tf.assert_equal(
- tf.shape(videos)[0],
- batch_size, ["invalid frame batch size: ",
- tf.shape(videos)],
- summarize=6),
- ]
- with tf.control_dependencies(assert_ops):
- videos = tf.identity(videos)
-
- module_scope = "%s_apply_default/" % module_name
-
- # To check whether the module has already been loaded into the graph, we look
- # for a given tensor name. If this tensor name exists, we assume the function
- # has been called before and the graph was imported. Otherwise we import it.
- # Note: in theory, the tensor could exist, but have wrong shapes.
-  # This will happen if create_id3_embedding is called with a frames_placeholder
-  # of the wrong size/batch size, because even though that will throw a tf.Assert
-  # at graph-execution time, it will insert the tensor (with wrong shape) into
- # the graph. This is why we need the following assert.
- if warmup:
- video_batch_size = int(videos.shape[0])
- assert video_batch_size in [batch_size, -1, None], f"Invalid batch size {video_batch_size}"
- tensor_name = module_scope + "RGB/inception_i3d/Mean:0"
- if not _is_in_graph(tensor_name):
- i3d_model = hub.Module(module_spec, name=module_name)
- i3d_model(videos)
-
- # gets the kinetics-i3d-400-logits layer
- tensor_name = module_scope + "RGB/inception_i3d/Mean:0"
- tensor = tf.get_default_graph().get_tensor_by_name(tensor_name)
- return tensor
-
-
-def calculate_fvd(real_activations,
- generated_activations):
- """Returns a list of ops that compute metrics as funcs of activations.
-
- Args:
- real_activations: [num_samples, embedding_size]
- generated_activations: [num_samples, embedding_size]
-
- Returns:
- A scalar that contains the requested FVD.
- """
- return tfgan.eval.frechet_classifier_distance_from_activations(
- real_activations, generated_activations)
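-
-
-# Illustrative usage sketch (added for clarity; not part of the original module). It shows
-# one way the helpers above could be wired together in a TF1-style session; `real_videos`
-# and `generated_videos` are hypothetical [16, T, H, W, 3] tensors with values in [0, 255]:
-#
-#   real_emb = create_id3_embedding(preprocess(real_videos, (224, 224)), batch_size=16)
-#   gen_emb = create_id3_embedding(preprocess(generated_videos, (224, 224)), batch_size=16)
-#   fvd = calculate_fvd(real_emb, gen_emb)
-#   with tf.Session() as sess:
-#       sess.run(tf.global_variables_initializer())
-#       sess.run(tf.tables_initializer())
-#       print("FVD:", sess.run(fvd))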
diff --git a/One-2-3-45-master 2/ldm/modules/evaluate/ssim.py b/One-2-3-45-master 2/ldm/modules/evaluate/ssim.py
deleted file mode 100644
index 4e8883ccb3b30455a76caf2e4d1e04745f75d214..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/modules/evaluate/ssim.py
+++ /dev/null
@@ -1,124 +0,0 @@
-# MIT Licence
-
-# Methods to predict the SSIM, taken from
-# https://github.com/Po-Hsun-Su/pytorch-ssim/blob/master/pytorch_ssim/__init__.py
-
-from math import exp
-
-import torch
-import torch.nn.functional as F
-from torch.autograd import Variable
-
-def gaussian(window_size, sigma):
- gauss = torch.Tensor(
- [
- exp(-((x - window_size // 2) ** 2) / float(2 * sigma ** 2))
- for x in range(window_size)
- ]
- )
- return gauss / gauss.sum()
-
-
-def create_window(window_size, channel):
- _1D_window = gaussian(window_size, 1.5).unsqueeze(1)
- _2D_window = _1D_window.mm(_1D_window.t()).float().unsqueeze(0).unsqueeze(0)
- window = Variable(
- _2D_window.expand(channel, 1, window_size, window_size).contiguous()
- )
- return window
-
-
-def _ssim(
- img1, img2, window, window_size, channel, mask=None, size_average=True
-):
- mu1 = F.conv2d(img1, window, padding=window_size // 2, groups=channel)
- mu2 = F.conv2d(img2, window, padding=window_size // 2, groups=channel)
-
- mu1_sq = mu1.pow(2)
- mu2_sq = mu2.pow(2)
- mu1_mu2 = mu1 * mu2
-
- sigma1_sq = (
- F.conv2d(img1 * img1, window, padding=window_size // 2, groups=channel)
- - mu1_sq
- )
- sigma2_sq = (
- F.conv2d(img2 * img2, window, padding=window_size // 2, groups=channel)
- - mu2_sq
- )
- sigma12 = (
- F.conv2d(img1 * img2, window, padding=window_size // 2, groups=channel)
- - mu1_mu2
- )
-
- C1 = (0.01) ** 2
- C2 = (0.03) ** 2
-
- ssim_map = ((2 * mu1_mu2 + C1) * (2 * sigma12 + C2)) / (
- (mu1_sq + mu2_sq + C1) * (sigma1_sq + sigma2_sq + C2)
- )
-
- if not (mask is None):
- b = mask.size(0)
- ssim_map = ssim_map.mean(dim=1, keepdim=True) * mask
- ssim_map = ssim_map.view(b, -1).sum(dim=1) / mask.view(b, -1).sum(
- dim=1
- ).clamp(min=1)
- return ssim_map
-
- if size_average:
- return ssim_map.mean()
- else:
- return ssim_map.mean(1).mean(1).mean(1)
-
-
-class SSIM(torch.nn.Module):
- def __init__(self, window_size=11, size_average=True):
- super(SSIM, self).__init__()
- self.window_size = window_size
- self.size_average = size_average
- self.channel = 1
- self.window = create_window(window_size, self.channel)
-
- def forward(self, img1, img2, mask=None):
- (_, channel, _, _) = img1.size()
-
- if (
- channel == self.channel
- and self.window.data.type() == img1.data.type()
- ):
- window = self.window
- else:
- window = create_window(self.window_size, channel)
-
- if img1.is_cuda:
- window = window.cuda(img1.get_device())
- window = window.type_as(img1)
-
- self.window = window
- self.channel = channel
-
- return _ssim(
- img1,
- img2,
- window,
- self.window_size,
- channel,
- mask,
- self.size_average,
- )
-
-
-def ssim(img1, img2, window_size=11, mask=None, size_average=True):
- (_, channel, _, _) = img1.size()
- window = create_window(window_size, channel)
-
- if img1.is_cuda:
- window = window.cuda(img1.get_device())
- window = window.type_as(img1)
-
- return _ssim(img1, img2, window, window_size, channel, mask, size_average)
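-
-
-# Illustrative usage sketch (added; not part of the original file). `pred` and `target`
-# are hypothetical (N, C, H, W) tensors with values in [0, 1]:
-#
-#   pred = torch.rand(2, 3, 64, 64)
-#   target = torch.rand(2, 3, 64, 64)
-#   score = ssim(pred, target, window_size=11)           # scalar mean SSIM
-#   module_score = SSIM(window_size=11)(pred, target)    # same computation as an nn.Module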
diff --git a/One-2-3-45-master 2/ldm/modules/evaluate/torch_frechet_video_distance.py b/One-2-3-45-master 2/ldm/modules/evaluate/torch_frechet_video_distance.py
deleted file mode 100644
index 04856b828a17cdc97fa88a7b9d2f7fe0f735b3fc..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/modules/evaluate/torch_frechet_video_distance.py
+++ /dev/null
@@ -1,294 +0,0 @@
-# based on https://github.com/universome/fvd-comparison/blob/master/compare_models.py; huge thanks!
-import os
-import numpy as np
-import io
-import re
-import requests
-import html
-import hashlib
-import urllib
-import urllib.request
-import scipy.linalg
-import multiprocessing as mp
-import glob
-
-
-from tqdm import tqdm
-from typing import Any, List, Tuple, Union, Dict, Callable
-
-from torchvision.io import read_video
-import torch; torch.set_grad_enabled(False)
-from einops import rearrange
-
-from nitro.util import isvideo
-
-def compute_frechet_distance(mu_sample,sigma_sample,mu_ref,sigma_ref) -> float:
- print('Calculate frechet distance...')
- m = np.square(mu_sample - mu_ref).sum()
- s, _ = scipy.linalg.sqrtm(np.dot(sigma_sample, sigma_ref), disp=False) # pylint: disable=no-member
- fid = np.real(m + np.trace(sigma_sample + sigma_ref - s * 2))
-
- return float(fid)
-
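-# Note added for clarity: this is the Frechet distance between two Gaussians,
-# FVD = ||mu_sample - mu_ref||^2 + Tr(Sigma_sample + Sigma_ref - 2 (Sigma_sample Sigma_ref)^(1/2)),
-# with the matrix square root computed by scipy.linalg.sqrtm above.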
-
-def compute_stats(feats: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
- mu = feats.mean(axis=0) # [d]
- sigma = np.cov(feats, rowvar=False) # [d, d]
-
- return mu, sigma
-
-
-def open_url(url: str, num_attempts: int = 10, verbose: bool = True, return_filename: bool = False) -> Any:
- """Download the given URL and return a binary-mode file object to access the data."""
- assert num_attempts >= 1
-
-    # Doesn't look like a URL scheme, so interpret it as a local filename.
- if not re.match('^[a-z]+://', url):
- return url if return_filename else open(url, "rb")
-
- # Handle file URLs. This code handles unusual file:// patterns that
- # arise on Windows:
- #
- # file:///c:/foo.txt
- #
- # which would translate to a local '/c:/foo.txt' filename that's
- # invalid. Drop the forward slash for such pathnames.
- #
- # If you touch this code path, you should test it on both Linux and
- # Windows.
- #
-    # Some internet resources suggest using urllib.request.url2pathname(), but
-    # that converts forward slashes to backslashes and this causes
- # its own set of problems.
- if url.startswith('file://'):
- filename = urllib.parse.urlparse(url).path
- if re.match(r'^/[a-zA-Z]:', filename):
- filename = filename[1:]
- return filename if return_filename else open(filename, "rb")
-
- url_md5 = hashlib.md5(url.encode("utf-8")).hexdigest()
-
- # Download.
- url_name = None
- url_data = None
- with requests.Session() as session:
- if verbose:
- print("Downloading %s ..." % url, end="", flush=True)
- for attempts_left in reversed(range(num_attempts)):
- try:
- with session.get(url) as res:
- res.raise_for_status()
- if len(res.content) == 0:
- raise IOError("No data received")
-
- if len(res.content) < 8192:
- content_str = res.content.decode("utf-8")
- if "download_warning" in res.headers.get("Set-Cookie", ""):
- links = [html.unescape(link) for link in content_str.split('"') if "export=download" in link]
- if len(links) == 1:
- url = requests.compat.urljoin(url, links[0])
- raise IOError("Google Drive virus checker nag")
- if "Google Drive - Quota exceeded" in content_str:
- raise IOError("Google Drive download quota exceeded -- please try again later")
-
- match = re.search(r'filename="([^"]*)"', res.headers.get("Content-Disposition", ""))
- url_name = match[1] if match else url
- url_data = res.content
- if verbose:
- print(" done")
- break
- except KeyboardInterrupt:
- raise
- except:
- if not attempts_left:
- if verbose:
- print(" failed")
- raise
- if verbose:
- print(".", end="", flush=True)
-
- # Return data as file object.
- assert not return_filename
- return io.BytesIO(url_data)
-
-def load_video(ip):
- vid, *_ = read_video(ip)
- vid = rearrange(vid, 't h w c -> t c h w').to(torch.uint8)
- return vid
-
-def get_data_from_str(input_str,nprc = None):
- assert os.path.isdir(input_str), f'Specified input folder "{input_str}" is not a directory'
- vid_filelist = glob.glob(os.path.join(input_str,'*.mp4'))
- print(f'Found {len(vid_filelist)} videos in dir {input_str}')
-
- if nprc is None:
- try:
- nprc = mp.cpu_count()
- except NotImplementedError:
-            print('WARNING: cpu_count() not available, using only 1 cpu for video loading')
- nprc = 1
-
- pool = mp.Pool(processes=nprc)
-
- vids = []
- for v in tqdm(pool.imap_unordered(load_video,vid_filelist),total=len(vid_filelist),desc='Loading videos...'):
- vids.append(v)
-
-
- vids = torch.stack(vids,dim=0).float()
-
- return vids
-
-def get_stats(stats):
- assert os.path.isfile(stats) and stats.endswith('.npz'), f'no stats found under {stats}'
-
- print(f'Using precomputed statistics under {stats}')
- stats = np.load(stats)
- stats = {key: stats[key] for key in stats.files}
-
- return stats
-
-
-
-
-@torch.no_grad()
-def compute_fvd(ref_input, sample_input, bs=32,
- ref_stats=None,
- sample_stats=None,
- nprc_load=None):
-
-
-
- calc_stats = ref_stats is None or sample_stats is None
-
- if calc_stats:
-
- only_ref = sample_stats is not None
- only_sample = ref_stats is not None
-
-
- if isinstance(ref_input,str) and not only_sample:
- ref_input = get_data_from_str(ref_input,nprc_load)
-
- if isinstance(sample_input, str) and not only_ref:
- sample_input = get_data_from_str(sample_input, nprc_load)
-
- stats = compute_statistics(sample_input,ref_input,
- device='cuda' if torch.cuda.is_available() else 'cpu',
- bs=bs,
- only_ref=only_ref,
- only_sample=only_sample)
-
- if only_ref:
- stats.update(get_stats(sample_stats))
- elif only_sample:
- stats.update(get_stats(ref_stats))
-
-
-
- else:
- stats = get_stats(sample_stats)
- stats.update(get_stats(ref_stats))
-
- fvd = compute_frechet_distance(**stats)
-
- return {'FVD' : fvd,}
-
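-# Illustrative usage sketch (added; not part of the original file). The directory paths
-# are hypothetical and are expected to contain .mp4 files:
-#
-#   scores = compute_fvd('data/real_videos', 'data/generated_videos', bs=16)
-#   print(scores['FVD'])
-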
-
-@torch.no_grad()
-def compute_statistics(videos_fake, videos_real, device: str='cuda', bs=32, only_ref=False,only_sample=False) -> Dict:
- detector_url = 'https://www.dropbox.com/s/ge9e5ujwgetktms/i3d_torchscript.pt?dl=1'
- detector_kwargs = dict(rescale=True, resize=True, return_features=True) # Return raw features before the softmax layer.
-
- with open_url(detector_url, verbose=False) as f:
- detector = torch.jit.load(f).eval().to(device)
-
-
-
- assert not (only_sample and only_ref), 'only_ref and only_sample arguments are mutually exclusive'
-
- ref_embed, sample_embed = [], []
-
- info = f'Computing I3D activations for FVD score with batch size {bs}'
-
- if only_ref:
-
- if not isvideo(videos_real):
-            # if not a video tensor, we assume numpy arrays of shape (n_vids, t, h, w, c) in range [0, 255]
- videos_real = torch.from_numpy(videos_real).permute(0, 4, 1, 2, 3).float()
- print(videos_real.shape)
-
- if videos_real.shape[0] % bs == 0:
- n_secs = videos_real.shape[0] // bs
- else:
- n_secs = videos_real.shape[0] // bs + 1
-
- videos_real = torch.tensor_split(videos_real, n_secs, dim=0)
-
- for ref_v in tqdm(videos_real, total=len(videos_real),desc=info):
-
- feats_ref = detector(ref_v.to(device).contiguous(), **detector_kwargs).cpu().numpy()
- ref_embed.append(feats_ref)
-
- elif only_sample:
-
- if not isvideo(videos_fake):
-            # if not a video tensor, we assume numpy arrays of shape (n_vids, t, h, w, c) in range [0, 255]
- videos_fake = torch.from_numpy(videos_fake).permute(0, 4, 1, 2, 3).float()
- print(videos_fake.shape)
-
- if videos_fake.shape[0] % bs == 0:
- n_secs = videos_fake.shape[0] // bs
- else:
- n_secs = videos_fake.shape[0] // bs + 1
-
-        videos_fake = torch.tensor_split(videos_fake, n_secs, dim=0)
-
-        for sample_v in tqdm(videos_fake, total=len(videos_fake), desc=info):
- feats_sample = detector(sample_v.to(device).contiguous(), **detector_kwargs).cpu().numpy()
- sample_embed.append(feats_sample)
-
-
- else:
-
- if not isvideo(videos_real):
-            # if not a video tensor, we assume numpy arrays of shape (n_vids, t, h, w, c) in range [0, 255]
- videos_real = torch.from_numpy(videos_real).permute(0, 4, 1, 2, 3).float()
-
- if not isvideo(videos_fake):
- videos_fake = torch.from_numpy(videos_fake).permute(0, 4, 1, 2, 3).float()
-
- if videos_fake.shape[0] % bs == 0:
- n_secs = videos_fake.shape[0] // bs
- else:
- n_secs = videos_fake.shape[0] // bs + 1
-
- videos_real = torch.tensor_split(videos_real, n_secs, dim=0)
- videos_fake = torch.tensor_split(videos_fake, n_secs, dim=0)
-
- for ref_v, sample_v in tqdm(zip(videos_real,videos_fake),total=len(videos_fake),desc=info):
- # print(ref_v.shape)
- # ref_v = torch.nn.functional.interpolate(ref_v, size=(sample_v.shape[2], 256, 256), mode='trilinear', align_corners=False)
- # sample_v = torch.nn.functional.interpolate(sample_v, size=(sample_v.shape[2], 256, 256), mode='trilinear', align_corners=False)
-
-
- feats_sample = detector(sample_v.to(device).contiguous(), **detector_kwargs).cpu().numpy()
- feats_ref = detector(ref_v.to(device).contiguous(), **detector_kwargs).cpu().numpy()
- sample_embed.append(feats_sample)
- ref_embed.append(feats_ref)
-
- out = dict()
- if len(sample_embed) > 0:
- sample_embed = np.concatenate(sample_embed,axis=0)
- mu_sample, sigma_sample = compute_stats(sample_embed)
- out.update({'mu_sample': mu_sample,
- 'sigma_sample': sigma_sample})
-
- if len(ref_embed) > 0:
- ref_embed = np.concatenate(ref_embed,axis=0)
- mu_ref, sigma_ref = compute_stats(ref_embed)
- out.update({'mu_ref': mu_ref,
- 'sigma_ref': sigma_ref})
-
-
- return out
diff --git a/One-2-3-45-master 2/ldm/modules/image_degradation/__init__.py b/One-2-3-45-master 2/ldm/modules/image_degradation/__init__.py
deleted file mode 100644
index 7836cada81f90ded99c58d5942eea4c3477f58fc..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/modules/image_degradation/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-from ldm.modules.image_degradation.bsrgan import degradation_bsrgan_variant as degradation_fn_bsr
-from ldm.modules.image_degradation.bsrgan_light import degradation_bsrgan_variant as degradation_fn_bsr_light
diff --git a/One-2-3-45-master 2/ldm/modules/image_degradation/bsrgan.py b/One-2-3-45-master 2/ldm/modules/image_degradation/bsrgan.py
deleted file mode 100644
index 32ef56169978e550090261cddbcf5eb611a6173b..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/modules/image_degradation/bsrgan.py
+++ /dev/null
@@ -1,730 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-# --------------------------------------------
-# Super-Resolution
-# --------------------------------------------
-#
-# Kai Zhang (cskaizhang@gmail.com)
-# https://github.com/cszn
-# From 2019/03--2021/08
-# --------------------------------------------
-"""
-
-import numpy as np
-import cv2
-import torch
-
-from functools import partial
-import random
-from scipy import ndimage
-import scipy
-import scipy.stats as ss
-from scipy.interpolate import interp2d
-from scipy.linalg import orth
-import albumentations
-
-import ldm.modules.image_degradation.utils_image as util
-
-
-def modcrop_np(img, sf):
- '''
- Args:
- img: numpy image, WxH or WxHxC
- sf: scale factor
- Return:
- cropped image
- '''
- w, h = img.shape[:2]
- im = np.copy(img)
- return im[:w - w % sf, :h - h % sf, ...]
-
-
-"""
-# --------------------------------------------
-# anisotropic Gaussian kernels
-# --------------------------------------------
-"""
-
-
-def analytic_kernel(k):
- """Calculate the X4 kernel from the X2 kernel (for proof see appendix in paper)"""
- k_size = k.shape[0]
-    # Calculate the big kernel's size
- big_k = np.zeros((3 * k_size - 2, 3 * k_size - 2))
- # Loop over the small kernel to fill the big one
- for r in range(k_size):
- for c in range(k_size):
- big_k[2 * r:2 * r + k_size, 2 * c:2 * c + k_size] += k[r, c] * k
- # Crop the edges of the big kernel to ignore very small values and increase run time of SR
- crop = k_size // 2
- cropped_big_k = big_k[crop:-crop, crop:-crop]
- # Normalize to 1
- return cropped_big_k / cropped_big_k.sum()
-
-
-def anisotropic_Gaussian(ksize=15, theta=np.pi, l1=6, l2=6):
- """ generate an anisotropic Gaussian kernel
- Args:
- ksize : e.g., 15, kernel size
- theta : [0, pi], rotation angle range
- l1 : [0.1,50], scaling of eigenvalues
- l2 : [0.1,l1], scaling of eigenvalues
- If l1 = l2, will get an isotropic Gaussian kernel.
- Returns:
- k : kernel
- """
-
- v = np.dot(np.array([[np.cos(theta), -np.sin(theta)], [np.sin(theta), np.cos(theta)]]), np.array([1., 0.]))
- V = np.array([[v[0], v[1]], [v[1], -v[0]]])
- D = np.array([[l1, 0], [0, l2]])
- Sigma = np.dot(np.dot(V, D), np.linalg.inv(V))
- k = gm_blur_kernel(mean=[0, 0], cov=Sigma, size=ksize)
-
- return k
-
-
-def gm_blur_kernel(mean, cov, size=15):
- center = size / 2.0 + 0.5
- k = np.zeros([size, size])
- for y in range(size):
- for x in range(size):
- cy = y - center + 1
- cx = x - center + 1
- k[y, x] = ss.multivariate_normal.pdf([cx, cy], mean=mean, cov=cov)
-
- k = k / np.sum(k)
- return k
-
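-# Illustrative example (added; not part of the original file): sample an anisotropic blur
-# kernel and apply it to a hypothetical float HxWxC image `img`, the same way the
-# degradation functions below do:
-#
-#   k = anisotropic_Gaussian(ksize=15, theta=np.pi / 4, l1=6, l2=2)
-#   img_blurred = ndimage.filters.convolve(img, np.expand_dims(k, axis=2), mode='mirror')
-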
-
-def shift_pixel(x, sf, upper_left=True):
- """shift pixel for super-resolution with different scale factors
- Args:
- x: WxHxC or WxH
- sf: scale factor
- upper_left: shift direction
- """
- h, w = x.shape[:2]
- shift = (sf - 1) * 0.5
- xv, yv = np.arange(0, w, 1.0), np.arange(0, h, 1.0)
- if upper_left:
- x1 = xv + shift
- y1 = yv + shift
- else:
- x1 = xv - shift
- y1 = yv - shift
-
- x1 = np.clip(x1, 0, w - 1)
- y1 = np.clip(y1, 0, h - 1)
-
- if x.ndim == 2:
- x = interp2d(xv, yv, x)(x1, y1)
- if x.ndim == 3:
- for i in range(x.shape[-1]):
- x[:, :, i] = interp2d(xv, yv, x[:, :, i])(x1, y1)
-
- return x
-
-
-def blur(x, k):
- '''
- x: image, NxcxHxW
- k: kernel, Nx1xhxw
- '''
- n, c = x.shape[:2]
- p1, p2 = (k.shape[-2] - 1) // 2, (k.shape[-1] - 1) // 2
- x = torch.nn.functional.pad(x, pad=(p1, p2, p1, p2), mode='replicate')
- k = k.repeat(1, c, 1, 1)
- k = k.view(-1, 1, k.shape[2], k.shape[3])
- x = x.view(1, -1, x.shape[2], x.shape[3])
- x = torch.nn.functional.conv2d(x, k, bias=None, stride=1, padding=0, groups=n * c)
- x = x.view(n, c, x.shape[2], x.shape[3])
-
- return x
-
-
-def gen_kernel(k_size=np.array([15, 15]), scale_factor=np.array([4, 4]), min_var=0.6, max_var=10., noise_level=0):
- """"
- # modified version of https://github.com/assafshocher/BlindSR_dataset_generator
- # Kai Zhang
- # min_var = 0.175 * sf # variance of the gaussian kernel will be sampled between min_var and max_var
- # max_var = 2.5 * sf
- """
- # Set random eigen-vals (lambdas) and angle (theta) for COV matrix
- lambda_1 = min_var + np.random.rand() * (max_var - min_var)
- lambda_2 = min_var + np.random.rand() * (max_var - min_var)
- theta = np.random.rand() * np.pi # random theta
- noise = -noise_level + np.random.rand(*k_size) * noise_level * 2
-
- # Set COV matrix using Lambdas and Theta
- LAMBDA = np.diag([lambda_1, lambda_2])
- Q = np.array([[np.cos(theta), -np.sin(theta)],
- [np.sin(theta), np.cos(theta)]])
- SIGMA = Q @ LAMBDA @ Q.T
- INV_SIGMA = np.linalg.inv(SIGMA)[None, None, :, :]
-
- # Set expectation position (shifting kernel for aligned image)
- MU = k_size // 2 - 0.5 * (scale_factor - 1) # - 0.5 * (scale_factor - k_size % 2)
- MU = MU[None, None, :, None]
-
- # Create meshgrid for Gaussian
- [X, Y] = np.meshgrid(range(k_size[0]), range(k_size[1]))
- Z = np.stack([X, Y], 2)[:, :, :, None]
-
-    # Calculate the Gaussian for every pixel of the kernel
- ZZ = Z - MU
- ZZ_t = ZZ.transpose(0, 1, 3, 2)
- raw_kernel = np.exp(-0.5 * np.squeeze(ZZ_t @ INV_SIGMA @ ZZ)) * (1 + noise)
-
- # shift the kernel so it will be centered
- # raw_kernel_centered = kernel_shift(raw_kernel, scale_factor)
-
- # Normalize the kernel and return
- # kernel = raw_kernel_centered / np.sum(raw_kernel_centered)
- kernel = raw_kernel / np.sum(raw_kernel)
- return kernel
-
-
-def fspecial_gaussian(hsize, sigma):
- hsize = [hsize, hsize]
- siz = [(hsize[0] - 1.0) / 2.0, (hsize[1] - 1.0) / 2.0]
- std = sigma
- [x, y] = np.meshgrid(np.arange(-siz[1], siz[1] + 1), np.arange(-siz[0], siz[0] + 1))
- arg = -(x * x + y * y) / (2 * std * std)
- h = np.exp(arg)
-    h[h < np.finfo(float).eps * h.max()] = 0
- sumh = h.sum()
- if sumh != 0:
- h = h / sumh
- return h
-
-
-def fspecial_laplacian(alpha):
- alpha = max([0, min([alpha, 1])])
- h1 = alpha / (alpha + 1)
- h2 = (1 - alpha) / (alpha + 1)
- h = [[h1, h2, h1], [h2, -4 / (alpha + 1), h2], [h1, h2, h1]]
- h = np.array(h)
- return h
-
-
-def fspecial(filter_type, *args, **kwargs):
- '''
- python code from:
- https://github.com/ronaldosena/imagens-medicas-2/blob/40171a6c259edec7827a6693a93955de2bd39e76/Aulas/aula_2_-_uniform_filter/matlab_fspecial.py
- '''
- if filter_type == 'gaussian':
- return fspecial_gaussian(*args, **kwargs)
- if filter_type == 'laplacian':
- return fspecial_laplacian(*args, **kwargs)
-
-
-"""
-# --------------------------------------------
-# degradation models
-# --------------------------------------------
-"""
-
-
-def bicubic_degradation(x, sf=3):
- '''
- Args:
- x: HxWxC image, [0, 1]
- sf: down-scale factor
- Return:
- bicubicly downsampled LR image
- '''
- x = util.imresize_np(x, scale=1 / sf)
- return x
-
-
-def srmd_degradation(x, k, sf=3):
- ''' blur + bicubic downsampling
- Args:
- x: HxWxC image, [0, 1]
- k: hxw, double
- sf: down-scale factor
- Return:
- downsampled LR image
- Reference:
- @inproceedings{zhang2018learning,
- title={Learning a single convolutional super-resolution network for multiple degradations},
- author={Zhang, Kai and Zuo, Wangmeng and Zhang, Lei},
- booktitle={IEEE Conference on Computer Vision and Pattern Recognition},
- pages={3262--3271},
- year={2018}
- }
- '''
- x = ndimage.filters.convolve(x, np.expand_dims(k, axis=2), mode='wrap') # 'nearest' | 'mirror'
- x = bicubic_degradation(x, sf=sf)
- return x
-
-
-def dpsr_degradation(x, k, sf=3):
- ''' bicubic downsampling + blur
- Args:
- x: HxWxC image, [0, 1]
- k: hxw, double
- sf: down-scale factor
- Return:
- downsampled LR image
- Reference:
- @inproceedings{zhang2019deep,
- title={Deep Plug-and-Play Super-Resolution for Arbitrary Blur Kernels},
- author={Zhang, Kai and Zuo, Wangmeng and Zhang, Lei},
- booktitle={IEEE Conference on Computer Vision and Pattern Recognition},
- pages={1671--1681},
- year={2019}
- }
- '''
- x = bicubic_degradation(x, sf=sf)
- x = ndimage.filters.convolve(x, np.expand_dims(k, axis=2), mode='wrap')
- return x
-
-
-def classical_degradation(x, k, sf=3):
- ''' blur + downsampling
- Args:
- x: HxWxC image, [0, 1]/[0, 255]
- k: hxw, double
- sf: down-scale factor
- Return:
- downsampled LR image
- '''
- x = ndimage.filters.convolve(x, np.expand_dims(k, axis=2), mode='wrap')
- # x = filters.correlate(x, np.expand_dims(np.flip(k), axis=2))
- st = 0
- return x[st::sf, st::sf, ...]
-
-
-def add_sharpening(img, weight=0.5, radius=50, threshold=10):
- """USM sharpening. borrowed from real-ESRGAN
- Input image: I; Blurry image: B.
- 1. K = I + weight * (I - B)
- 2. Mask = 1 if abs(I - B) > threshold, else: 0
- 3. Blur mask:
- 4. Out = Mask * K + (1 - Mask) * I
- Args:
- img (Numpy array): Input image, HWC, BGR; float32, [0, 1].
-        weight (float): Sharp weight. Default: 0.5.
-        radius (float): Kernel size of Gaussian blur. Default: 50.
-        threshold (int): Threshold on the residual (scaled to [0, 255]) used to build the mask. Default: 10.
- """
- if radius % 2 == 0:
- radius += 1
- blur = cv2.GaussianBlur(img, (radius, radius), 0)
- residual = img - blur
- mask = np.abs(residual) * 255 > threshold
- mask = mask.astype('float32')
- soft_mask = cv2.GaussianBlur(mask, (radius, radius), 0)
-
- K = img + weight * residual
- K = np.clip(K, 0, 1)
- return soft_mask * K + (1 - soft_mask) * img
-
-
-def add_blur(img, sf=4):
- wd2 = 4.0 + sf
- wd = 2.0 + 0.2 * sf
- if random.random() < 0.5:
- l1 = wd2 * random.random()
- l2 = wd2 * random.random()
- k = anisotropic_Gaussian(ksize=2 * random.randint(2, 11) + 3, theta=random.random() * np.pi, l1=l1, l2=l2)
- else:
- k = fspecial('gaussian', 2 * random.randint(2, 11) + 3, wd * random.random())
- img = ndimage.filters.convolve(img, np.expand_dims(k, axis=2), mode='mirror')
-
- return img
-
-
-def add_resize(img, sf=4):
- rnum = np.random.rand()
- if rnum > 0.8: # up
- sf1 = random.uniform(1, 2)
- elif rnum < 0.7: # down
- sf1 = random.uniform(0.5 / sf, 1)
- else:
- sf1 = 1.0
- img = cv2.resize(img, (int(sf1 * img.shape[1]), int(sf1 * img.shape[0])), interpolation=random.choice([1, 2, 3]))
- img = np.clip(img, 0.0, 1.0)
-
- return img
-
-
-# def add_Gaussian_noise(img, noise_level1=2, noise_level2=25):
-# noise_level = random.randint(noise_level1, noise_level2)
-# rnum = np.random.rand()
-# if rnum > 0.6: # add color Gaussian noise
-# img += np.random.normal(0, noise_level / 255.0, img.shape).astype(np.float32)
-# elif rnum < 0.4: # add grayscale Gaussian noise
-# img += np.random.normal(0, noise_level / 255.0, (*img.shape[:2], 1)).astype(np.float32)
-# else: # add noise
-# L = noise_level2 / 255.
-# D = np.diag(np.random.rand(3))
-# U = orth(np.random.rand(3, 3))
-# conv = np.dot(np.dot(np.transpose(U), D), U)
-# img += np.random.multivariate_normal([0, 0, 0], np.abs(L ** 2 * conv), img.shape[:2]).astype(np.float32)
-# img = np.clip(img, 0.0, 1.0)
-# return img
-
-def add_Gaussian_noise(img, noise_level1=2, noise_level2=25):
- noise_level = random.randint(noise_level1, noise_level2)
- rnum = np.random.rand()
- if rnum > 0.6: # add color Gaussian noise
- img = img + np.random.normal(0, noise_level / 255.0, img.shape).astype(np.float32)
- elif rnum < 0.4: # add grayscale Gaussian noise
- img = img + np.random.normal(0, noise_level / 255.0, (*img.shape[:2], 1)).astype(np.float32)
- else: # add noise
- L = noise_level2 / 255.
- D = np.diag(np.random.rand(3))
- U = orth(np.random.rand(3, 3))
- conv = np.dot(np.dot(np.transpose(U), D), U)
- img = img + np.random.multivariate_normal([0, 0, 0], np.abs(L ** 2 * conv), img.shape[:2]).astype(np.float32)
- img = np.clip(img, 0.0, 1.0)
- return img
-
-
-def add_speckle_noise(img, noise_level1=2, noise_level2=25):
- noise_level = random.randint(noise_level1, noise_level2)
- img = np.clip(img, 0.0, 1.0)
- rnum = random.random()
- if rnum > 0.6:
- img += img * np.random.normal(0, noise_level / 255.0, img.shape).astype(np.float32)
- elif rnum < 0.4:
- img += img * np.random.normal(0, noise_level / 255.0, (*img.shape[:2], 1)).astype(np.float32)
- else:
- L = noise_level2 / 255.
- D = np.diag(np.random.rand(3))
- U = orth(np.random.rand(3, 3))
- conv = np.dot(np.dot(np.transpose(U), D), U)
- img += img * np.random.multivariate_normal([0, 0, 0], np.abs(L ** 2 * conv), img.shape[:2]).astype(np.float32)
- img = np.clip(img, 0.0, 1.0)
- return img
-
-
-def add_Poisson_noise(img):
- img = np.clip((img * 255.0).round(), 0, 255) / 255.
- vals = 10 ** (2 * random.random() + 2.0) # [2, 4]
- if random.random() < 0.5:
- img = np.random.poisson(img * vals).astype(np.float32) / vals
- else:
- img_gray = np.dot(img[..., :3], [0.299, 0.587, 0.114])
- img_gray = np.clip((img_gray * 255.0).round(), 0, 255) / 255.
- noise_gray = np.random.poisson(img_gray * vals).astype(np.float32) / vals - img_gray
- img += noise_gray[:, :, np.newaxis]
- img = np.clip(img, 0.0, 1.0)
- return img
-
-
-def add_JPEG_noise(img):
- quality_factor = random.randint(30, 95)
- img = cv2.cvtColor(util.single2uint(img), cv2.COLOR_RGB2BGR)
- result, encimg = cv2.imencode('.jpg', img, [int(cv2.IMWRITE_JPEG_QUALITY), quality_factor])
- img = cv2.imdecode(encimg, 1)
- img = cv2.cvtColor(util.uint2single(img), cv2.COLOR_BGR2RGB)
- return img
-
-
-def random_crop(lq, hq, sf=4, lq_patchsize=64):
- h, w = lq.shape[:2]
- rnd_h = random.randint(0, h - lq_patchsize)
- rnd_w = random.randint(0, w - lq_patchsize)
- lq = lq[rnd_h:rnd_h + lq_patchsize, rnd_w:rnd_w + lq_patchsize, :]
-
- rnd_h_H, rnd_w_H = int(rnd_h * sf), int(rnd_w * sf)
- hq = hq[rnd_h_H:rnd_h_H + lq_patchsize * sf, rnd_w_H:rnd_w_H + lq_patchsize * sf, :]
- return lq, hq
-
-
-def degradation_bsrgan(img, sf=4, lq_patchsize=72, isp_model=None):
- """
- This is the degradation model of BSRGAN from the paper
- "Designing a Practical Degradation Model for Deep Blind Image Super-Resolution"
- ----------
-    img: HXWXC, [0, 1], its size should be larger than (lq_patchsizexsf)x(lq_patchsizexsf)
- sf: scale factor
- isp_model: camera ISP model
- Returns
- -------
- img: low-quality patch, size: lq_patchsizeXlq_patchsizeXC, range: [0, 1]
- hq: corresponding high-quality patch, size: (lq_patchsizexsf)X(lq_patchsizexsf)XC, range: [0, 1]
- """
- isp_prob, jpeg_prob, scale2_prob = 0.25, 0.9, 0.25
- sf_ori = sf
-
- h1, w1 = img.shape[:2]
- img = img.copy()[:w1 - w1 % sf, :h1 - h1 % sf, ...] # mod crop
- h, w = img.shape[:2]
-
- if h < lq_patchsize * sf or w < lq_patchsize * sf:
- raise ValueError(f'img size ({h1}X{w1}) is too small!')
-
- hq = img.copy()
-
- if sf == 4 and random.random() < scale2_prob: # downsample1
- if np.random.rand() < 0.5:
- img = cv2.resize(img, (int(1 / 2 * img.shape[1]), int(1 / 2 * img.shape[0])),
- interpolation=random.choice([1, 2, 3]))
- else:
- img = util.imresize_np(img, 1 / 2, True)
- img = np.clip(img, 0.0, 1.0)
- sf = 2
-
- shuffle_order = random.sample(range(7), 7)
- idx1, idx2 = shuffle_order.index(2), shuffle_order.index(3)
- if idx1 > idx2: # keep downsample3 last
- shuffle_order[idx1], shuffle_order[idx2] = shuffle_order[idx2], shuffle_order[idx1]
-
- for i in shuffle_order:
-
- if i == 0:
- img = add_blur(img, sf=sf)
-
- elif i == 1:
- img = add_blur(img, sf=sf)
-
- elif i == 2:
- a, b = img.shape[1], img.shape[0]
- # downsample2
- if random.random() < 0.75:
- sf1 = random.uniform(1, 2 * sf)
- img = cv2.resize(img, (int(1 / sf1 * img.shape[1]), int(1 / sf1 * img.shape[0])),
- interpolation=random.choice([1, 2, 3]))
- else:
- k = fspecial('gaussian', 25, random.uniform(0.1, 0.6 * sf))
- k_shifted = shift_pixel(k, sf)
- k_shifted = k_shifted / k_shifted.sum() # blur with shifted kernel
- img = ndimage.filters.convolve(img, np.expand_dims(k_shifted, axis=2), mode='mirror')
- img = img[0::sf, 0::sf, ...] # nearest downsampling
- img = np.clip(img, 0.0, 1.0)
-
- elif i == 3:
- # downsample3
- img = cv2.resize(img, (int(1 / sf * a), int(1 / sf * b)), interpolation=random.choice([1, 2, 3]))
- img = np.clip(img, 0.0, 1.0)
-
- elif i == 4:
- # add Gaussian noise
- img = add_Gaussian_noise(img, noise_level1=2, noise_level2=25)
-
- elif i == 5:
- # add JPEG noise
- if random.random() < jpeg_prob:
- img = add_JPEG_noise(img)
-
- elif i == 6:
- # add processed camera sensor noise
- if random.random() < isp_prob and isp_model is not None:
- with torch.no_grad():
- img, hq = isp_model.forward(img.copy(), hq)
-
- # add final JPEG compression noise
- img = add_JPEG_noise(img)
-
- # random crop
- img, hq = random_crop(img, hq, sf_ori, lq_patchsize)
-
- return img, hq
-
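-# Illustrative usage sketch (added; not part of the original file). `hr` is a hypothetical
-# HxWxC float32 image in [0, 1] with at least lq_patchsize * sf pixels per side:
-#
-#   lq, hq = degradation_bsrgan(hr, sf=4, lq_patchsize=72)
-#   # lq: 72x72x3 degraded patch; hq: matching 288x288x3 clean patch
-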
-
-# todo no isp_model?
-def degradation_bsrgan_variant(image, sf=4, isp_model=None):
- """
- This is the degradation model of BSRGAN from the paper
- "Designing a Practical Degradation Model for Deep Blind Image Super-Resolution"
- ----------
- sf: scale factor
- isp_model: camera ISP model
- Returns
- -------
-    example: dict with a single key "image" holding the degraded low-quality image as a uint8 array (roughly 1/sf of the input size)
- """
- image = util.uint2single(image)
- isp_prob, jpeg_prob, scale2_prob = 0.25, 0.9, 0.25
- sf_ori = sf
-
- h1, w1 = image.shape[:2]
- image = image.copy()[:w1 - w1 % sf, :h1 - h1 % sf, ...] # mod crop
- h, w = image.shape[:2]
-
- hq = image.copy()
-
- if sf == 4 and random.random() < scale2_prob: # downsample1
- if np.random.rand() < 0.5:
- image = cv2.resize(image, (int(1 / 2 * image.shape[1]), int(1 / 2 * image.shape[0])),
- interpolation=random.choice([1, 2, 3]))
- else:
- image = util.imresize_np(image, 1 / 2, True)
- image = np.clip(image, 0.0, 1.0)
- sf = 2
-
- shuffle_order = random.sample(range(7), 7)
- idx1, idx2 = shuffle_order.index(2), shuffle_order.index(3)
- if idx1 > idx2: # keep downsample3 last
- shuffle_order[idx1], shuffle_order[idx2] = shuffle_order[idx2], shuffle_order[idx1]
-
- for i in shuffle_order:
-
- if i == 0:
- image = add_blur(image, sf=sf)
-
- elif i == 1:
- image = add_blur(image, sf=sf)
-
- elif i == 2:
- a, b = image.shape[1], image.shape[0]
- # downsample2
- if random.random() < 0.75:
- sf1 = random.uniform(1, 2 * sf)
- image = cv2.resize(image, (int(1 / sf1 * image.shape[1]), int(1 / sf1 * image.shape[0])),
- interpolation=random.choice([1, 2, 3]))
- else:
- k = fspecial('gaussian', 25, random.uniform(0.1, 0.6 * sf))
- k_shifted = shift_pixel(k, sf)
- k_shifted = k_shifted / k_shifted.sum() # blur with shifted kernel
- image = ndimage.filters.convolve(image, np.expand_dims(k_shifted, axis=2), mode='mirror')
- image = image[0::sf, 0::sf, ...] # nearest downsampling
- image = np.clip(image, 0.0, 1.0)
-
- elif i == 3:
- # downsample3
- image = cv2.resize(image, (int(1 / sf * a), int(1 / sf * b)), interpolation=random.choice([1, 2, 3]))
- image = np.clip(image, 0.0, 1.0)
-
- elif i == 4:
- # add Gaussian noise
- image = add_Gaussian_noise(image, noise_level1=2, noise_level2=25)
-
- elif i == 5:
- # add JPEG noise
- if random.random() < jpeg_prob:
- image = add_JPEG_noise(image)
-
- # elif i == 6:
- # # add processed camera sensor noise
- # if random.random() < isp_prob and isp_model is not None:
- # with torch.no_grad():
- # img, hq = isp_model.forward(img.copy(), hq)
-
- # add final JPEG compression noise
- image = add_JPEG_noise(image)
- image = util.single2uint(image)
- example = {"image":image}
- return example
-
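-# Illustrative usage sketch (added; not part of the original file). Unlike degradation_bsrgan
-# above, this variant takes a uint8 image and returns a dict; `hr_uint8` is a hypothetical
-# HxWx3 uint8 array:
-#
-#   lq_uint8 = degradation_bsrgan_variant(hr_uint8, sf=4)["image"]
-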
-
-# TODO: in case there is a pickle error, one needs to replace a += x with a = a + x in add_speckle_noise etc.
-def degradation_bsrgan_plus(img, sf=4, shuffle_prob=0.5, use_sharp=True, lq_patchsize=64, isp_model=None):
- """
- This is an extended degradation model by combining
- the degradation models of BSRGAN and Real-ESRGAN
- ----------
-    img: HXWXC, [0, 1], its size should be larger than (lq_patchsizexsf)x(lq_patchsizexsf)
- sf: scale factor
-    shuffle_prob: probability of shuffling the degradation order
-    use_sharp: whether to sharpen the image first
- Returns
- -------
- img: low-quality patch, size: lq_patchsizeXlq_patchsizeXC, range: [0, 1]
- hq: corresponding high-quality patch, size: (lq_patchsizexsf)X(lq_patchsizexsf)XC, range: [0, 1]
- """
-
- h1, w1 = img.shape[:2]
- img = img.copy()[:w1 - w1 % sf, :h1 - h1 % sf, ...] # mod crop
- h, w = img.shape[:2]
-
- if h < lq_patchsize * sf or w < lq_patchsize * sf:
- raise ValueError(f'img size ({h1}X{w1}) is too small!')
-
- if use_sharp:
- img = add_sharpening(img)
- hq = img.copy()
-
- if random.random() < shuffle_prob:
- shuffle_order = random.sample(range(13), 13)
- else:
- shuffle_order = list(range(13))
- # local shuffle for noise, JPEG is always the last one
- shuffle_order[2:6] = random.sample(shuffle_order[2:6], len(range(2, 6)))
- shuffle_order[9:13] = random.sample(shuffle_order[9:13], len(range(9, 13)))
-
- poisson_prob, speckle_prob, isp_prob = 0.1, 0.1, 0.1
-
- for i in shuffle_order:
- if i == 0:
- img = add_blur(img, sf=sf)
- elif i == 1:
- img = add_resize(img, sf=sf)
- elif i == 2:
- img = add_Gaussian_noise(img, noise_level1=2, noise_level2=25)
- elif i == 3:
- if random.random() < poisson_prob:
- img = add_Poisson_noise(img)
- elif i == 4:
- if random.random() < speckle_prob:
- img = add_speckle_noise(img)
- elif i == 5:
- if random.random() < isp_prob and isp_model is not None:
- with torch.no_grad():
- img, hq = isp_model.forward(img.copy(), hq)
- elif i == 6:
- img = add_JPEG_noise(img)
- elif i == 7:
- img = add_blur(img, sf=sf)
- elif i == 8:
- img = add_resize(img, sf=sf)
- elif i == 9:
- img = add_Gaussian_noise(img, noise_level1=2, noise_level2=25)
- elif i == 10:
- if random.random() < poisson_prob:
- img = add_Poisson_noise(img)
- elif i == 11:
- if random.random() < speckle_prob:
- img = add_speckle_noise(img)
- elif i == 12:
- if random.random() < isp_prob and isp_model is not None:
- with torch.no_grad():
- img, hq = isp_model.forward(img.copy(), hq)
- else:
- print('check the shuffle!')
-
- # resize to desired size
- img = cv2.resize(img, (int(1 / sf * hq.shape[1]), int(1 / sf * hq.shape[0])),
- interpolation=random.choice([1, 2, 3]))
-
- # add final JPEG compression noise
- img = add_JPEG_noise(img)
-
- # random crop
- img, hq = random_crop(img, hq, sf, lq_patchsize)
-
- return img, hq
-
-
-if __name__ == '__main__':
- print("hey")
- img = util.imread_uint('utils/test.png', 3)
-    print(img)
- img = img[:448, :448]
- h = img.shape[0] // 4
- print("resizing to", h)
- sf = 4
- deg_fn = partial(degradation_bsrgan_variant, sf=sf)
- for i in range(20):
- print(i)
-        img_hq = img
-        img_lq = deg_fn(img)["image"]
-        img_hq, img_lq = util.uint2single(img_hq), util.uint2single(img_lq)
-        print(img_lq)
-        img_lq_bicubic = albumentations.SmallestMaxSize(max_size=h, interpolation=cv2.INTER_CUBIC)(image=img_hq)["image"]
- print(img_lq.shape)
- print("bicubic", img_lq_bicubic.shape)
- print(img_hq.shape)
- lq_nearest = cv2.resize(util.single2uint(img_lq), (int(sf * img_lq.shape[1]), int(sf * img_lq.shape[0])),
- interpolation=0)
- lq_bicubic_nearest = cv2.resize(util.single2uint(img_lq_bicubic), (int(sf * img_lq.shape[1]), int(sf * img_lq.shape[0])),
- interpolation=0)
- img_concat = np.concatenate([lq_bicubic_nearest, lq_nearest, util.single2uint(img_hq)], axis=1)
- util.imsave(img_concat, str(i) + '.png')
-
-
diff --git a/One-2-3-45-master 2/ldm/modules/image_degradation/bsrgan_light.py b/One-2-3-45-master 2/ldm/modules/image_degradation/bsrgan_light.py
deleted file mode 100644
index dfa760689762d4e9490fe4d817f844955f1b35de..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/modules/image_degradation/bsrgan_light.py
+++ /dev/null
@@ -1,650 +0,0 @@
-# -*- coding: utf-8 -*-
-import numpy as np
-import cv2
-import torch
-
-from functools import partial
-import random
-from scipy import ndimage
-import scipy
-import scipy.stats as ss
-from scipy.interpolate import interp2d
-from scipy.linalg import orth
-import albumentations
-
-import ldm.modules.image_degradation.utils_image as util
-
-"""
-# --------------------------------------------
-# Super-Resolution
-# --------------------------------------------
-#
-# Kai Zhang (cskaizhang@gmail.com)
-# https://github.com/cszn
-# From 2019/03--2021/08
-# --------------------------------------------
-"""
-
-
-def modcrop_np(img, sf):
- '''
- Args:
- img: numpy image, WxH or WxHxC
- sf: scale factor
- Return:
- cropped image
- '''
- w, h = img.shape[:2]
- im = np.copy(img)
- return im[:w - w % sf, :h - h % sf, ...]
-
-
-"""
-# --------------------------------------------
-# anisotropic Gaussian kernels
-# --------------------------------------------
-"""
-
-
-def analytic_kernel(k):
- """Calculate the X4 kernel from the X2 kernel (for proof see appendix in paper)"""
- k_size = k.shape[0]
-    # Calculate the big kernel's size
- big_k = np.zeros((3 * k_size - 2, 3 * k_size - 2))
- # Loop over the small kernel to fill the big one
- for r in range(k_size):
- for c in range(k_size):
- big_k[2 * r:2 * r + k_size, 2 * c:2 * c + k_size] += k[r, c] * k
- # Crop the edges of the big kernel to ignore very small values and increase run time of SR
- crop = k_size // 2
- cropped_big_k = big_k[crop:-crop, crop:-crop]
- # Normalize to 1
- return cropped_big_k / cropped_big_k.sum()
-
-
-def anisotropic_Gaussian(ksize=15, theta=np.pi, l1=6, l2=6):
- """ generate an anisotropic Gaussian kernel
- Args:
- ksize : e.g., 15, kernel size
- theta : [0, pi], rotation angle range
- l1 : [0.1,50], scaling of eigenvalues
- l2 : [0.1,l1], scaling of eigenvalues
- If l1 = l2, will get an isotropic Gaussian kernel.
- Returns:
- k : kernel
- """
-
- v = np.dot(np.array([[np.cos(theta), -np.sin(theta)], [np.sin(theta), np.cos(theta)]]), np.array([1., 0.]))
- V = np.array([[v[0], v[1]], [v[1], -v[0]]])
- D = np.array([[l1, 0], [0, l2]])
- Sigma = np.dot(np.dot(V, D), np.linalg.inv(V))
- k = gm_blur_kernel(mean=[0, 0], cov=Sigma, size=ksize)
-
- return k
-
-
-def gm_blur_kernel(mean, cov, size=15):
- center = size / 2.0 + 0.5
- k = np.zeros([size, size])
- for y in range(size):
- for x in range(size):
- cy = y - center + 1
- cx = x - center + 1
- k[y, x] = ss.multivariate_normal.pdf([cx, cy], mean=mean, cov=cov)
-
- k = k / np.sum(k)
- return k
-
-
-def shift_pixel(x, sf, upper_left=True):
- """shift pixel for super-resolution with different scale factors
- Args:
- x: WxHxC or WxH
- sf: scale factor
- upper_left: shift direction
- """
- h, w = x.shape[:2]
- shift = (sf - 1) * 0.5
- xv, yv = np.arange(0, w, 1.0), np.arange(0, h, 1.0)
- if upper_left:
- x1 = xv + shift
- y1 = yv + shift
- else:
- x1 = xv - shift
- y1 = yv - shift
-
- x1 = np.clip(x1, 0, w - 1)
- y1 = np.clip(y1, 0, h - 1)
-
- if x.ndim == 2:
- x = interp2d(xv, yv, x)(x1, y1)
- if x.ndim == 3:
- for i in range(x.shape[-1]):
- x[:, :, i] = interp2d(xv, yv, x[:, :, i])(x1, y1)
-
- return x
-
-
-def blur(x, k):
- '''
- x: image, NxcxHxW
- k: kernel, Nx1xhxw
- '''
- n, c = x.shape[:2]
- p1, p2 = (k.shape[-2] - 1) // 2, (k.shape[-1] - 1) // 2
- x = torch.nn.functional.pad(x, pad=(p1, p2, p1, p2), mode='replicate')
- k = k.repeat(1, c, 1, 1)
- k = k.view(-1, 1, k.shape[2], k.shape[3])
- x = x.view(1, -1, x.shape[2], x.shape[3])
- x = torch.nn.functional.conv2d(x, k, bias=None, stride=1, padding=0, groups=n * c)
- x = x.view(n, c, x.shape[2], x.shape[3])
-
- return x
-
-
-def gen_kernel(k_size=np.array([15, 15]), scale_factor=np.array([4, 4]), min_var=0.6, max_var=10., noise_level=0):
- """"
- # modified version of https://github.com/assafshocher/BlindSR_dataset_generator
- # Kai Zhang
- # min_var = 0.175 * sf # variance of the gaussian kernel will be sampled between min_var and max_var
- # max_var = 2.5 * sf
- """
- # Set random eigen-vals (lambdas) and angle (theta) for COV matrix
- lambda_1 = min_var + np.random.rand() * (max_var - min_var)
- lambda_2 = min_var + np.random.rand() * (max_var - min_var)
- theta = np.random.rand() * np.pi # random theta
- noise = -noise_level + np.random.rand(*k_size) * noise_level * 2
-
- # Set COV matrix using Lambdas and Theta
- LAMBDA = np.diag([lambda_1, lambda_2])
- Q = np.array([[np.cos(theta), -np.sin(theta)],
- [np.sin(theta), np.cos(theta)]])
- SIGMA = Q @ LAMBDA @ Q.T
- INV_SIGMA = np.linalg.inv(SIGMA)[None, None, :, :]
-
- # Set expectation position (shifting kernel for aligned image)
- MU = k_size // 2 - 0.5 * (scale_factor - 1) # - 0.5 * (scale_factor - k_size % 2)
- MU = MU[None, None, :, None]
-
- # Create meshgrid for Gaussian
- [X, Y] = np.meshgrid(range(k_size[0]), range(k_size[1]))
- Z = np.stack([X, Y], 2)[:, :, :, None]
-
-    # Calculate the Gaussian for every pixel of the kernel
- ZZ = Z - MU
- ZZ_t = ZZ.transpose(0, 1, 3, 2)
- raw_kernel = np.exp(-0.5 * np.squeeze(ZZ_t @ INV_SIGMA @ ZZ)) * (1 + noise)
-
- # shift the kernel so it will be centered
- # raw_kernel_centered = kernel_shift(raw_kernel, scale_factor)
-
- # Normalize the kernel and return
- # kernel = raw_kernel_centered / np.sum(raw_kernel_centered)
- kernel = raw_kernel / np.sum(raw_kernel)
- return kernel
-
-
-def fspecial_gaussian(hsize, sigma):
- hsize = [hsize, hsize]
- siz = [(hsize[0] - 1.0) / 2.0, (hsize[1] - 1.0) / 2.0]
- std = sigma
- [x, y] = np.meshgrid(np.arange(-siz[1], siz[1] + 1), np.arange(-siz[0], siz[0] + 1))
- arg = -(x * x + y * y) / (2 * std * std)
- h = np.exp(arg)
-    h[h < np.finfo(float).eps * h.max()] = 0
- sumh = h.sum()
- if sumh != 0:
- h = h / sumh
- return h
-
-
-def fspecial_laplacian(alpha):
- alpha = max([0, min([alpha, 1])])
- h1 = alpha / (alpha + 1)
- h2 = (1 - alpha) / (alpha + 1)
- h = [[h1, h2, h1], [h2, -4 / (alpha + 1), h2], [h1, h2, h1]]
- h = np.array(h)
- return h
-
-
-def fspecial(filter_type, *args, **kwargs):
- '''
- python code from:
- https://github.com/ronaldosena/imagens-medicas-2/blob/40171a6c259edec7827a6693a93955de2bd39e76/Aulas/aula_2_-_uniform_filter/matlab_fspecial.py
- '''
- if filter_type == 'gaussian':
- return fspecial_gaussian(*args, **kwargs)
- if filter_type == 'laplacian':
- return fspecial_laplacian(*args, **kwargs)
-
-
-"""
-# --------------------------------------------
-# degradation models
-# --------------------------------------------
-"""
-
-
-def bicubic_degradation(x, sf=3):
- '''
- Args:
- x: HxWxC image, [0, 1]
- sf: down-scale factor
- Return:
- bicubicly downsampled LR image
- '''
- x = util.imresize_np(x, scale=1 / sf)
- return x
-
-
-def srmd_degradation(x, k, sf=3):
- ''' blur + bicubic downsampling
- Args:
- x: HxWxC image, [0, 1]
- k: hxw, double
- sf: down-scale factor
- Return:
- downsampled LR image
- Reference:
- @inproceedings{zhang2018learning,
- title={Learning a single convolutional super-resolution network for multiple degradations},
- author={Zhang, Kai and Zuo, Wangmeng and Zhang, Lei},
- booktitle={IEEE Conference on Computer Vision and Pattern Recognition},
- pages={3262--3271},
- year={2018}
- }
- '''
- x = ndimage.convolve(x, np.expand_dims(k, axis=2), mode='wrap') # 'nearest' | 'mirror'
- x = bicubic_degradation(x, sf=sf)
- return x
-
-
-def dpsr_degradation(x, k, sf=3):
- ''' bicubic downsampling + blur
- Args:
- x: HxWxC image, [0, 1]
- k: hxw, double
- sf: down-scale factor
- Return:
- downsampled LR image
- Reference:
- @inproceedings{zhang2019deep,
- title={Deep Plug-and-Play Super-Resolution for Arbitrary Blur Kernels},
- author={Zhang, Kai and Zuo, Wangmeng and Zhang, Lei},
- booktitle={IEEE Conference on Computer Vision and Pattern Recognition},
- pages={1671--1681},
- year={2019}
- }
- '''
- x = bicubic_degradation(x, sf=sf)
- x = ndimage.convolve(x, np.expand_dims(k, axis=2), mode='wrap')
- return x
-
-
-def classical_degradation(x, k, sf=3):
- ''' blur + downsampling
- Args:
- x: HxWxC image, [0, 1]/[0, 255]
- k: hxw, double
- sf: down-scale factor
- Return:
- downsampled LR image
- '''
- x = ndimage.convolve(x, np.expand_dims(k, axis=2), mode='wrap')
- # x = filters.correlate(x, np.expand_dims(np.flip(k), axis=2))
- st = 0
- return x[st::sf, st::sf, ...]
-
-
-def add_sharpening(img, weight=0.5, radius=50, threshold=10):
- """USM sharpening. borrowed from real-ESRGAN
- Input image: I; Blurry image: B.
- 1. K = I + weight * (I - B)
- 2. Mask = 1 if abs(I - B) > threshold, else: 0
- 3. Blur mask:
- 4. Out = Mask * K + (1 - Mask) * I
- Args:
- img (Numpy array): Input image, HWC, BGR; float32, [0, 1].
-        weight (float): Sharp weight. Default: 0.5.
-        radius (float): Kernel size of Gaussian blur. Default: 50.
-        threshold (int): Threshold on the residual (scaled to [0, 255]) used to build the mask. Default: 10.
- """
- if radius % 2 == 0:
- radius += 1
- blur = cv2.GaussianBlur(img, (radius, radius), 0)
- residual = img - blur
- mask = np.abs(residual) * 255 > threshold
- mask = mask.astype('float32')
- soft_mask = cv2.GaussianBlur(mask, (radius, radius), 0)
-
- K = img + weight * residual
- K = np.clip(K, 0, 1)
- return soft_mask * K + (1 - soft_mask) * img
-
-
-def add_blur(img, sf=4):
- wd2 = 4.0 + sf
- wd = 2.0 + 0.2 * sf
-
- wd2 = wd2/4
- wd = wd/4
-
- if random.random() < 0.5:
- l1 = wd2 * random.random()
- l2 = wd2 * random.random()
- k = anisotropic_Gaussian(ksize=random.randint(2, 11) + 3, theta=random.random() * np.pi, l1=l1, l2=l2)
- else:
- k = fspecial('gaussian', random.randint(2, 4) + 3, wd * random.random())
- img = ndimage.convolve(img, np.expand_dims(k, axis=2), mode='mirror')
-
- return img
-
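-# Note added for clarity: compared to add_blur in bsrgan.py above, this "light" variant
-# divides the kernel widths by 4 and samples smaller kernel sizes, so the blur it applies
-# is noticeably milder.
-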
-
-def add_resize(img, sf=4):
- rnum = np.random.rand()
- if rnum > 0.8: # up
- sf1 = random.uniform(1, 2)
- elif rnum < 0.7: # down
- sf1 = random.uniform(0.5 / sf, 1)
- else:
- sf1 = 1.0
- img = cv2.resize(img, (int(sf1 * img.shape[1]), int(sf1 * img.shape[0])), interpolation=random.choice([1, 2, 3]))
- img = np.clip(img, 0.0, 1.0)
-
- return img
-
-
-# def add_Gaussian_noise(img, noise_level1=2, noise_level2=25):
-# noise_level = random.randint(noise_level1, noise_level2)
-# rnum = np.random.rand()
-# if rnum > 0.6: # add color Gaussian noise
-# img += np.random.normal(0, noise_level / 255.0, img.shape).astype(np.float32)
-# elif rnum < 0.4: # add grayscale Gaussian noise
-# img += np.random.normal(0, noise_level / 255.0, (*img.shape[:2], 1)).astype(np.float32)
-# else: # add noise
-# L = noise_level2 / 255.
-# D = np.diag(np.random.rand(3))
-# U = orth(np.random.rand(3, 3))
-# conv = np.dot(np.dot(np.transpose(U), D), U)
-# img += np.random.multivariate_normal([0, 0, 0], np.abs(L ** 2 * conv), img.shape[:2]).astype(np.float32)
-# img = np.clip(img, 0.0, 1.0)
-# return img
-
-def add_Gaussian_noise(img, noise_level1=2, noise_level2=25):
- noise_level = random.randint(noise_level1, noise_level2)
- rnum = np.random.rand()
- if rnum > 0.6: # add color Gaussian noise
- img = img + np.random.normal(0, noise_level / 255.0, img.shape).astype(np.float32)
- elif rnum < 0.4: # add grayscale Gaussian noise
- img = img + np.random.normal(0, noise_level / 255.0, (*img.shape[:2], 1)).astype(np.float32)
- else: # add noise
- L = noise_level2 / 255.
- D = np.diag(np.random.rand(3))
- U = orth(np.random.rand(3, 3))
- conv = np.dot(np.dot(np.transpose(U), D), U)
- img = img + np.random.multivariate_normal([0, 0, 0], np.abs(L ** 2 * conv), img.shape[:2]).astype(np.float32)
- img = np.clip(img, 0.0, 1.0)
- return img
-
-
-def add_speckle_noise(img, noise_level1=2, noise_level2=25):
- noise_level = random.randint(noise_level1, noise_level2)
- img = np.clip(img, 0.0, 1.0)
- rnum = random.random()
- if rnum > 0.6:
- img += img * np.random.normal(0, noise_level / 255.0, img.shape).astype(np.float32)
- elif rnum < 0.4:
- img += img * np.random.normal(0, noise_level / 255.0, (*img.shape[:2], 1)).astype(np.float32)
- else:
- L = noise_level2 / 255.
- D = np.diag(np.random.rand(3))
- U = orth(np.random.rand(3, 3))
- conv = np.dot(np.dot(np.transpose(U), D), U)
- img += img * np.random.multivariate_normal([0, 0, 0], np.abs(L ** 2 * conv), img.shape[:2]).astype(np.float32)
- img = np.clip(img, 0.0, 1.0)
- return img
-
-
-def add_Poisson_noise(img):
- img = np.clip((img * 255.0).round(), 0, 255) / 255.
- vals = 10 ** (2 * random.random() + 2.0) # [2, 4]
- if random.random() < 0.5:
- img = np.random.poisson(img * vals).astype(np.float32) / vals
- else:
- img_gray = np.dot(img[..., :3], [0.299, 0.587, 0.114])
- img_gray = np.clip((img_gray * 255.0).round(), 0, 255) / 255.
- noise_gray = np.random.poisson(img_gray * vals).astype(np.float32) / vals - img_gray
- img += noise_gray[:, :, np.newaxis]
- img = np.clip(img, 0.0, 1.0)
- return img
-
-
-def add_JPEG_noise(img):
- quality_factor = random.randint(80, 95)
- img = cv2.cvtColor(util.single2uint(img), cv2.COLOR_RGB2BGR)
- result, encimg = cv2.imencode('.jpg', img, [int(cv2.IMWRITE_JPEG_QUALITY), quality_factor])
- img = cv2.imdecode(encimg, 1)
- img = cv2.cvtColor(util.uint2single(img), cv2.COLOR_BGR2RGB)
- return img
-
-
-def random_crop(lq, hq, sf=4, lq_patchsize=64):
- h, w = lq.shape[:2]
- rnd_h = random.randint(0, h - lq_patchsize)
- rnd_w = random.randint(0, w - lq_patchsize)
- lq = lq[rnd_h:rnd_h + lq_patchsize, rnd_w:rnd_w + lq_patchsize, :]
-
- rnd_h_H, rnd_w_H = int(rnd_h * sf), int(rnd_w * sf)
- hq = hq[rnd_h_H:rnd_h_H + lq_patchsize * sf, rnd_w_H:rnd_w_H + lq_patchsize * sf, :]
- return lq, hq
-
-
-def degradation_bsrgan(img, sf=4, lq_patchsize=72, isp_model=None):
- """
- This is the degradation model of BSRGAN from the paper
- "Designing a Practical Degradation Model for Deep Blind Image Super-Resolution"
- ----------
-    img: HXWXC, [0, 1], its size should be larger than (lq_patchsizexsf)x(lq_patchsizexsf)
- sf: scale factor
- isp_model: camera ISP model
- Returns
- -------
- img: low-quality patch, size: lq_patchsizeXlq_patchsizeXC, range: [0, 1]
- hq: corresponding high-quality patch, size: (lq_patchsizexsf)X(lq_patchsizexsf)XC, range: [0, 1]
- """
- isp_prob, jpeg_prob, scale2_prob = 0.25, 0.9, 0.25
- sf_ori = sf
-
- h1, w1 = img.shape[:2]
- img = img.copy()[:w1 - w1 % sf, :h1 - h1 % sf, ...] # mod crop
- h, w = img.shape[:2]
-
- if h < lq_patchsize * sf or w < lq_patchsize * sf:
- raise ValueError(f'img size ({h1}X{w1}) is too small!')
-
- hq = img.copy()
-
- if sf == 4 and random.random() < scale2_prob: # downsample1
- if np.random.rand() < 0.5:
- img = cv2.resize(img, (int(1 / 2 * img.shape[1]), int(1 / 2 * img.shape[0])),
- interpolation=random.choice([1, 2, 3]))
- else:
- img = util.imresize_np(img, 1 / 2, True)
- img = np.clip(img, 0.0, 1.0)
- sf = 2
-
- shuffle_order = random.sample(range(7), 7)
- idx1, idx2 = shuffle_order.index(2), shuffle_order.index(3)
- if idx1 > idx2: # keep downsample3 last
- shuffle_order[idx1], shuffle_order[idx2] = shuffle_order[idx2], shuffle_order[idx1]
-
- for i in shuffle_order:
-
- if i == 0:
- img = add_blur(img, sf=sf)
-
- elif i == 1:
- img = add_blur(img, sf=sf)
-
- elif i == 2:
- a, b = img.shape[1], img.shape[0]
- # downsample2
- if random.random() < 0.75:
- sf1 = random.uniform(1, 2 * sf)
- img = cv2.resize(img, (int(1 / sf1 * img.shape[1]), int(1 / sf1 * img.shape[0])),
- interpolation=random.choice([1, 2, 3]))
- else:
- k = fspecial('gaussian', 25, random.uniform(0.1, 0.6 * sf))
- k_shifted = shift_pixel(k, sf)
- k_shifted = k_shifted / k_shifted.sum() # blur with shifted kernel
- img = ndimage.convolve(img, np.expand_dims(k_shifted, axis=2), mode='mirror')
- img = img[0::sf, 0::sf, ...] # nearest downsampling
- img = np.clip(img, 0.0, 1.0)
-
- elif i == 3:
- # downsample3
- img = cv2.resize(img, (int(1 / sf * a), int(1 / sf * b)), interpolation=random.choice([1, 2, 3]))
- img = np.clip(img, 0.0, 1.0)
-
- elif i == 4:
- # add Gaussian noise
- img = add_Gaussian_noise(img, noise_level1=2, noise_level2=8)
-
- elif i == 5:
- # add JPEG noise
- if random.random() < jpeg_prob:
- img = add_JPEG_noise(img)
-
- elif i == 6:
- # add processed camera sensor noise
- if random.random() < isp_prob and isp_model is not None:
- with torch.no_grad():
- img, hq = isp_model.forward(img.copy(), hq)
-
- # add final JPEG compression noise
- img = add_JPEG_noise(img)
-
- # random crop
- img, hq = random_crop(img, hq, sf_ori, lq_patchsize)
-
- return img, hq
-
-
-# todo no isp_model?
-def degradation_bsrgan_variant(image, sf=4, isp_model=None):
- """
- This is the degradation model of BSRGAN from the paper
- "Designing a Practical Degradation Model for Deep Blind Image Super-Resolution"
- ----------
- sf: scale factor
- isp_model: camera ISP model
- Returns
- -------
-    example: dict with a single key "image" holding the degraded low-quality image as a uint8 array (roughly 1/sf of the input size)
- """
- image = util.uint2single(image)
- isp_prob, jpeg_prob, scale2_prob = 0.25, 0.9, 0.25
- sf_ori = sf
-
- h1, w1 = image.shape[:2]
- image = image.copy()[:w1 - w1 % sf, :h1 - h1 % sf, ...] # mod crop
- h, w = image.shape[:2]
-
- hq = image.copy()
-
- if sf == 4 and random.random() < scale2_prob: # downsample1
- if np.random.rand() < 0.5:
- image = cv2.resize(image, (int(1 / 2 * image.shape[1]), int(1 / 2 * image.shape[0])),
- interpolation=random.choice([1, 2, 3]))
- else:
- image = util.imresize_np(image, 1 / 2, True)
- image = np.clip(image, 0.0, 1.0)
- sf = 2
-
- shuffle_order = random.sample(range(7), 7)
- idx1, idx2 = shuffle_order.index(2), shuffle_order.index(3)
- if idx1 > idx2: # keep downsample3 last
- shuffle_order[idx1], shuffle_order[idx2] = shuffle_order[idx2], shuffle_order[idx1]
-
- for i in shuffle_order:
-
- if i == 0:
- image = add_blur(image, sf=sf)
-
- elif i == 1:
- pass  # the second blur stage is intentionally skipped in this lighter variant
-
- elif i == 2:
- a, b = image.shape[1], image.shape[0]
- # downsample2
- if random.random() < 0.8:
- sf1 = random.uniform(1, 2 * sf)
- image = cv2.resize(image, (int(1 / sf1 * image.shape[1]), int(1 / sf1 * image.shape[0])),
- interpolation=random.choice([1, 2, 3]))
- else:
- k = fspecial('gaussian', 25, random.uniform(0.1, 0.6 * sf))
- k_shifted = shift_pixel(k, sf)
- k_shifted = k_shifted / k_shifted.sum() # blur with shifted kernel
- image = ndimage.convolve(image, np.expand_dims(k_shifted, axis=2), mode='mirror')
- image = image[0::sf, 0::sf, ...] # nearest downsampling
-
- image = np.clip(image, 0.0, 1.0)
-
- elif i == 3:
- # downsample3
- image = cv2.resize(image, (int(1 / sf * a), int(1 / sf * b)), interpolation=random.choice([1, 2, 3]))
- image = np.clip(image, 0.0, 1.0)
-
- elif i == 4:
- # add Gaussian noise
- image = add_Gaussian_noise(image, noise_level1=1, noise_level2=2)
-
- elif i == 5:
- # add JPEG noise
- if random.random() < jpeg_prob:
- image = add_JPEG_noise(image)
- #
- # elif i == 6:
- # # add processed camera sensor noise
- # if random.random() < isp_prob and isp_model is not None:
- # with torch.no_grad():
- # img, hq = isp_model.forward(img.copy(), hq)
-
- # add final JPEG compression noise
- image = add_JPEG_noise(image)
- image = util.single2uint(image)
- example = {"image": image}
- return example
-
-
-
-
-if __name__ == '__main__':
- print("hey")
- img = util.imread_uint('utils/test.png', 3)
- img = img[:448, :448]
- h = img.shape[0] // 4
- print("resizing to", h)
- sf = 4
- deg_fn = partial(degradation_bsrgan_variant, sf=sf)
- for i in range(20):
- print(i)
- img_hq = img
- img_lq = deg_fn(img)["image"]
- img_hq, img_lq = util.uint2single(img_hq), util.uint2single(img_lq)
- print(img_lq)
- img_lq_bicubic = albumentations.SmallestMaxSize(max_size=h, interpolation=cv2.INTER_CUBIC)(image=img_hq)["image"]
- print(img_lq.shape)
- print("bicubic", img_lq_bicubic.shape)
- print(img_hq.shape)
- lq_nearest = cv2.resize(util.single2uint(img_lq), (int(sf * img_lq.shape[1]), int(sf * img_lq.shape[0])),
- interpolation=0)
- lq_bicubic_nearest = cv2.resize(util.single2uint(img_lq_bicubic),
- (int(sf * img_lq.shape[1]), int(sf * img_lq.shape[0])),
- interpolation=0)
- img_concat = np.concatenate([lq_bicubic_nearest, lq_nearest, util.single2uint(img_hq)], axis=1)
- util.imsave(img_concat, str(i) + '.png')
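For reference, a minimal usage sketch of the variant above (the import path is an assumption; the module would need to be importable from the ldm.modules.image_degradation package, and it relies on the util helpers imported at the top of the file):

    import numpy as np
    # assumed import path; adjust to wherever this degradation module actually lives
    from ldm.modules.image_degradation import bsrgan_light as deg

    hq_uint8 = (np.random.rand(256, 256, 3) * 255).astype(np.uint8)  # stand-in for a real HR image
    out = deg.degradation_bsrgan_variant(hq_uint8, sf=4)             # returns {"image": degraded uint8 array}
    lq_uint8 = out["image"]
    print(lq_uint8.shape, lq_uint8.dtype)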
diff --git a/One-2-3-45-master 2/ldm/modules/image_degradation/utils/test.png b/One-2-3-45-master 2/ldm/modules/image_degradation/utils/test.png
deleted file mode 100644
index 4249b43de0f22707758d13c240268a401642f6e6..0000000000000000000000000000000000000000
Binary files a/One-2-3-45-master 2/ldm/modules/image_degradation/utils/test.png and /dev/null differ
diff --git a/One-2-3-45-master 2/ldm/modules/image_degradation/utils_image.py b/One-2-3-45-master 2/ldm/modules/image_degradation/utils_image.py
deleted file mode 100644
index 0175f155ad900ae33c3c46ed87f49b352e3faf98..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/modules/image_degradation/utils_image.py
+++ /dev/null
@@ -1,916 +0,0 @@
-import os
-import math
-import random
-import numpy as np
-import torch
-import cv2
-from torchvision.utils import make_grid
-from datetime import datetime
-#import matplotlib.pyplot as plt # TODO: check with Dominik, also bsrgan.py vs bsrgan_light.py
-
-
-os.environ["KMP_DUPLICATE_LIB_OK"]="TRUE"
-
-
-'''
-# --------------------------------------------
-# Kai Zhang (github: https://github.com/cszn)
-# 03/Mar/2019
-# --------------------------------------------
-# https://github.com/twhui/SRGAN-pyTorch
-# https://github.com/xinntao/BasicSR
-# --------------------------------------------
-'''
-
-
-IMG_EXTENSIONS = ['.jpg', '.JPG', '.jpeg', '.JPEG', '.png', '.PNG', '.ppm', '.PPM', '.bmp', '.BMP', '.tif']
-
-
-def is_image_file(filename):
- return any(filename.endswith(extension) for extension in IMG_EXTENSIONS)
-
-
-def get_timestamp():
- return datetime.now().strftime('%y%m%d-%H%M%S')
-
-
-def imshow(x, title=None, cbar=False, figsize=None):
- import matplotlib.pyplot as plt  # local import: the module-level import is commented out above
- plt.figure(figsize=figsize)
- plt.imshow(np.squeeze(x), interpolation='nearest', cmap='gray')
- if title:
- plt.title(title)
- if cbar:
- plt.colorbar()
- plt.show()
-
-
-def surf(Z, cmap='rainbow', figsize=None):
- import matplotlib.pyplot as plt  # local import: the module-level import is commented out above
- plt.figure(figsize=figsize)
- ax3 = plt.axes(projection='3d')
-
- w, h = Z.shape[:2]
- xx = np.arange(0, w, 1)
- yy = np.arange(0, h, 1)
- X, Y = np.meshgrid(xx, yy)
- ax3.plot_surface(X, Y, Z, cmap=cmap)
- # ax3.contour(X, Y, Z, zdim='z', offset=-2, cmap=cmap)
- plt.show()
-
-
-'''
-# --------------------------------------------
-# get image paths
-# --------------------------------------------
-'''
-
-
-def get_image_paths(dataroot):
- paths = None # return None if dataroot is None
- if dataroot is not None:
- paths = sorted(_get_paths_from_images(dataroot))
- return paths
-
-
-def _get_paths_from_images(path):
- assert os.path.isdir(path), '{:s} is not a valid directory'.format(path)
- images = []
- for dirpath, _, fnames in sorted(os.walk(path)):
- for fname in sorted(fnames):
- if is_image_file(fname):
- img_path = os.path.join(dirpath, fname)
- images.append(img_path)
- assert images, '{:s} has no valid image file'.format(path)
- return images
-
-
-'''
-# --------------------------------------------
-# split large images into small images
-# --------------------------------------------
-'''
-
-
-def patches_from_image(img, p_size=512, p_overlap=64, p_max=800):
- w, h = img.shape[:2]
- patches = []
- if w > p_max and h > p_max:
- w1 = list(np.arange(0, w-p_size, p_size-p_overlap, dtype=int))  # np.int was removed in NumPy >= 1.24
- h1 = list(np.arange(0, h-p_size, p_size-p_overlap, dtype=int))
- w1.append(w-p_size)
- h1.append(h-p_size)
-# print(w1)
-# print(h1)
- for i in w1:
- for j in h1:
- patches.append(img[i:i+p_size, j:j+p_size,:])
- else:
- patches.append(img)
-
- return patches
-
-
-def imssave(imgs, img_path):
- """
- imgs: list, N images of size WxHxC
- """
- img_name, ext = os.path.splitext(os.path.basename(img_path))
-
- for i, img in enumerate(imgs):
- if img.ndim == 3:
- img = img[:, :, [2, 1, 0]]
- new_path = os.path.join(os.path.dirname(img_path), img_name+str('_s{:04d}'.format(i))+'.png')
- cv2.imwrite(new_path, img)
-
-
-def split_imageset(original_dataroot, taget_dataroot, n_channels=3, p_size=800, p_overlap=96, p_max=1000):
- """
- split the large images from original_dataroot into small overlapped images with size (p_size)x(p_size),
- and save them into taget_dataroot; only the images with larger size than (p_max)x(p_max)
- will be splitted.
- Args:
- original_dataroot:
- taget_dataroot:
- p_size: size of small images
- p_overlap: patch size in training is a good choice
- p_max: images with smaller size than (p_max)x(p_max) keep unchanged.
- """
- paths = get_image_paths(original_dataroot)
- for img_path in paths:
- # img_name, ext = os.path.splitext(os.path.basename(img_path))
- img = imread_uint(img_path, n_channels=n_channels)
- patches = patches_from_image(img, p_size, p_overlap, p_max)
- imssave(patches, os.path.join(taget_dataroot,os.path.basename(img_path)))
- #if original_dataroot == taget_dataroot:
- #del img_path
-
-'''
-# --------------------------------------------
-# makedir
-# --------------------------------------------
-'''
-
-
-def mkdir(path):
- if not os.path.exists(path):
- os.makedirs(path)
-
-
-def mkdirs(paths):
- if isinstance(paths, str):
- mkdir(paths)
- else:
- for path in paths:
- mkdir(path)
-
-
-def mkdir_and_rename(path):
- if os.path.exists(path):
- new_name = path + '_archived_' + get_timestamp()
- print('Path already exists. Renaming it to [{:s}]'.format(new_name))
- os.rename(path, new_name)
- os.makedirs(path)
-
-
-'''
-# --------------------------------------------
-# read image from path
-# opencv is fast, but read BGR numpy image
-# --------------------------------------------
-'''
-
-
-# --------------------------------------------
-# get uint8 image of size HxWxn_channels (RGB)
-# --------------------------------------------
-def imread_uint(path, n_channels=3):
- # input: path
- # output: HxWx3(RGB or GGG), or HxWx1 (G)
- if n_channels == 1:
- img = cv2.imread(path, 0) # cv2.IMREAD_GRAYSCALE
- img = np.expand_dims(img, axis=2) # HxWx1
- elif n_channels == 3:
- img = cv2.imread(path, cv2.IMREAD_UNCHANGED) # BGR or G
- if img.ndim == 2:
- img = cv2.cvtColor(img, cv2.COLOR_GRAY2RGB) # GGG
- else:
- img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) # RGB
- return img
-
-
-# --------------------------------------------
-# matlab's imwrite
-# --------------------------------------------
-def imsave(img, img_path):
- img = np.squeeze(img)
- if img.ndim == 3:
- img = img[:, :, [2, 1, 0]]
- cv2.imwrite(img_path, img)
-
-def imwrite(img, img_path):
- img = np.squeeze(img)
- if img.ndim == 3:
- img = img[:, :, [2, 1, 0]]
- cv2.imwrite(img_path, img)
-
-
-
-# --------------------------------------------
-# get single image of size HxWxn_channels (BGR)
-# --------------------------------------------
-def read_img(path):
- # read image by cv2
- # return: Numpy float32, HWC, BGR, [0,1]
- img = cv2.imread(path, cv2.IMREAD_UNCHANGED) # cv2.IMREAD_GRAYSCALE
- img = img.astype(np.float32) / 255.
- if img.ndim == 2:
- img = np.expand_dims(img, axis=2)
- # some images have 4 channels
- if img.shape[2] > 3:
- img = img[:, :, :3]
- return img
-
-
-'''
-# --------------------------------------------
-# image format conversion
-# --------------------------------------------
-# numpy(single) <---> numpy(uint)
-# numpy(single) <---> tensor
-# numpy(uint) <---> tensor
-# --------------------------------------------
-'''
-
-
-# --------------------------------------------
-# numpy(single) [0, 1] <---> numpy(uint)
-# --------------------------------------------
-
-
-def uint2single(img):
-
- return np.float32(img/255.)
-
-
-def single2uint(img):
-
- return np.uint8((img.clip(0, 1)*255.).round())
-
-
-def uint162single(img):
-
- return np.float32(img/65535.)
-
-
-def single2uint16(img):
-
- return np.uint16((img.clip(0, 1)*65535.).round())
-
-
-# --------------------------------------------
-# numpy(uint) (HxWxC or HxW) <---> tensor
-# --------------------------------------------
-
-
-# convert uint to 4-dimensional torch tensor
-def uint2tensor4(img):
- if img.ndim == 2:
- img = np.expand_dims(img, axis=2)
- return torch.from_numpy(np.ascontiguousarray(img)).permute(2, 0, 1).float().div(255.).unsqueeze(0)
-
-
-# convert uint to 3-dimensional torch tensor
-def uint2tensor3(img):
- if img.ndim == 2:
- img = np.expand_dims(img, axis=2)
- return torch.from_numpy(np.ascontiguousarray(img)).permute(2, 0, 1).float().div(255.)
-
-
-# convert 2/3/4-dimensional torch tensor to uint
-def tensor2uint(img):
- img = img.data.squeeze().float().clamp_(0, 1).cpu().numpy()
- if img.ndim == 3:
- img = np.transpose(img, (1, 2, 0))
- return np.uint8((img*255.0).round())
-
-
-# --------------------------------------------
-# numpy(single) (HxWxC) <---> tensor
-# --------------------------------------------
-
-
-# convert single (HxWxC) to 3-dimensional torch tensor
-def single2tensor3(img):
- return torch.from_numpy(np.ascontiguousarray(img)).permute(2, 0, 1).float()
-
-
-# convert single (HxWxC) to 4-dimensional torch tensor
-def single2tensor4(img):
- return torch.from_numpy(np.ascontiguousarray(img)).permute(2, 0, 1).float().unsqueeze(0)
-
-
-# convert torch tensor to single
-def tensor2single(img):
- img = img.data.squeeze().float().cpu().numpy()
- if img.ndim == 3:
- img = np.transpose(img, (1, 2, 0))
-
- return img
-
-# convert torch tensor to single
-def tensor2single3(img):
- img = img.data.squeeze().float().cpu().numpy()
- if img.ndim == 3:
- img = np.transpose(img, (1, 2, 0))
- elif img.ndim == 2:
- img = np.expand_dims(img, axis=2)
- return img
-
-
-def single2tensor5(img):
- return torch.from_numpy(np.ascontiguousarray(img)).permute(2, 0, 1, 3).float().unsqueeze(0)
-
-
-def single32tensor5(img):
- return torch.from_numpy(np.ascontiguousarray(img)).float().unsqueeze(0).unsqueeze(0)
-
-
-def single42tensor4(img):
- return torch.from_numpy(np.ascontiguousarray(img)).permute(2, 0, 1, 3).float()
-
-
-# from skimage.io import imread, imsave
-def tensor2img(tensor, out_type=np.uint8, min_max=(0, 1)):
- '''
- Converts a torch Tensor into an image Numpy array of BGR channel order
- Input: 4D(B,(3/1),H,W), 3D(C,H,W), or 2D(H,W), any range, RGB channel order
- Output: 3D(H,W,C) or 2D(H,W), [0,255], np.uint8 (default)
- '''
- tensor = tensor.squeeze().float().cpu().clamp_(*min_max) # squeeze first, then clamp
- tensor = (tensor - min_max[0]) / (min_max[1] - min_max[0]) # to range [0,1]
- n_dim = tensor.dim()
- if n_dim == 4:
- n_img = len(tensor)
- img_np = make_grid(tensor, nrow=int(math.sqrt(n_img)), normalize=False).numpy()
- img_np = np.transpose(img_np[[2, 1, 0], :, :], (1, 2, 0)) # HWC, BGR
- elif n_dim == 3:
- img_np = tensor.numpy()
- img_np = np.transpose(img_np[[2, 1, 0], :, :], (1, 2, 0)) # HWC, BGR
- elif n_dim == 2:
- img_np = tensor.numpy()
- else:
- raise TypeError(
- 'Only support 4D, 3D and 2D tensor. But received with dimension: {:d}'.format(n_dim))
- if out_type == np.uint8:
- img_np = (img_np * 255.0).round()
- # Important. Unlike MATLAB, numpy.uint8() WILL NOT round by default.
- return img_np.astype(out_type)
-
-
-'''
-# --------------------------------------------
-# Augmentation: flip and/or rotate
-# --------------------------------------------
-# The following two are enough.
-# (1) augment_img: numpy image of WxHxC or WxH
-# (2) augment_img_tensor4: tensor image 1xCxWxH
-# --------------------------------------------
-'''
-
-
-def augment_img(img, mode=0):
- '''Kai Zhang (github: https://github.com/cszn)
- '''
- if mode == 0:
- return img
- elif mode == 1:
- return np.flipud(np.rot90(img))
- elif mode == 2:
- return np.flipud(img)
- elif mode == 3:
- return np.rot90(img, k=3)
- elif mode == 4:
- return np.flipud(np.rot90(img, k=2))
- elif mode == 5:
- return np.rot90(img)
- elif mode == 6:
- return np.rot90(img, k=2)
- elif mode == 7:
- return np.flipud(np.rot90(img, k=3))
-
-
-def augment_img_tensor4(img, mode=0):
- '''Kai Zhang (github: https://github.com/cszn)
- '''
- if mode == 0:
- return img
- elif mode == 1:
- return img.rot90(1, [2, 3]).flip([2])
- elif mode == 2:
- return img.flip([2])
- elif mode == 3:
- return img.rot90(3, [2, 3])
- elif mode == 4:
- return img.rot90(2, [2, 3]).flip([2])
- elif mode == 5:
- return img.rot90(1, [2, 3])
- elif mode == 6:
- return img.rot90(2, [2, 3])
- elif mode == 7:
- return img.rot90(3, [2, 3]).flip([2])
-
-
-def augment_img_tensor(img, mode=0):
- '''Kai Zhang (github: https://github.com/cszn)
- '''
- img_size = img.size()
- img_np = img.data.cpu().numpy()
- if len(img_size) == 3:
- img_np = np.transpose(img_np, (1, 2, 0))
- elif len(img_size) == 4:
- img_np = np.transpose(img_np, (2, 3, 1, 0))
- img_np = augment_img(img_np, mode=mode)
- img_tensor = torch.from_numpy(np.ascontiguousarray(img_np))
- if len(img_size) == 3:
- img_tensor = img_tensor.permute(2, 0, 1)
- elif len(img_size) == 4:
- img_tensor = img_tensor.permute(3, 2, 0, 1)
-
- return img_tensor.type_as(img)
-
-
-def augment_img_np3(img, mode=0):
- if mode == 0:
- return img
- elif mode == 1:
- return img.transpose(1, 0, 2)
- elif mode == 2:
- return img[::-1, :, :]
- elif mode == 3:
- img = img[::-1, :, :]
- img = img.transpose(1, 0, 2)
- return img
- elif mode == 4:
- return img[:, ::-1, :]
- elif mode == 5:
- img = img[:, ::-1, :]
- img = img.transpose(1, 0, 2)
- return img
- elif mode == 6:
- img = img[:, ::-1, :]
- img = img[::-1, :, :]
- return img
- elif mode == 7:
- img = img[:, ::-1, :]
- img = img[::-1, :, :]
- img = img.transpose(1, 0, 2)
- return img
-
-
-def augment_imgs(img_list, hflip=True, rot=True):
- # horizontal flip OR rotate
- hflip = hflip and random.random() < 0.5
- vflip = rot and random.random() < 0.5
- rot90 = rot and random.random() < 0.5
-
- def _augment(img):
- if hflip:
- img = img[:, ::-1, :]
- if vflip:
- img = img[::-1, :, :]
- if rot90:
- img = img.transpose(1, 0, 2)
- return img
-
- return [_augment(img) for img in img_list]
-
-
-'''
-# --------------------------------------------
-# modcrop and shave
-# --------------------------------------------
-'''
-
-
-def modcrop(img_in, scale):
- # img_in: Numpy, HWC or HW
- img = np.copy(img_in)
- if img.ndim == 2:
- H, W = img.shape
- H_r, W_r = H % scale, W % scale
- img = img[:H - H_r, :W - W_r]
- elif img.ndim == 3:
- H, W, C = img.shape
- H_r, W_r = H % scale, W % scale
- img = img[:H - H_r, :W - W_r, :]
- else:
- raise ValueError('Wrong img ndim: [{:d}].'.format(img.ndim))
- return img
-
-
-def shave(img_in, border=0):
- # img_in: Numpy, HWC or HW
- img = np.copy(img_in)
- h, w = img.shape[:2]
- img = img[border:h-border, border:w-border]
- return img
-
-
-'''
-# --------------------------------------------
-# image processing routines on numpy images
-# channel_convert(in_c, tar_type, img_list):
-# rgb2ycbcr(img, only_y=True):
-# bgr2ycbcr(img, only_y=True):
-# ycbcr2rgb(img):
-# --------------------------------------------
-'''
-
-
-def rgb2ycbcr(img, only_y=True):
- '''same as matlab rgb2ycbcr
- only_y: only return Y channel
- Input:
- uint8, [0, 255]
- float, [0, 1]
- '''
- in_img_type = img.dtype
- img.astype(np.float32)
- if in_img_type != np.uint8:
- img *= 255.
- # convert
- if only_y:
- rlt = np.dot(img, [65.481, 128.553, 24.966]) / 255.0 + 16.0
- else:
- rlt = np.matmul(img, [[65.481, -37.797, 112.0], [128.553, -74.203, -93.786],
- [24.966, 112.0, -18.214]]) / 255.0 + [16, 128, 128]
- if in_img_type == np.uint8:
- rlt = rlt.round()
- else:
- rlt /= 255.
- return rlt.astype(in_img_type)
-
-
-def ycbcr2rgb(img):
- '''same as matlab ycbcr2rgb
- Input:
- uint8, [0, 255]
- float, [0, 1]
- '''
- in_img_type = img.dtype
- img.astype(np.float32)
- if in_img_type != np.uint8:
- img *= 255.
- # convert
- rlt = np.matmul(img, [[0.00456621, 0.00456621, 0.00456621], [0, -0.00153632, 0.00791071],
- [0.00625893, -0.00318811, 0]]) * 255.0 + [-222.921, 135.576, -276.836]
- if in_img_type == np.uint8:
- rlt = rlt.round()
- else:
- rlt /= 255.
- return rlt.astype(in_img_type)
-
-
-def bgr2ycbcr(img, only_y=True):
- '''bgr version of rgb2ycbcr
- only_y: only return Y channel
- Input:
- uint8, [0, 255]
- float, [0, 1]
- '''
- in_img_type = img.dtype
- img.astype(np.float32)
- if in_img_type != np.uint8:
- img *= 255.
- # convert
- if only_y:
- rlt = np.dot(img, [24.966, 128.553, 65.481]) / 255.0 + 16.0
- else:
- rlt = np.matmul(img, [[24.966, 112.0, -18.214], [128.553, -74.203, -93.786],
- [65.481, -37.797, 112.0]]) / 255.0 + [16, 128, 128]
- if in_img_type == np.uint8:
- rlt = rlt.round()
- else:
- rlt /= 255.
- return rlt.astype(in_img_type)
-
-
-def channel_convert(in_c, tar_type, img_list):
- # conversion among BGR, gray and y
- if in_c == 3 and tar_type == 'gray': # BGR to gray
- gray_list = [cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) for img in img_list]
- return [np.expand_dims(img, axis=2) for img in gray_list]
- elif in_c == 3 and tar_type == 'y': # BGR to y
- y_list = [bgr2ycbcr(img, only_y=True) for img in img_list]
- return [np.expand_dims(img, axis=2) for img in y_list]
- elif in_c == 1 and tar_type == 'RGB': # gray/y to BGR
- return [cv2.cvtColor(img, cv2.COLOR_GRAY2BGR) for img in img_list]
- else:
- return img_list
-
-
-'''
-# --------------------------------------------
-# metric, PSNR and SSIM
-# --------------------------------------------
-'''
-
-
-# --------------------------------------------
-# PSNR
-# --------------------------------------------
-def calculate_psnr(img1, img2, border=0):
- # img1 and img2 have range [0, 255]
- #img1 = img1.squeeze()
- #img2 = img2.squeeze()
- if not img1.shape == img2.shape:
- raise ValueError('Input images must have the same dimensions.')
- h, w = img1.shape[:2]
- img1 = img1[border:h-border, border:w-border]
- img2 = img2[border:h-border, border:w-border]
-
- img1 = img1.astype(np.float64)
- img2 = img2.astype(np.float64)
- mse = np.mean((img1 - img2)**2)
- if mse == 0:
- return float('inf')
- return 20 * math.log10(255.0 / math.sqrt(mse))
-
-
-# --------------------------------------------
-# SSIM
-# --------------------------------------------
-def calculate_ssim(img1, img2, border=0):
- '''calculate SSIM
- the same outputs as MATLAB's
- img1, img2: [0, 255]
- '''
- #img1 = img1.squeeze()
- #img2 = img2.squeeze()
- if not img1.shape == img2.shape:
- raise ValueError('Input images must have the same dimensions.')
- h, w = img1.shape[:2]
- img1 = img1[border:h-border, border:w-border]
- img2 = img2[border:h-border, border:w-border]
-
- if img1.ndim == 2:
- return ssim(img1, img2)
- elif img1.ndim == 3:
- if img1.shape[2] == 3:
- ssims = []
- for i in range(3):
- ssims.append(ssim(img1[:,:,i], img2[:,:,i]))
- return np.array(ssims).mean()
- elif img1.shape[2] == 1:
- return ssim(np.squeeze(img1), np.squeeze(img2))
- else:
- raise ValueError('Wrong input image dimensions.')
-
-
-def ssim(img1, img2):
- C1 = (0.01 * 255)**2
- C2 = (0.03 * 255)**2
-
- img1 = img1.astype(np.float64)
- img2 = img2.astype(np.float64)
- kernel = cv2.getGaussianKernel(11, 1.5)
- window = np.outer(kernel, kernel.transpose())
-
- mu1 = cv2.filter2D(img1, -1, window)[5:-5, 5:-5] # valid
- mu2 = cv2.filter2D(img2, -1, window)[5:-5, 5:-5]
- mu1_sq = mu1**2
- mu2_sq = mu2**2
- mu1_mu2 = mu1 * mu2
- sigma1_sq = cv2.filter2D(img1**2, -1, window)[5:-5, 5:-5] - mu1_sq
- sigma2_sq = cv2.filter2D(img2**2, -1, window)[5:-5, 5:-5] - mu2_sq
- sigma12 = cv2.filter2D(img1 * img2, -1, window)[5:-5, 5:-5] - mu1_mu2
-
- ssim_map = ((2 * mu1_mu2 + C1) * (2 * sigma12 + C2)) / ((mu1_sq + mu2_sq + C1) *
- (sigma1_sq + sigma2_sq + C2))
- return ssim_map.mean()
-
-
-'''
-# --------------------------------------------
-# matlab's bicubic imresize (numpy and torch) [0, 1]
-# --------------------------------------------
-'''
-
-
-# matlab 'imresize' function, now only support 'bicubic'
-def cubic(x):
- absx = torch.abs(x)
- absx2 = absx**2
- absx3 = absx**3
- return (1.5*absx3 - 2.5*absx2 + 1) * ((absx <= 1).type_as(absx)) + \
- (-0.5*absx3 + 2.5*absx2 - 4*absx + 2) * (((absx > 1)*(absx <= 2)).type_as(absx))
-
-
-def calculate_weights_indices(in_length, out_length, scale, kernel, kernel_width, antialiasing):
- if (scale < 1) and (antialiasing):
- # Use a modified kernel to simultaneously interpolate and antialias; this calls for a larger kernel width
- kernel_width = kernel_width / scale
-
- # Output-space coordinates
- x = torch.linspace(1, out_length, out_length)
-
- # Input-space coordinates. Calculate the inverse mapping such that 0.5
- # in output space maps to 0.5 in input space, and 0.5+scale in output
- # space maps to 1.5 in input space.
- u = x / scale + 0.5 * (1 - 1 / scale)
-
- # What is the left-most pixel that can be involved in the computation?
- left = torch.floor(u - kernel_width / 2)
-
- # What is the maximum number of pixels that can be involved in the
- # computation? Note: it's OK to use an extra pixel here; if the
- # corresponding weights are all zero, it will be eliminated at the end
- # of this function.
- P = math.ceil(kernel_width) + 2
-
- # The indices of the input pixels involved in computing the k-th output
- # pixel are in row k of the indices matrix.
- indices = left.view(out_length, 1).expand(out_length, P) + torch.linspace(0, P - 1, P).view(
- 1, P).expand(out_length, P)
-
- # The weights used to compute the k-th output pixel are in row k of the
- # weights matrix.
- distance_to_center = u.view(out_length, 1).expand(out_length, P) - indices
- # apply cubic kernel
- if (scale < 1) and (antialiasing):
- weights = scale * cubic(distance_to_center * scale)
- else:
- weights = cubic(distance_to_center)
- # Normalize the weights matrix so that each row sums to 1.
- weights_sum = torch.sum(weights, 1).view(out_length, 1)
- weights = weights / weights_sum.expand(out_length, P)
-
- # If a column in weights is all zero, get rid of it. only consider the first and last column.
- weights_zero_tmp = torch.sum((weights == 0), 0)
- if not math.isclose(weights_zero_tmp[0], 0, rel_tol=1e-6):
- indices = indices.narrow(1, 1, P - 2)
- weights = weights.narrow(1, 1, P - 2)
- if not math.isclose(weights_zero_tmp[-1], 0, rel_tol=1e-6):
- indices = indices.narrow(1, 0, P - 2)
- weights = weights.narrow(1, 0, P - 2)
- weights = weights.contiguous()
- indices = indices.contiguous()
- sym_len_s = -indices.min() + 1
- sym_len_e = indices.max() - in_length
- indices = indices + sym_len_s - 1
- return weights, indices, int(sym_len_s), int(sym_len_e)
-
-
-# --------------------------------------------
-# imresize for tensor image [0, 1]
-# --------------------------------------------
-def imresize(img, scale, antialiasing=True):
- # Now the scale should be the same for H and W
- # input: img: pytorch tensor, CHW or HW [0,1]
- # output: CHW or HW [0,1] w/o round
- need_squeeze = True if img.dim() == 2 else False
- if need_squeeze:
- img.unsqueeze_(0)
- in_C, in_H, in_W = img.size()
- out_C, out_H, out_W = in_C, math.ceil(in_H * scale), math.ceil(in_W * scale)
- kernel_width = 4
- kernel = 'cubic'
-
- # Return the desired dimension order for performing the resize. The
- # strategy is to perform the resize first along the dimension with the
- # smallest scale factor.
- # Now we do not support this.
-
- # get weights and indices
- weights_H, indices_H, sym_len_Hs, sym_len_He = calculate_weights_indices(
- in_H, out_H, scale, kernel, kernel_width, antialiasing)
- weights_W, indices_W, sym_len_Ws, sym_len_We = calculate_weights_indices(
- in_W, out_W, scale, kernel, kernel_width, antialiasing)
- # process H dimension
- # symmetric copying
- img_aug = torch.FloatTensor(in_C, in_H + sym_len_Hs + sym_len_He, in_W)
- img_aug.narrow(1, sym_len_Hs, in_H).copy_(img)
-
- sym_patch = img[:, :sym_len_Hs, :]
- inv_idx = torch.arange(sym_patch.size(1) - 1, -1, -1).long()
- sym_patch_inv = sym_patch.index_select(1, inv_idx)
- img_aug.narrow(1, 0, sym_len_Hs).copy_(sym_patch_inv)
-
- sym_patch = img[:, -sym_len_He:, :]
- inv_idx = torch.arange(sym_patch.size(1) - 1, -1, -1).long()
- sym_patch_inv = sym_patch.index_select(1, inv_idx)
- img_aug.narrow(1, sym_len_Hs + in_H, sym_len_He).copy_(sym_patch_inv)
-
- out_1 = torch.FloatTensor(in_C, out_H, in_W)
- kernel_width = weights_H.size(1)
- for i in range(out_H):
- idx = int(indices_H[i][0])
- for j in range(out_C):
- out_1[j, i, :] = img_aug[j, idx:idx + kernel_width, :].transpose(0, 1).mv(weights_H[i])
-
- # process W dimension
- # symmetric copying
- out_1_aug = torch.FloatTensor(in_C, out_H, in_W + sym_len_Ws + sym_len_We)
- out_1_aug.narrow(2, sym_len_Ws, in_W).copy_(out_1)
-
- sym_patch = out_1[:, :, :sym_len_Ws]
- inv_idx = torch.arange(sym_patch.size(2) - 1, -1, -1).long()
- sym_patch_inv = sym_patch.index_select(2, inv_idx)
- out_1_aug.narrow(2, 0, sym_len_Ws).copy_(sym_patch_inv)
-
- sym_patch = out_1[:, :, -sym_len_We:]
- inv_idx = torch.arange(sym_patch.size(2) - 1, -1, -1).long()
- sym_patch_inv = sym_patch.index_select(2, inv_idx)
- out_1_aug.narrow(2, sym_len_Ws + in_W, sym_len_We).copy_(sym_patch_inv)
-
- out_2 = torch.FloatTensor(in_C, out_H, out_W)
- kernel_width = weights_W.size(1)
- for i in range(out_W):
- idx = int(indices_W[i][0])
- for j in range(out_C):
- out_2[j, :, i] = out_1_aug[j, :, idx:idx + kernel_width].mv(weights_W[i])
- if need_squeeze:
- out_2.squeeze_()
- return out_2
-
-
-# --------------------------------------------
-# imresize for numpy image [0, 1]
-# --------------------------------------------
-def imresize_np(img, scale, antialiasing=True):
- # Now the scale should be the same for H and W
- # input: img: Numpy, HWC or HW [0,1]
- # output: HWC or HW [0,1] w/o round
- img = torch.from_numpy(img)
- need_squeeze = True if img.dim() == 2 else False
- if need_squeeze:
- img.unsqueeze_(2)
-
- in_H, in_W, in_C = img.size()
- out_C, out_H, out_W = in_C, math.ceil(in_H * scale), math.ceil(in_W * scale)
- kernel_width = 4
- kernel = 'cubic'
-
- # Return the desired dimension order for performing the resize. The
- # strategy is to perform the resize first along the dimension with the
- # smallest scale factor.
- # Now we do not support this.
-
- # get weights and indices
- weights_H, indices_H, sym_len_Hs, sym_len_He = calculate_weights_indices(
- in_H, out_H, scale, kernel, kernel_width, antialiasing)
- weights_W, indices_W, sym_len_Ws, sym_len_We = calculate_weights_indices(
- in_W, out_W, scale, kernel, kernel_width, antialiasing)
- # process H dimension
- # symmetric copying
- img_aug = torch.FloatTensor(in_H + sym_len_Hs + sym_len_He, in_W, in_C)
- img_aug.narrow(0, sym_len_Hs, in_H).copy_(img)
-
- sym_patch = img[:sym_len_Hs, :, :]
- inv_idx = torch.arange(sym_patch.size(0) - 1, -1, -1).long()
- sym_patch_inv = sym_patch.index_select(0, inv_idx)
- img_aug.narrow(0, 0, sym_len_Hs).copy_(sym_patch_inv)
-
- sym_patch = img[-sym_len_He:, :, :]
- inv_idx = torch.arange(sym_patch.size(0) - 1, -1, -1).long()
- sym_patch_inv = sym_patch.index_select(0, inv_idx)
- img_aug.narrow(0, sym_len_Hs + in_H, sym_len_He).copy_(sym_patch_inv)
-
- out_1 = torch.FloatTensor(out_H, in_W, in_C)
- kernel_width = weights_H.size(1)
- for i in range(out_H):
- idx = int(indices_H[i][0])
- for j in range(out_C):
- out_1[i, :, j] = img_aug[idx:idx + kernel_width, :, j].transpose(0, 1).mv(weights_H[i])
-
- # process W dimension
- # symmetric copying
- out_1_aug = torch.FloatTensor(out_H, in_W + sym_len_Ws + sym_len_We, in_C)
- out_1_aug.narrow(1, sym_len_Ws, in_W).copy_(out_1)
-
- sym_patch = out_1[:, :sym_len_Ws, :]
- inv_idx = torch.arange(sym_patch.size(1) - 1, -1, -1).long()
- sym_patch_inv = sym_patch.index_select(1, inv_idx)
- out_1_aug.narrow(1, 0, sym_len_Ws).copy_(sym_patch_inv)
-
- sym_patch = out_1[:, -sym_len_We:, :]
- inv_idx = torch.arange(sym_patch.size(1) - 1, -1, -1).long()
- sym_patch_inv = sym_patch.index_select(1, inv_idx)
- out_1_aug.narrow(1, sym_len_Ws + in_W, sym_len_We).copy_(sym_patch_inv)
-
- out_2 = torch.FloatTensor(out_H, out_W, in_C)
- kernel_width = weights_W.size(1)
- for i in range(out_W):
- idx = int(indices_W[i][0])
- for j in range(out_C):
- out_2[:, i, j] = out_1_aug[:, idx:idx + kernel_width, j].mv(weights_W[i])
- if need_squeeze:
- out_2.squeeze_()
-
- return out_2.numpy()
-
-
-if __name__ == '__main__':
- print('---')
-# img = imread_uint('test.bmp', 3)
-# img = uint2single(img)
-# img_bicubic = imresize_np(img, 1/4)
\ No newline at end of file
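As a quick sanity check of the MATLAB-style resize and the PSNR metric defined above, something like the following could be run (a sketch; it assumes the module is importable under the path of this deleted file, ldm.modules.image_degradation.utils_image):

    import numpy as np
    from ldm.modules.image_degradation import utils_image as util

    img = np.random.rand(64, 64, 3).astype(np.float32)   # single-precision image in [0, 1]
    small = util.imresize_np(img, 1 / 4)                  # bicubic downscale to 16x16x3
    back = util.imresize_np(small, 4)                     # bicubic upscale back to 64x64x3
    psnr = util.calculate_psnr(util.single2uint(img), util.single2uint(back))
    print(small.shape, back.shape, psnr)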
diff --git a/One-2-3-45-master 2/ldm/modules/losses/__init__.py b/One-2-3-45-master 2/ldm/modules/losses/__init__.py
deleted file mode 100644
index 876d7c5bd6e3245ee77feb4c482b7a8143604ad5..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/modules/losses/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from ldm.modules.losses.contperceptual import LPIPSWithDiscriminator
\ No newline at end of file
diff --git a/One-2-3-45-master 2/ldm/modules/losses/contperceptual.py b/One-2-3-45-master 2/ldm/modules/losses/contperceptual.py
deleted file mode 100644
index 672c1e32a1389def02461c0781339681060c540e..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/modules/losses/contperceptual.py
+++ /dev/null
@@ -1,111 +0,0 @@
-import torch
-import torch.nn as nn
-
-from taming.modules.losses.vqperceptual import * # TODO: taming dependency yes/no?
-
-
-class LPIPSWithDiscriminator(nn.Module):
- def __init__(self, disc_start, logvar_init=0.0, kl_weight=1.0, pixelloss_weight=1.0,
- disc_num_layers=3, disc_in_channels=3, disc_factor=1.0, disc_weight=1.0,
- perceptual_weight=1.0, use_actnorm=False, disc_conditional=False,
- disc_loss="hinge"):
-
- super().__init__()
- assert disc_loss in ["hinge", "vanilla"]
- self.kl_weight = kl_weight
- self.pixel_weight = pixelloss_weight
- self.perceptual_loss = LPIPS().eval()
- self.perceptual_weight = perceptual_weight
- # output log variance
- self.logvar = nn.Parameter(torch.ones(size=()) * logvar_init)
-
- self.discriminator = NLayerDiscriminator(input_nc=disc_in_channels,
- n_layers=disc_num_layers,
- use_actnorm=use_actnorm
- ).apply(weights_init)
- self.discriminator_iter_start = disc_start
- self.disc_loss = hinge_d_loss if disc_loss == "hinge" else vanilla_d_loss
- self.disc_factor = disc_factor
- self.discriminator_weight = disc_weight
- self.disc_conditional = disc_conditional
-
- def calculate_adaptive_weight(self, nll_loss, g_loss, last_layer=None):
- if last_layer is not None:
- nll_grads = torch.autograd.grad(nll_loss, last_layer, retain_graph=True)[0]
- g_grads = torch.autograd.grad(g_loss, last_layer, retain_graph=True)[0]
- else:
- nll_grads = torch.autograd.grad(nll_loss, self.last_layer[0], retain_graph=True)[0]
- g_grads = torch.autograd.grad(g_loss, self.last_layer[0], retain_graph=True)[0]
-
- d_weight = torch.norm(nll_grads) / (torch.norm(g_grads) + 1e-4)
- d_weight = torch.clamp(d_weight, 0.0, 1e4).detach()
- d_weight = d_weight * self.discriminator_weight
- return d_weight
-
- def forward(self, inputs, reconstructions, posteriors, optimizer_idx,
- global_step, last_layer=None, cond=None, split="train",
- weights=None):
- rec_loss = torch.abs(inputs.contiguous() - reconstructions.contiguous())
- if self.perceptual_weight > 0:
- p_loss = self.perceptual_loss(inputs.contiguous(), reconstructions.contiguous())
- rec_loss = rec_loss + self.perceptual_weight * p_loss
-
- nll_loss = rec_loss / torch.exp(self.logvar) + self.logvar
- weighted_nll_loss = nll_loss
- if weights is not None:
- weighted_nll_loss = weights*nll_loss
- weighted_nll_loss = torch.sum(weighted_nll_loss) / weighted_nll_loss.shape[0]
- nll_loss = torch.sum(nll_loss) / nll_loss.shape[0]
- kl_loss = posteriors.kl()
- kl_loss = torch.sum(kl_loss) / kl_loss.shape[0]
-
- # now the GAN part
- if optimizer_idx == 0:
- # generator update
- if cond is None:
- assert not self.disc_conditional
- logits_fake = self.discriminator(reconstructions.contiguous())
- else:
- assert self.disc_conditional
- logits_fake = self.discriminator(torch.cat((reconstructions.contiguous(), cond), dim=1))
- g_loss = -torch.mean(logits_fake)
-
- if self.disc_factor > 0.0:
- try:
- d_weight = self.calculate_adaptive_weight(nll_loss, g_loss, last_layer=last_layer)
- except RuntimeError:
- assert not self.training
- d_weight = torch.tensor(0.0)
- else:
- d_weight = torch.tensor(0.0)
-
- disc_factor = adopt_weight(self.disc_factor, global_step, threshold=self.discriminator_iter_start)
- loss = weighted_nll_loss + self.kl_weight * kl_loss + d_weight * disc_factor * g_loss
-
- log = {"{}/total_loss".format(split): loss.clone().detach().mean(), "{}/logvar".format(split): self.logvar.detach(),
- "{}/kl_loss".format(split): kl_loss.detach().mean(), "{}/nll_loss".format(split): nll_loss.detach().mean(),
- "{}/rec_loss".format(split): rec_loss.detach().mean(),
- "{}/d_weight".format(split): d_weight.detach(),
- "{}/disc_factor".format(split): torch.tensor(disc_factor),
- "{}/g_loss".format(split): g_loss.detach().mean(),
- }
- return loss, log
-
- if optimizer_idx == 1:
- # second pass for discriminator update
- if cond is None:
- logits_real = self.discriminator(inputs.contiguous().detach())
- logits_fake = self.discriminator(reconstructions.contiguous().detach())
- else:
- logits_real = self.discriminator(torch.cat((inputs.contiguous().detach(), cond), dim=1))
- logits_fake = self.discriminator(torch.cat((reconstructions.contiguous().detach(), cond), dim=1))
-
- disc_factor = adopt_weight(self.disc_factor, global_step, threshold=self.discriminator_iter_start)
- d_loss = disc_factor * self.disc_loss(logits_real, logits_fake)
-
- log = {"{}/disc_loss".format(split): d_loss.clone().detach().mean(),
- "{}/logits_real".format(split): logits_real.detach().mean(),
- "{}/logits_fake".format(split): logits_fake.detach().mean()
- }
- return d_loss, log
-
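The adaptive discriminator weight above rescales the adversarial term so that its gradient at the decoder's last layer matches the reconstruction gradient in magnitude; a standalone toy sketch of that computation (the losses and the last layer here are made up purely for illustration):

    import torch

    last_layer = torch.nn.Parameter(torch.randn(4, 4))  # stands in for the decoder's final weight
    nll_loss = (last_layer * 2.0).sum()                  # toy reconstruction-style loss
    g_loss = (last_layer * -0.5).sum()                   # toy generator (adversarial) loss

    nll_grads = torch.autograd.grad(nll_loss, last_layer, retain_graph=True)[0]
    g_grads = torch.autograd.grad(g_loss, last_layer, retain_graph=True)[0]
    d_weight = torch.clamp(torch.norm(nll_grads) / (torch.norm(g_grads) + 1e-4), 0.0, 1e4).detach()
    print(d_weight)  # ~4.0 here: the adversarial term is scaled up when its gradients are comparatively small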
diff --git a/One-2-3-45-master 2/ldm/modules/losses/vqperceptual.py b/One-2-3-45-master 2/ldm/modules/losses/vqperceptual.py
deleted file mode 100644
index f69981769e4bd5462600458c4fcf26620f7e4306..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/modules/losses/vqperceptual.py
+++ /dev/null
@@ -1,167 +0,0 @@
-import torch
-from torch import nn
-import torch.nn.functional as F
-from einops import repeat
-
-from taming.modules.discriminator.model import NLayerDiscriminator, weights_init
-from taming.modules.losses.lpips import LPIPS
-from taming.modules.losses.vqperceptual import hinge_d_loss, vanilla_d_loss
-
-
-def hinge_d_loss_with_exemplar_weights(logits_real, logits_fake, weights):
- assert weights.shape[0] == logits_real.shape[0] == logits_fake.shape[0]
- loss_real = torch.mean(F.relu(1. - logits_real), dim=[1,2,3])
- loss_fake = torch.mean(F.relu(1. + logits_fake), dim=[1,2,3])
- loss_real = (weights * loss_real).sum() / weights.sum()
- loss_fake = (weights * loss_fake).sum() / weights.sum()
- d_loss = 0.5 * (loss_real + loss_fake)
- return d_loss
-
-def adopt_weight(weight, global_step, threshold=0, value=0.):
- if global_step < threshold:
- weight = value
- return weight
-
-
-def measure_perplexity(predicted_indices, n_embed):
- # src: https://github.com/karpathy/deep-vector-quantization/blob/main/model.py
- # eval cluster perplexity. when perplexity == num_embeddings then all clusters are used exactly equally
- encodings = F.one_hot(predicted_indices, n_embed).float().reshape(-1, n_embed)
- avg_probs = encodings.mean(0)
- perplexity = (-(avg_probs * torch.log(avg_probs + 1e-10)).sum()).exp()
- cluster_use = torch.sum(avg_probs > 0)
- return perplexity, cluster_use
-
-def l1(x, y):
- return torch.abs(x-y)
-
-
-def l2(x, y):
- return torch.pow((x-y), 2)
-
-
-class VQLPIPSWithDiscriminator(nn.Module):
- def __init__(self, disc_start, codebook_weight=1.0, pixelloss_weight=1.0,
- disc_num_layers=3, disc_in_channels=3, disc_factor=1.0, disc_weight=1.0,
- perceptual_weight=1.0, use_actnorm=False, disc_conditional=False,
- disc_ndf=64, disc_loss="hinge", n_classes=None, perceptual_loss="lpips",
- pixel_loss="l1"):
- super().__init__()
- assert disc_loss in ["hinge", "vanilla"]
- assert perceptual_loss in ["lpips", "clips", "dists"]
- assert pixel_loss in ["l1", "l2"]
- self.codebook_weight = codebook_weight
- self.pixel_weight = pixelloss_weight
- if perceptual_loss == "lpips":
- print(f"{self.__class__.__name__}: Running with LPIPS.")
- self.perceptual_loss = LPIPS().eval()
- else:
- raise ValueError(f"Unknown perceptual loss: >> {perceptual_loss} <<")
- self.perceptual_weight = perceptual_weight
-
- if pixel_loss == "l1":
- self.pixel_loss = l1
- else:
- self.pixel_loss = l2
-
- self.discriminator = NLayerDiscriminator(input_nc=disc_in_channels,
- n_layers=disc_num_layers,
- use_actnorm=use_actnorm,
- ndf=disc_ndf
- ).apply(weights_init)
- self.discriminator_iter_start = disc_start
- if disc_loss == "hinge":
- self.disc_loss = hinge_d_loss
- elif disc_loss == "vanilla":
- self.disc_loss = vanilla_d_loss
- else:
- raise ValueError(f"Unknown GAN loss '{disc_loss}'.")
- print(f"VQLPIPSWithDiscriminator running with {disc_loss} loss.")
- self.disc_factor = disc_factor
- self.discriminator_weight = disc_weight
- self.disc_conditional = disc_conditional
- self.n_classes = n_classes
-
- def calculate_adaptive_weight(self, nll_loss, g_loss, last_layer=None):
- if last_layer is not None:
- nll_grads = torch.autograd.grad(nll_loss, last_layer, retain_graph=True)[0]
- g_grads = torch.autograd.grad(g_loss, last_layer, retain_graph=True)[0]
- else:
- nll_grads = torch.autograd.grad(nll_loss, self.last_layer[0], retain_graph=True)[0]
- g_grads = torch.autograd.grad(g_loss, self.last_layer[0], retain_graph=True)[0]
-
- d_weight = torch.norm(nll_grads) / (torch.norm(g_grads) + 1e-4)
- d_weight = torch.clamp(d_weight, 0.0, 1e4).detach()
- d_weight = d_weight * self.discriminator_weight
- return d_weight
-
- def forward(self, codebook_loss, inputs, reconstructions, optimizer_idx,
- global_step, last_layer=None, cond=None, split="train", predicted_indices=None):
- if codebook_loss is None:  # the `exists` helper is not imported in this file, so check for None explicitly
- codebook_loss = torch.tensor([0.]).to(inputs.device)
- #rec_loss = torch.abs(inputs.contiguous() - reconstructions.contiguous())
- rec_loss = self.pixel_loss(inputs.contiguous(), reconstructions.contiguous())
- if self.perceptual_weight > 0:
- p_loss = self.perceptual_loss(inputs.contiguous(), reconstructions.contiguous())
- rec_loss = rec_loss + self.perceptual_weight * p_loss
- else:
- p_loss = torch.tensor([0.0])
-
- nll_loss = rec_loss
- #nll_loss = torch.sum(nll_loss) / nll_loss.shape[0]
- nll_loss = torch.mean(nll_loss)
-
- # now the GAN part
- if optimizer_idx == 0:
- # generator update
- if cond is None:
- assert not self.disc_conditional
- logits_fake = self.discriminator(reconstructions.contiguous())
- else:
- assert self.disc_conditional
- logits_fake = self.discriminator(torch.cat((reconstructions.contiguous(), cond), dim=1))
- g_loss = -torch.mean(logits_fake)
-
- try:
- d_weight = self.calculate_adaptive_weight(nll_loss, g_loss, last_layer=last_layer)
- except RuntimeError:
- assert not self.training
- d_weight = torch.tensor(0.0)
-
- disc_factor = adopt_weight(self.disc_factor, global_step, threshold=self.discriminator_iter_start)
- loss = nll_loss + d_weight * disc_factor * g_loss + self.codebook_weight * codebook_loss.mean()
-
- log = {"{}/total_loss".format(split): loss.clone().detach().mean(),
- "{}/quant_loss".format(split): codebook_loss.detach().mean(),
- "{}/nll_loss".format(split): nll_loss.detach().mean(),
- "{}/rec_loss".format(split): rec_loss.detach().mean(),
- "{}/p_loss".format(split): p_loss.detach().mean(),
- "{}/d_weight".format(split): d_weight.detach(),
- "{}/disc_factor".format(split): torch.tensor(disc_factor),
- "{}/g_loss".format(split): g_loss.detach().mean(),
- }
- if predicted_indices is not None:
- assert self.n_classes is not None
- with torch.no_grad():
- perplexity, cluster_usage = measure_perplexity(predicted_indices, self.n_classes)
- log[f"{split}/perplexity"] = perplexity
- log[f"{split}/cluster_usage"] = cluster_usage
- return loss, log
-
- if optimizer_idx == 1:
- # second pass for discriminator update
- if cond is None:
- logits_real = self.discriminator(inputs.contiguous().detach())
- logits_fake = self.discriminator(reconstructions.contiguous().detach())
- else:
- logits_real = self.discriminator(torch.cat((inputs.contiguous().detach(), cond), dim=1))
- logits_fake = self.discriminator(torch.cat((reconstructions.contiguous().detach(), cond), dim=1))
-
- disc_factor = adopt_weight(self.disc_factor, global_step, threshold=self.discriminator_iter_start)
- d_loss = disc_factor * self.disc_loss(logits_real, logits_fake)
-
- log = {"{}/disc_loss".format(split): d_loss.clone().detach().mean(),
- "{}/logits_real".format(split): logits_real.detach().mean(),
- "{}/logits_fake".format(split): logits_fake.detach().mean()
- }
- return d_loss, log
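A small sketch of the two helpers above — adopt_weight, which gates the discriminator until a warm-up step is reached, and measure_perplexity, which reports codebook usage — on dummy inputs (assumes the module is importable under the path of this deleted file, ldm.modules.losses.vqperceptual):

    import torch
    from ldm.modules.losses.vqperceptual import adopt_weight, measure_perplexity

    # the discriminator factor stays at 0 until global_step reaches the threshold
    print(adopt_weight(1.0, global_step=100, threshold=500))  # -> 0.0
    print(adopt_weight(1.0, global_step=800, threshold=500))  # -> 1.0

    # perplexity equals n_embed when all codebook entries are used equally often
    indices = torch.arange(16) % 8                            # 16 tokens drawn uniformly from 8 codes
    perplexity, cluster_use = measure_perplexity(indices, n_embed=8)
    print(perplexity.item(), cluster_use.item())              # ~8.0, 8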
diff --git a/One-2-3-45-master 2/ldm/modules/x_transformer.py b/One-2-3-45-master 2/ldm/modules/x_transformer.py
deleted file mode 100644
index 5fc15bf9cfe0111a910e7de33d04ffdec3877576..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/modules/x_transformer.py
+++ /dev/null
@@ -1,641 +0,0 @@
-"""shout-out to https://github.com/lucidrains/x-transformers/tree/main/x_transformers"""
-import torch
-from torch import nn, einsum
-import torch.nn.functional as F
-from functools import partial
-from inspect import isfunction
-from collections import namedtuple
-from einops import rearrange, repeat, reduce
-
-# constants
-
-DEFAULT_DIM_HEAD = 64
-
-Intermediates = namedtuple('Intermediates', [
- 'pre_softmax_attn',
- 'post_softmax_attn'
-])
-
-LayerIntermediates = namedtuple('LayerIntermediates', [
- 'hiddens',
- 'attn_intermediates'
-])
-
-
-class AbsolutePositionalEmbedding(nn.Module):
- def __init__(self, dim, max_seq_len):
- super().__init__()
- self.emb = nn.Embedding(max_seq_len, dim)
- self.init_()
-
- def init_(self):
- nn.init.normal_(self.emb.weight, std=0.02)
-
- def forward(self, x):
- n = torch.arange(x.shape[1], device=x.device)
- return self.emb(n)[None, :, :]
-
-
-class FixedPositionalEmbedding(nn.Module):
- def __init__(self, dim):
- super().__init__()
- inv_freq = 1. / (10000 ** (torch.arange(0, dim, 2).float() / dim))
- self.register_buffer('inv_freq', inv_freq)
-
- def forward(self, x, seq_dim=1, offset=0):
- t = torch.arange(x.shape[seq_dim], device=x.device).type_as(self.inv_freq) + offset
- sinusoid_inp = torch.einsum('i , j -> i j', t, self.inv_freq)
- emb = torch.cat((sinusoid_inp.sin(), sinusoid_inp.cos()), dim=-1)
- return emb[None, :, :]
-
-
-# helpers
-
-def exists(val):
- return val is not None
-
-
-def default(val, d):
- if exists(val):
- return val
- return d() if isfunction(d) else d
-
-
-def always(val):
- def inner(*args, **kwargs):
- return val
- return inner
-
-
-def not_equals(val):
- def inner(x):
- return x != val
- return inner
-
-
-def equals(val):
- def inner(x):
- return x == val
- return inner
-
-
-def max_neg_value(tensor):
- return -torch.finfo(tensor.dtype).max
-
-
-# keyword argument helpers
-
-def pick_and_pop(keys, d):
- values = list(map(lambda key: d.pop(key), keys))
- return dict(zip(keys, values))
-
-
-def group_dict_by_key(cond, d):
- return_val = [dict(), dict()]
- for key in d.keys():
- match = bool(cond(key))
- ind = int(not match)
- return_val[ind][key] = d[key]
- return (*return_val,)
-
-
-def string_begins_with(prefix, str):
- return str.startswith(prefix)
-
-
-def group_by_key_prefix(prefix, d):
- return group_dict_by_key(partial(string_begins_with, prefix), d)
-
-
-def groupby_prefix_and_trim(prefix, d):
- kwargs_with_prefix, kwargs = group_dict_by_key(partial(string_begins_with, prefix), d)
- kwargs_without_prefix = dict(map(lambda x: (x[0][len(prefix):], x[1]), tuple(kwargs_with_prefix.items())))
- return kwargs_without_prefix, kwargs
-
-
-# classes
-class Scale(nn.Module):
- def __init__(self, value, fn):
- super().__init__()
- self.value = value
- self.fn = fn
-
- def forward(self, x, **kwargs):
- x, *rest = self.fn(x, **kwargs)
- return (x * self.value, *rest)
-
-
-class Rezero(nn.Module):
- def __init__(self, fn):
- super().__init__()
- self.fn = fn
- self.g = nn.Parameter(torch.zeros(1))
-
- def forward(self, x, **kwargs):
- x, *rest = self.fn(x, **kwargs)
- return (x * self.g, *rest)
-
-
-class ScaleNorm(nn.Module):
- def __init__(self, dim, eps=1e-5):
- super().__init__()
- self.scale = dim ** -0.5
- self.eps = eps
- self.g = nn.Parameter(torch.ones(1))
-
- def forward(self, x):
- norm = torch.norm(x, dim=-1, keepdim=True) * self.scale
- return x / norm.clamp(min=self.eps) * self.g
-
-
-class RMSNorm(nn.Module):
- def __init__(self, dim, eps=1e-8):
- super().__init__()
- self.scale = dim ** -0.5
- self.eps = eps
- self.g = nn.Parameter(torch.ones(dim))
-
- def forward(self, x):
- norm = torch.norm(x, dim=-1, keepdim=True) * self.scale
- return x / norm.clamp(min=self.eps) * self.g
-
-
-class Residual(nn.Module):
- def forward(self, x, residual):
- return x + residual
-
-
-class GRUGating(nn.Module):
- def __init__(self, dim):
- super().__init__()
- self.gru = nn.GRUCell(dim, dim)
-
- def forward(self, x, residual):
- gated_output = self.gru(
- rearrange(x, 'b n d -> (b n) d'),
- rearrange(residual, 'b n d -> (b n) d')
- )
-
- return gated_output.reshape_as(x)
-
-
-# feedforward
-
-class GEGLU(nn.Module):
- def __init__(self, dim_in, dim_out):
- super().__init__()
- self.proj = nn.Linear(dim_in, dim_out * 2)
-
- def forward(self, x):
- x, gate = self.proj(x).chunk(2, dim=-1)
- return x * F.gelu(gate)
-
-
-class FeedForward(nn.Module):
- def __init__(self, dim, dim_out=None, mult=4, glu=False, dropout=0.):
- super().__init__()
- inner_dim = int(dim * mult)
- dim_out = default(dim_out, dim)
- project_in = nn.Sequential(
- nn.Linear(dim, inner_dim),
- nn.GELU()
- ) if not glu else GEGLU(dim, inner_dim)
-
- self.net = nn.Sequential(
- project_in,
- nn.Dropout(dropout),
- nn.Linear(inner_dim, dim_out)
- )
-
- def forward(self, x):
- return self.net(x)
-
-
-# attention.
-class Attention(nn.Module):
- def __init__(
- self,
- dim,
- dim_head=DEFAULT_DIM_HEAD,
- heads=8,
- causal=False,
- mask=None,
- talking_heads=False,
- sparse_topk=None,
- use_entmax15=False,
- num_mem_kv=0,
- dropout=0.,
- on_attn=False
- ):
- super().__init__()
- if use_entmax15:
- raise NotImplementedError("Check out entmax activation instead of softmax activation!")
- self.scale = dim_head ** -0.5
- self.heads = heads
- self.causal = causal
- self.mask = mask
-
- inner_dim = dim_head * heads
-
- self.to_q = nn.Linear(dim, inner_dim, bias=False)
- self.to_k = nn.Linear(dim, inner_dim, bias=False)
- self.to_v = nn.Linear(dim, inner_dim, bias=False)
- self.dropout = nn.Dropout(dropout)
-
- # talking heads
- self.talking_heads = talking_heads
- if talking_heads:
- self.pre_softmax_proj = nn.Parameter(torch.randn(heads, heads))
- self.post_softmax_proj = nn.Parameter(torch.randn(heads, heads))
-
- # explicit topk sparse attention
- self.sparse_topk = sparse_topk
-
- # entmax
- #self.attn_fn = entmax15 if use_entmax15 else F.softmax
- self.attn_fn = F.softmax
-
- # add memory key / values
- self.num_mem_kv = num_mem_kv
- if num_mem_kv > 0:
- self.mem_k = nn.Parameter(torch.randn(heads, num_mem_kv, dim_head))
- self.mem_v = nn.Parameter(torch.randn(heads, num_mem_kv, dim_head))
-
- # attention on attention
- self.attn_on_attn = on_attn
- self.to_out = nn.Sequential(nn.Linear(inner_dim, dim * 2), nn.GLU()) if on_attn else nn.Linear(inner_dim, dim)
-
- def forward(
- self,
- x,
- context=None,
- mask=None,
- context_mask=None,
- rel_pos=None,
- sinusoidal_emb=None,
- prev_attn=None,
- mem=None
- ):
- b, n, _, h, talking_heads, device = *x.shape, self.heads, self.talking_heads, x.device
- kv_input = default(context, x)
-
- q_input = x
- k_input = kv_input
- v_input = kv_input
-
- if exists(mem):
- k_input = torch.cat((mem, k_input), dim=-2)
- v_input = torch.cat((mem, v_input), dim=-2)
-
- if exists(sinusoidal_emb):
- # in shortformer, the query would start at a position offset depending on the past cached memory
- offset = k_input.shape[-2] - q_input.shape[-2]
- q_input = q_input + sinusoidal_emb(q_input, offset=offset)
- k_input = k_input + sinusoidal_emb(k_input)
-
- q = self.to_q(q_input)
- k = self.to_k(k_input)
- v = self.to_v(v_input)
-
- q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> b h n d', h=h), (q, k, v))
-
- input_mask = None
- if any(map(exists, (mask, context_mask))):
- q_mask = default(mask, lambda: torch.ones((b, n), device=device).bool())
- k_mask = q_mask if not exists(context) else context_mask
- k_mask = default(k_mask, lambda: torch.ones((b, k.shape[-2]), device=device).bool())
- q_mask = rearrange(q_mask, 'b i -> b () i ()')
- k_mask = rearrange(k_mask, 'b j -> b () () j')
- input_mask = q_mask * k_mask
-
- if self.num_mem_kv > 0:
- mem_k, mem_v = map(lambda t: repeat(t, 'h n d -> b h n d', b=b), (self.mem_k, self.mem_v))
- k = torch.cat((mem_k, k), dim=-2)
- v = torch.cat((mem_v, v), dim=-2)
- if exists(input_mask):
- input_mask = F.pad(input_mask, (self.num_mem_kv, 0), value=True)
-
- dots = einsum('b h i d, b h j d -> b h i j', q, k) * self.scale
- mask_value = max_neg_value(dots)
-
- if exists(prev_attn):
- dots = dots + prev_attn
-
- pre_softmax_attn = dots
-
- if talking_heads:
- dots = einsum('b h i j, h k -> b k i j', dots, self.pre_softmax_proj).contiguous()
-
- if exists(rel_pos):
- dots = rel_pos(dots)
-
- if exists(input_mask):
- dots.masked_fill_(~input_mask, mask_value)
- del input_mask
-
- if self.causal:
- i, j = dots.shape[-2:]
- r = torch.arange(i, device=device)
- mask = rearrange(r, 'i -> () () i ()') < rearrange(r, 'j -> () () () j')
- mask = F.pad(mask, (j - i, 0), value=False)
- dots.masked_fill_(mask, mask_value)
- del mask
-
- if exists(self.sparse_topk) and self.sparse_topk < dots.shape[-1]:
- top, _ = dots.topk(self.sparse_topk, dim=-1)
- vk = top[..., -1].unsqueeze(-1).expand_as(dots)
- mask = dots < vk
- dots.masked_fill_(mask, mask_value)
- del mask
-
- attn = self.attn_fn(dots, dim=-1)
- post_softmax_attn = attn
-
- attn = self.dropout(attn)
-
- if talking_heads:
- attn = einsum('b h i j, h k -> b k i j', attn, self.post_softmax_proj).contiguous()
-
- out = einsum('b h i j, b h j d -> b h i d', attn, v)
- out = rearrange(out, 'b h n d -> b n (h d)')
-
- intermediates = Intermediates(
- pre_softmax_attn=pre_softmax_attn,
- post_softmax_attn=post_softmax_attn
- )
-
- return self.to_out(out), intermediates
-
-
-class AttentionLayers(nn.Module):
- def __init__(
- self,
- dim,
- depth,
- heads=8,
- causal=False,
- cross_attend=False,
- only_cross=False,
- use_scalenorm=False,
- use_rmsnorm=False,
- use_rezero=False,
- rel_pos_num_buckets=32,
- rel_pos_max_distance=128,
- position_infused_attn=False,
- custom_layers=None,
- sandwich_coef=None,
- par_ratio=None,
- residual_attn=False,
- cross_residual_attn=False,
- macaron=False,
- pre_norm=True,
- gate_residual=False,
- **kwargs
- ):
- super().__init__()
- ff_kwargs, kwargs = groupby_prefix_and_trim('ff_', kwargs)
- attn_kwargs, _ = groupby_prefix_and_trim('attn_', kwargs)
-
- dim_head = attn_kwargs.get('dim_head', DEFAULT_DIM_HEAD)
-
- self.dim = dim
- self.depth = depth
- self.layers = nn.ModuleList([])
-
- self.has_pos_emb = position_infused_attn
- self.pia_pos_emb = FixedPositionalEmbedding(dim) if position_infused_attn else None
- self.rotary_pos_emb = always(None)
-
- assert rel_pos_num_buckets <= rel_pos_max_distance, 'number of relative position buckets must not exceed the relative position max distance'
- self.rel_pos = None
-
- self.pre_norm = pre_norm
-
- self.residual_attn = residual_attn
- self.cross_residual_attn = cross_residual_attn
-
- norm_class = ScaleNorm if use_scalenorm else nn.LayerNorm
- norm_class = RMSNorm if use_rmsnorm else norm_class
- norm_fn = partial(norm_class, dim)
-
- norm_fn = nn.Identity if use_rezero else norm_fn
- branch_fn = Rezero if use_rezero else None
-
- if cross_attend and not only_cross:
- default_block = ('a', 'c', 'f')
- elif cross_attend and only_cross:
- default_block = ('c', 'f')
- else:
- default_block = ('a', 'f')
-
- if macaron:
- default_block = ('f',) + default_block
-
- if exists(custom_layers):
- layer_types = custom_layers
- elif exists(par_ratio):
- par_depth = depth * len(default_block)
- assert 1 < par_ratio <= par_depth, 'par ratio out of range'
- default_block = tuple(filter(not_equals('f'), default_block))
- par_attn = par_depth // par_ratio
- depth_cut = par_depth * 2 // 3 # 2 / 3 attention layer cutoff suggested by PAR paper
- par_width = (depth_cut + depth_cut // par_attn) // par_attn
- assert len(default_block) <= par_width, 'default block is too large for par_ratio'
- par_block = default_block + ('f',) * (par_width - len(default_block))
- par_head = par_block * par_attn
- layer_types = par_head + ('f',) * (par_depth - len(par_head))
- elif exists(sandwich_coef):
- assert sandwich_coef > 0 and sandwich_coef <= depth, 'sandwich coefficient should be positive and at most the depth'
- layer_types = ('a',) * sandwich_coef + default_block * (depth - sandwich_coef) + ('f',) * sandwich_coef
- else:
- layer_types = default_block * depth
-
- self.layer_types = layer_types
- self.num_attn_layers = len(list(filter(equals('a'), layer_types)))
-
- for layer_type in self.layer_types:
- if layer_type == 'a':
- layer = Attention(dim, heads=heads, causal=causal, **attn_kwargs)
- elif layer_type == 'c':
- layer = Attention(dim, heads=heads, **attn_kwargs)
- elif layer_type == 'f':
- layer = FeedForward(dim, **ff_kwargs)
- layer = layer if not macaron else Scale(0.5, layer)
- else:
- raise Exception(f'invalid layer type {layer_type}')
-
- if isinstance(layer, Attention) and exists(branch_fn):
- layer = branch_fn(layer)
-
- if gate_residual:
- residual_fn = GRUGating(dim)
- else:
- residual_fn = Residual()
-
- self.layers.append(nn.ModuleList([
- norm_fn(),
- layer,
- residual_fn
- ]))
-
- def forward(
- self,
- x,
- context=None,
- mask=None,
- context_mask=None,
- mems=None,
- return_hiddens=False
- ):
- hiddens = []
- intermediates = []
- prev_attn = None
- prev_cross_attn = None
-
- mems = mems.copy() if exists(mems) else [None] * self.num_attn_layers
-
- for ind, (layer_type, (norm, block, residual_fn)) in enumerate(zip(self.layer_types, self.layers)):
- is_last = ind == (len(self.layers) - 1)
-
- if layer_type == 'a':
- hiddens.append(x)
- layer_mem = mems.pop(0)
-
- residual = x
-
- if self.pre_norm:
- x = norm(x)
-
- if layer_type == 'a':
- out, inter = block(x, mask=mask, sinusoidal_emb=self.pia_pos_emb, rel_pos=self.rel_pos,
- prev_attn=prev_attn, mem=layer_mem)
- elif layer_type == 'c':
- out, inter = block(x, context=context, mask=mask, context_mask=context_mask, prev_attn=prev_cross_attn)
- elif layer_type == 'f':
- out = block(x)
-
- x = residual_fn(out, residual)
-
- if layer_type in ('a', 'c'):
- intermediates.append(inter)
-
- if layer_type == 'a' and self.residual_attn:
- prev_attn = inter.pre_softmax_attn
- elif layer_type == 'c' and self.cross_residual_attn:
- prev_cross_attn = inter.pre_softmax_attn
-
- if not self.pre_norm and not is_last:
- x = norm(x)
-
- if return_hiddens:
- intermediates = LayerIntermediates(
- hiddens=hiddens,
- attn_intermediates=intermediates
- )
-
- return x, intermediates
-
- return x
-
-
-class Encoder(AttentionLayers):
- def __init__(self, **kwargs):
- assert 'causal' not in kwargs, 'cannot set causality on encoder'
- super().__init__(causal=False, **kwargs)
-
-
-
-class TransformerWrapper(nn.Module):
- def __init__(
- self,
- *,
- num_tokens,
- max_seq_len,
- attn_layers,
- emb_dim=None,
- max_mem_len=0.,
- emb_dropout=0.,
- num_memory_tokens=None,
- tie_embedding=False,
- use_pos_emb=True
- ):
- super().__init__()
- assert isinstance(attn_layers, AttentionLayers), 'attention layers must be one of Encoder or Decoder'
-
- dim = attn_layers.dim
- emb_dim = default(emb_dim, dim)
-
- self.max_seq_len = max_seq_len
- self.max_mem_len = max_mem_len
- self.num_tokens = num_tokens
-
- self.token_emb = nn.Embedding(num_tokens, emb_dim)
- self.pos_emb = AbsolutePositionalEmbedding(emb_dim, max_seq_len) if (
- use_pos_emb and not attn_layers.has_pos_emb) else always(0)
- self.emb_dropout = nn.Dropout(emb_dropout)
-
- self.project_emb = nn.Linear(emb_dim, dim) if emb_dim != dim else nn.Identity()
- self.attn_layers = attn_layers
- self.norm = nn.LayerNorm(dim)
-
- self.init_()
-
- self.to_logits = nn.Linear(dim, num_tokens) if not tie_embedding else lambda t: t @ self.token_emb.weight.t()
-
- # memory tokens (like [cls]) from Memory Transformers paper
- num_memory_tokens = default(num_memory_tokens, 0)
- self.num_memory_tokens = num_memory_tokens
- if num_memory_tokens > 0:
- self.memory_tokens = nn.Parameter(torch.randn(num_memory_tokens, dim))
-
- # let funnel encoder know number of memory tokens, if specified
- if hasattr(attn_layers, 'num_memory_tokens'):
- attn_layers.num_memory_tokens = num_memory_tokens
-
- def init_(self):
- nn.init.normal_(self.token_emb.weight, std=0.02)
-
- def forward(
- self,
- x,
- return_embeddings=False,
- mask=None,
- return_mems=False,
- return_attn=False,
- mems=None,
- **kwargs
- ):
- b, n, device, num_mem = *x.shape, x.device, self.num_memory_tokens
- x = self.token_emb(x)
- x += self.pos_emb(x)
- x = self.emb_dropout(x)
-
- x = self.project_emb(x)
-
- if num_mem > 0:
- mem = repeat(self.memory_tokens, 'n d -> b n d', b=b)
- x = torch.cat((mem, x), dim=1)
-
- # auto-handle masking after appending memory tokens
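-            # e.g. with num_mem = 2, a (b, n) boolean mask becomes (b, n + 2), with True prepended for the memory positions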
- if exists(mask):
- mask = F.pad(mask, (num_mem, 0), value=True)
-
- x, intermediates = self.attn_layers(x, mask=mask, mems=mems, return_hiddens=True, **kwargs)
- x = self.norm(x)
-
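-        # split the memory tokens back off so that only the sequence positions are projected to logits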
- mem, x = x[:, :num_mem], x[:, num_mem:]
-
- out = self.to_logits(x) if not return_embeddings else x
-
- if return_mems:
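-            # extend the running memories with this pass's hidden states, keeping only the newest max_mem_len positions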
- hiddens = intermediates.hiddens
- new_mems = list(map(lambda pair: torch.cat(pair, dim=-2), zip(mems, hiddens))) if exists(mems) else hiddens
- new_mems = list(map(lambda t: t[..., -self.max_mem_len:, :].detach(), new_mems))
- return out, new_mems
-
- if return_attn:
- attn_maps = list(map(lambda t: t.post_softmax_attn, intermediates.attn_intermediates))
- return out, attn_maps
-
- return out
-
diff --git a/One-2-3-45-master 2/ldm/thirdp/psp/helpers.py b/One-2-3-45-master 2/ldm/thirdp/psp/helpers.py
deleted file mode 100644
index 983baaa50ea9df0cbabe09aba80293ddf7709845..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/thirdp/psp/helpers.py
+++ /dev/null
@@ -1,121 +0,0 @@
-# https://github.com/eladrich/pixel2style2pixel
-
-from collections import namedtuple
-import torch
-from torch.nn import Conv2d, BatchNorm2d, PReLU, ReLU, Sigmoid, MaxPool2d, AdaptiveAvgPool2d, Sequential, Module
-
-"""
-ArcFace implementation from [TreB1eN](https://github.com/TreB1eN/InsightFace_Pytorch)
-"""
-
-
-class Flatten(Module):
- def forward(self, input):
- return input.view(input.size(0), -1)
-
-
-def l2_norm(input, axis=1):
- norm = torch.norm(input, 2, axis, True)
- output = torch.div(input, norm)
- return output
-
-
-class Bottleneck(namedtuple('Block', ['in_channel', 'depth', 'stride'])):
- """ A named tuple describing a ResNet block. """
-
-
-def get_block(in_channel, depth, num_units, stride=2):
- return [Bottleneck(in_channel, depth, stride)] + [Bottleneck(depth, depth, 1) for i in range(num_units - 1)]
-
-
-def get_blocks(num_layers):
- if num_layers == 50:
- blocks = [
- get_block(in_channel=64, depth=64, num_units=3),
- get_block(in_channel=64, depth=128, num_units=4),
- get_block(in_channel=128, depth=256, num_units=14),
- get_block(in_channel=256, depth=512, num_units=3)
- ]
- elif num_layers == 100:
- blocks = [
- get_block(in_channel=64, depth=64, num_units=3),
- get_block(in_channel=64, depth=128, num_units=13),
- get_block(in_channel=128, depth=256, num_units=30),
- get_block(in_channel=256, depth=512, num_units=3)
- ]
- elif num_layers == 152:
- blocks = [
- get_block(in_channel=64, depth=64, num_units=3),
- get_block(in_channel=64, depth=128, num_units=8),
- get_block(in_channel=128, depth=256, num_units=36),
- get_block(in_channel=256, depth=512, num_units=3)
- ]
- else:
- raise ValueError("Invalid number of layers: {}. Must be one of [50, 100, 152]".format(num_layers))
- return blocks
-
-
-class SEModule(Module):
- def __init__(self, channels, reduction):
- super(SEModule, self).__init__()
- self.avg_pool = AdaptiveAvgPool2d(1)
- self.fc1 = Conv2d(channels, channels // reduction, kernel_size=1, padding=0, bias=False)
- self.relu = ReLU(inplace=True)
- self.fc2 = Conv2d(channels // reduction, channels, kernel_size=1, padding=0, bias=False)
- self.sigmoid = Sigmoid()
-
- def forward(self, x):
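-        # squeeze: global average pool to 1x1; excite: bottleneck 1x1 convs; then gate the input channel-wise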
- module_input = x
- x = self.avg_pool(x)
- x = self.fc1(x)
- x = self.relu(x)
- x = self.fc2(x)
- x = self.sigmoid(x)
- return module_input * x
-
-
-class bottleneck_IR(Module):
- def __init__(self, in_channel, depth, stride):
- super(bottleneck_IR, self).__init__()
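-        # when the channel count is unchanged, the shortcut only needs to match the stride;
-        # otherwise project with a strided 1x1 conv + batch norm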
- if in_channel == depth:
- self.shortcut_layer = MaxPool2d(1, stride)
- else:
- self.shortcut_layer = Sequential(
- Conv2d(in_channel, depth, (1, 1), stride, bias=False),
- BatchNorm2d(depth)
- )
- self.res_layer = Sequential(
- BatchNorm2d(in_channel),
- Conv2d(in_channel, depth, (3, 3), (1, 1), 1, bias=False), PReLU(depth),
- Conv2d(depth, depth, (3, 3), stride, 1, bias=False), BatchNorm2d(depth)
- )
-
- def forward(self, x):
- shortcut = self.shortcut_layer(x)
- res = self.res_layer(x)
- return res + shortcut
-
-
-class bottleneck_IR_SE(Module):
- def __init__(self, in_channel, depth, stride):
- super(bottleneck_IR_SE, self).__init__()
- if in_channel == depth:
- self.shortcut_layer = MaxPool2d(1, stride)
- else:
- self.shortcut_layer = Sequential(
- Conv2d(in_channel, depth, (1, 1), stride, bias=False),
- BatchNorm2d(depth)
- )
- self.res_layer = Sequential(
- BatchNorm2d(in_channel),
- Conv2d(in_channel, depth, (3, 3), (1, 1), 1, bias=False),
- PReLU(depth),
- Conv2d(depth, depth, (3, 3), stride, 1, bias=False),
- BatchNorm2d(depth),
- SEModule(depth, 16)
- )
-
- def forward(self, x):
- shortcut = self.shortcut_layer(x)
- res = self.res_layer(x)
- return res + shortcut
\ No newline at end of file
diff --git a/One-2-3-45-master 2/ldm/thirdp/psp/id_loss.py b/One-2-3-45-master 2/ldm/thirdp/psp/id_loss.py
deleted file mode 100644
index e08ee095bd20ff664dcf470de15ff54f839b38e2..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/thirdp/psp/id_loss.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# https://github.com/eladrich/pixel2style2pixel
-import torch
-from torch import nn
-from ldm.thirdp.psp.model_irse import Backbone
-
-
-class IDFeatures(nn.Module):
- def __init__(self, model_path):
- super(IDFeatures, self).__init__()
- print('Loading ResNet ArcFace')
- self.facenet = Backbone(input_size=112, num_layers=50, drop_ratio=0.6, mode='ir_se')
- self.facenet.load_state_dict(torch.load(model_path, map_location="cpu"))
- self.face_pool = torch.nn.AdaptiveAvgPool2d((112, 112))
- self.facenet.eval()
-
- def forward(self, x, crop=False):
- # Not sure of the image range here
- if crop:
- x = torch.nn.functional.interpolate(x, (256, 256), mode="area")
- x = x[:, :, 35:223, 32:220]
- x = self.face_pool(x)
- x_feats = self.facenet(x)
- return x_feats
diff --git a/One-2-3-45-master 2/ldm/thirdp/psp/model_irse.py b/One-2-3-45-master 2/ldm/thirdp/psp/model_irse.py
deleted file mode 100644
index 21cedd2994a6eed5a0afd451b08dd09801fe60c0..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/thirdp/psp/model_irse.py
+++ /dev/null
@@ -1,86 +0,0 @@
-# https://github.com/eladrich/pixel2style2pixel
-
-from torch.nn import Linear, Conv2d, BatchNorm1d, BatchNorm2d, PReLU, Dropout, Sequential, Module
-from ldm.thirdp.psp.helpers import get_blocks, Flatten, bottleneck_IR, bottleneck_IR_SE, l2_norm
-
-"""
-Modified Backbone implementation from [TreB1eN](https://github.com/TreB1eN/InsightFace_Pytorch)
-"""
-
-
-class Backbone(Module):
- def __init__(self, input_size, num_layers, mode='ir', drop_ratio=0.4, affine=True):
- super(Backbone, self).__init__()
- assert input_size in [112, 224], "input_size should be 112 or 224"
- assert num_layers in [50, 100, 152], "num_layers should be 50, 100 or 152"
- assert mode in ['ir', 'ir_se'], "mode should be ir or ir_se"
- blocks = get_blocks(num_layers)
- if mode == 'ir':
- unit_module = bottleneck_IR
- elif mode == 'ir_se':
- unit_module = bottleneck_IR_SE
- self.input_layer = Sequential(Conv2d(3, 64, (3, 3), 1, 1, bias=False),
- BatchNorm2d(64),
- PReLU(64))
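-        # the body downsamples by 16x (four stages with stride 2), so a 112 input yields 7x7 features and a 224 input yields 14x14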
- if input_size == 112:
- self.output_layer = Sequential(BatchNorm2d(512),
- Dropout(drop_ratio),
- Flatten(),
- Linear(512 * 7 * 7, 512),
- BatchNorm1d(512, affine=affine))
- else:
- self.output_layer = Sequential(BatchNorm2d(512),
- Dropout(drop_ratio),
- Flatten(),
- Linear(512 * 14 * 14, 512),
- BatchNorm1d(512, affine=affine))
-
- modules = []
- for block in blocks:
- for bottleneck in block:
- modules.append(unit_module(bottleneck.in_channel,
- bottleneck.depth,
- bottleneck.stride))
- self.body = Sequential(*modules)
-
- def forward(self, x):
- x = self.input_layer(x)
- x = self.body(x)
- x = self.output_layer(x)
- return l2_norm(x)
-
-
-def IR_50(input_size):
- """Constructs a ir-50 model."""
- model = Backbone(input_size, num_layers=50, mode='ir', drop_ratio=0.4, affine=False)
- return model
-
-
-def IR_101(input_size):
- """Constructs a ir-101 model."""
- model = Backbone(input_size, num_layers=100, mode='ir', drop_ratio=0.4, affine=False)
- return model
-
-
-def IR_152(input_size):
- """Constructs a ir-152 model."""
- model = Backbone(input_size, num_layers=152, mode='ir', drop_ratio=0.4, affine=False)
- return model
-
-
-def IR_SE_50(input_size):
- """Constructs a ir_se-50 model."""
- model = Backbone(input_size, num_layers=50, mode='ir_se', drop_ratio=0.4, affine=False)
- return model
-
-
-def IR_SE_101(input_size):
- """Constructs a ir_se-101 model."""
- model = Backbone(input_size, num_layers=100, mode='ir_se', drop_ratio=0.4, affine=False)
- return model
-
-
-def IR_SE_152(input_size):
- """Constructs a ir_se-152 model."""
- model = Backbone(input_size, num_layers=152, mode='ir_se', drop_ratio=0.4, affine=False)
- return model
\ No newline at end of file
diff --git a/One-2-3-45-master 2/ldm/util.py b/One-2-3-45-master 2/ldm/util.py
deleted file mode 100644
index 07e2689a919f605a50866bdfd1e0faf5cc7fadc0..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/ldm/util.py
+++ /dev/null
@@ -1,256 +0,0 @@
-import importlib
-
-import torch
-from torch import optim
-import numpy as np
-
-from inspect import isfunction
-from PIL import Image, ImageDraw, ImageFont
-
-import os
-import matplotlib.pyplot as plt
-import time
-import cv2
-import PIL
-
-def pil_rectangle_crop(im):
- width, height = im.size # Get dimensions
-
- if width <= height:
- left = 0
- right = width
- top = (height - width)/2
- bottom = (height + width)/2
-    else:
-        top = 0
-        bottom = height
-        left = (width - height) / 2
-        right = (width + height) / 2
-
- # Crop the center of the image
- im = im.crop((left, top, right, bottom))
- return im
-
-def add_margin(pil_img, color, size=256):
- width, height = pil_img.size
- result = Image.new(pil_img.mode, (size, size), color)
- result.paste(pil_img, ((size - width) // 2, (size - height) // 2))
- return result
-
-def load_and_preprocess(interface, input_im):
-    '''
-    :param input_im: input image (PIL Image).
-    :return: image, an (H, W, 3) uint8 array in [0, 255] with a white background and margin.
-    '''
- # See https://github.com/Ir1d/image-background-remove-tool
- image = input_im.convert('RGB')
-
- image_without_background = interface([image])[0]
- image_without_background = np.array(image_without_background)
- est_seg = image_without_background > 127
- image = np.array(image)
- foreground = est_seg[:, : , -1].astype(np.bool_)
- image[~foreground] = [255., 255., 255.]
- x, y, w, h = cv2.boundingRect(foreground.astype(np.uint8))
- image = image[y:y+h, x:x+w, :]
- image = PIL.Image.fromarray(np.array(image))
-
-    # resize image such that the long edge is at most 200, then pad to 256 with a white margin
-    image.thumbnail([200, 200], Image.Resampling.LANCZOS)
- image = add_margin(image, (255, 255, 255), size=256)
- image = np.array(image)
-
- return image
-
-
-def log_txt_as_img(wh, xc, size=10):
- # wh a tuple of (width, height)
- # xc a list of captions to plot
- b = len(xc)
- txts = list()
- for bi in range(b):
- txt = Image.new("RGB", wh, color="white")
- draw = ImageDraw.Draw(txt)
- font = ImageFont.truetype('data/DejaVuSans.ttf', size=size)
- nc = int(40 * (wh[0] / 256))
- lines = "\n".join(xc[bi][start:start + nc] for start in range(0, len(xc[bi]), nc))
-
- try:
- draw.text((0, 0), lines, fill="black", font=font)
- except UnicodeEncodeError:
-            print("Can't encode string for logging. Skipping.")
-
- txt = np.array(txt).transpose(2, 0, 1) / 127.5 - 1.0
- txts.append(txt)
- txts = np.stack(txts)
- txts = torch.tensor(txts)
- return txts
-
-
-def ismap(x):
- if not isinstance(x, torch.Tensor):
- return False
- return (len(x.shape) == 4) and (x.shape[1] > 3)
-
-
-def isimage(x):
-    if not isinstance(x, torch.Tensor):
- return False
- return (len(x.shape) == 4) and (x.shape[1] == 3 or x.shape[1] == 1)
-
-
-def exists(x):
- return x is not None
-
-
-def default(val, d):
- if exists(val):
- return val
- return d() if isfunction(d) else d
-
-
-def mean_flat(tensor):
- """
- https://github.com/openai/guided-diffusion/blob/27c20a8fab9cb472df5d6bdd6c8d11c8f430b924/guided_diffusion/nn.py#L86
- Take the mean over all non-batch dimensions.
- """
- return tensor.mean(dim=list(range(1, len(tensor.shape))))
-
-
-def count_params(model, verbose=False):
- total_params = sum(p.numel() for p in model.parameters())
- if verbose:
- print(f"{model.__class__.__name__} has {total_params*1.e-6:.2f} M params.")
- return total_params
-
-
-def instantiate_from_config(config):
-    if "target" not in config:
- if config == '__is_first_stage__':
- return None
- elif config == "__is_unconditional__":
- return None
- raise KeyError("Expected key `target` to instantiate.")
- return get_obj_from_str(config["target"])(**config.get("params", dict()))
-
-
-def get_obj_from_str(string, reload=False):
- module, cls = string.rsplit(".", 1)
- if reload:
- module_imp = importlib.import_module(module)
- importlib.reload(module_imp)
- return getattr(importlib.import_module(module, package=None), cls)
-
-
-class AdamWwithEMAandWings(optim.Optimizer):
- # credit to https://gist.github.com/crowsonkb/65f7265353f403714fce3b2595e0b298
- def __init__(self, params, lr=1.e-3, betas=(0.9, 0.999), eps=1.e-8, # TODO: check hyperparameters before using
- weight_decay=1.e-2, amsgrad=False, ema_decay=0.9999, # ema decay to match previous code
- ema_power=1., param_names=()):
- """AdamW that saves EMA versions of the parameters."""
- if not 0.0 <= lr:
- raise ValueError("Invalid learning rate: {}".format(lr))
- if not 0.0 <= eps:
- raise ValueError("Invalid epsilon value: {}".format(eps))
- if not 0.0 <= betas[0] < 1.0:
- raise ValueError("Invalid beta parameter at index 0: {}".format(betas[0]))
- if not 0.0 <= betas[1] < 1.0:
- raise ValueError("Invalid beta parameter at index 1: {}".format(betas[1]))
- if not 0.0 <= weight_decay:
- raise ValueError("Invalid weight_decay value: {}".format(weight_decay))
- if not 0.0 <= ema_decay <= 1.0:
- raise ValueError("Invalid ema_decay value: {}".format(ema_decay))
- defaults = dict(lr=lr, betas=betas, eps=eps,
- weight_decay=weight_decay, amsgrad=amsgrad, ema_decay=ema_decay,
- ema_power=ema_power, param_names=param_names)
- super().__init__(params, defaults)
-
- def __setstate__(self, state):
- super().__setstate__(state)
- for group in self.param_groups:
- group.setdefault('amsgrad', False)
-
- @torch.no_grad()
- def step(self, closure=None):
- """Performs a single optimization step.
- Args:
- closure (callable, optional): A closure that reevaluates the model
- and returns the loss.
- """
- loss = None
- if closure is not None:
- with torch.enable_grad():
- loss = closure()
-
- for group in self.param_groups:
- params_with_grad = []
- grads = []
- exp_avgs = []
- exp_avg_sqs = []
- ema_params_with_grad = []
- state_sums = []
- max_exp_avg_sqs = []
- state_steps = []
- amsgrad = group['amsgrad']
- beta1, beta2 = group['betas']
- ema_decay = group['ema_decay']
- ema_power = group['ema_power']
-
- for p in group['params']:
- if p.grad is None:
- continue
- params_with_grad.append(p)
- if p.grad.is_sparse:
- raise RuntimeError('AdamW does not support sparse gradients')
- grads.append(p.grad)
-
- state = self.state[p]
-
- # State initialization
- if len(state) == 0:
- state['step'] = 0
- # Exponential moving average of gradient values
- state['exp_avg'] = torch.zeros_like(p, memory_format=torch.preserve_format)
- # Exponential moving average of squared gradient values
- state['exp_avg_sq'] = torch.zeros_like(p, memory_format=torch.preserve_format)
- if amsgrad:
- # Maintains max of all exp. moving avg. of sq. grad. values
- state['max_exp_avg_sq'] = torch.zeros_like(p, memory_format=torch.preserve_format)
- # Exponential moving average of parameter values
- state['param_exp_avg'] = p.detach().float().clone()
-
- exp_avgs.append(state['exp_avg'])
- exp_avg_sqs.append(state['exp_avg_sq'])
- ema_params_with_grad.append(state['param_exp_avg'])
-
- if amsgrad:
- max_exp_avg_sqs.append(state['max_exp_avg_sq'])
-
- # update the steps for each param group update
- state['step'] += 1
- # record the step after step update
- state_steps.append(state['step'])
-
- optim._functional.adamw(params_with_grad,
- grads,
- exp_avgs,
- exp_avg_sqs,
- max_exp_avg_sqs,
- state_steps,
- amsgrad=amsgrad,
- beta1=beta1,
- beta2=beta2,
- lr=group['lr'],
- weight_decay=group['weight_decay'],
- eps=group['eps'],
- maximize=False)
-
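-                # EMA decay warms up as 1 - step^(-ema_power) and is capped at ema_decay, so early
-                # steps track the raw weights closely before settling on the long-run average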
- cur_ema_decay = min(ema_decay, 1 - state['step'] ** -ema_power)
- for param, ema_param in zip(params_with_grad, ema_params_with_grad):
- ema_param.mul_(cur_ema_decay).add_(param.float(), alpha=1 - cur_ema_decay)
-
- return loss
\ No newline at end of file
diff --git a/One-2-3-45-master 2/reconstruction/confs/one2345_lod0_val_demo.conf b/One-2-3-45-master 2/reconstruction/confs/one2345_lod0_val_demo.conf
deleted file mode 100644
index f0f2f7eba0afc3a62d3a903c009c221209af4b50..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/confs/one2345_lod0_val_demo.conf
+++ /dev/null
@@ -1,130 +0,0 @@
-# - for the lod1 geometry network, use adaptive cost for the sparse cost regularization network
-# - for the lod1 rendering network, use depth-adaptive rendering
-
-general {
-
- base_exp_dir = exp/lod0 # !!! where you store the results and checkpoints to be used
- recording = [
- ./,
- ./data
- ./ops
- ./models
- ./loss
- ]
-}
-
-dataset {
- trainpath = ../
- valpath = ../ # !!! where you store the validation data
- testpath = ../
-
- imgScale_train = 1.0
- imgScale_test = 1.0
- nviews = 5
- clean_image = True
- importance_sample = True
-
- # test dataset
- test_img_wh = [256, 256]
- test_clip_wh = [0, 0]
- test_scan_id = scan110
- test_dir_comment = train
-}
-
-train {
- learning_rate = 2e-4
- learning_rate_milestone = [100000, 150000, 200000]
- learning_rate_factor = 0.5
- end_iter = 200000
- save_freq = 5000
- val_freq = 1
- val_mesh_freq = 1
- report_freq = 100
-
- N_rays = 512
-
- validate_resolution_level = 4
- anneal_start = 0
- anneal_end = 25000
- anneal_start_lod1 = 0
- anneal_end_lod1 = 15000
-
- use_white_bkgd = True
-
- # Loss
-    # ! when training the lod1 network, disable this regularization for the first 10k steps, then enable it
- sdf_igr_weight = 0.1
- sdf_sparse_weight = 0.02 # 0.002 for lod1 network; 0.02 for lod0 network
-    sdf_decay_param = 100 # must not be too large; it determines the TSDF range
- fg_bg_weight = 0.01 # first 0.01
- bg_ratio = 0.3
-
- if_fix_lod0_networks = False
-}
-
-model {
- num_lods = 1
-
- sdf_network_lod0 {
- lod = 0,
- ch_in = 56, # the channel num of fused pyramid features
-        voxel_size = 0.02105263, # 2 / (vol_dims - 1) = 2/95; the earlier 0.02083333 (2/96) is off by one
- vol_dims = [96, 96, 96],
- hidden_dim = 128,
- cost_type = variance_mean
- d_pyramid_feature_compress = 16,
- regnet_d_out = 16,
- num_sdf_layers = 4,
- # position embedding
- multires = 6
- }
-
-
- sdf_network_lod1 {
- lod = 1,
- ch_in = 56, # the channel num of fused pyramid features
-        voxel_size = 0.0104712, # 2 / (vol_dims - 1) = 2/191; the earlier 0.01041667 (2/192) is off by one
- vol_dims = [192, 192, 192],
- hidden_dim = 128,
- cost_type = variance_mean
- d_pyramid_feature_compress = 8,
- regnet_d_out = 16,
- num_sdf_layers = 4,
-
- # position embedding
- multires = 6
- }
-
-
- variance_network {
- init_val = 0.2
- }
-
- variance_network_lod1 {
- init_val = 0.2
- }
-
- rendering_network {
- in_geometry_feat_ch = 16
- in_rendering_feat_ch = 56
- anti_alias_pooling = True
- }
-
- rendering_network_lod1 {
- in_geometry_feat_ch = 16 # default 8
- in_rendering_feat_ch = 56
- anti_alias_pooling = True
-
- }
-
-
- trainer {
- n_samples_lod0 = 64
- n_importance_lod0 = 64
- n_samples_lod1 = 64
- n_importance_lod1 = 64
- n_outside = 0 # 128 if render_outside_uniform_sampling
- perturb = 1.0
- alpha_type = div
- }
-}
diff --git a/One-2-3-45-master 2/reconstruction/confs/one2345_lod_train.conf b/One-2-3-45-master 2/reconstruction/confs/one2345_lod_train.conf
deleted file mode 100644
index 253b279fa3c1845bab84b2d51d93dec8c8561c33..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/confs/one2345_lod_train.conf
+++ /dev/null
@@ -1,130 +0,0 @@
-# only use lod0
-
-general {
- base_exp_dir = ./exp/One2345/obj_lod0_train
- recording = [
- ./,
- ./data
- ./ops
- ./models
- ./loss
- ]
-}
-
-dataset {
- # local path
- trainpath = /objaverse-processed/zero12345_img/
- valpath = /objaverse-processed/zero12345_img/
- testpath = /objaverse-processed/zero12345_img/
-
-
- imgScale_train = 1.0
- imgScale_test = 1.0
- nviews = 5
- clean_image = True
- importance_sample = True
-
- # test dataset
- test_img_wh = [256, 256]
- test_clip_wh = [0, 0]
-
-
- test_dir_comment = train
-}
-
-train {
- learning_rate = 2e-4
- learning_rate_milestone = [100000, 150000, 200000]
- learning_rate_factor = 0.5
- end_iter = 200000
- save_freq = 5000
- val_freq = 1600
- val_mesh_freq = 1600
- report_freq = 100
-
- N_rays = 512
-
- validate_resolution_level = 4
- anneal_start = 0
- anneal_end = 25000
- anneal_start_lod1 = 0
- anneal_end_lod1 = 15000
-
- use_white_bkgd = True
-
- # Loss
- sdf_igr_weight = 0.1
- sdf_sparse_weight = 0.02
- sdf_decay_param = 100
- fg_bg_weight = 0.1
- bg_ratio = 0.3
- depth_loss_weight = 0.0
- if_fix_lod0_networks = False
-}
-
-model {
- num_lods = 1
-
- sdf_network_lod0 {
- lod = 0,
- ch_in = 56, # the channel num of fused pyramid features
-        voxel_size = 0.02105263, # 2 / (vol_dims - 1) = 2/95; the earlier 0.02083333 (2/96) is off by one
- vol_dims = [96, 96, 96],
- hidden_dim = 128,
- cost_type = variance_mean
- d_pyramid_feature_compress = 16,
- regnet_d_out = 16,
- num_sdf_layers = 4,
- # position embedding
- multires = 6
- }
-
-
- sdf_network_lod1 {
- lod = 1,
- ch_in = 56, # the channel num of fused pyramid features
-        voxel_size = 0.0104712, # 2 / (vol_dims - 1) = 2/191; the earlier 0.01041667 (2/192) is off by one
- vol_dims = [192, 192, 192],
- hidden_dim = 128,
- cost_type = variance_mean
- d_pyramid_feature_compress = 8,
- regnet_d_out = 16,
- num_sdf_layers = 4,
-
- # position embedding
- multires = 6
- }
-
-
- variance_network {
- init_val = 0.2
- }
-
- variance_network_lod1 {
- init_val = 0.2
- }
-
- rendering_network {
- in_geometry_feat_ch = 16
- in_rendering_feat_ch = 56
- anti_alias_pooling = True
- }
-
- rendering_network_lod1 {
- in_geometry_feat_ch = 16 # default 8
- in_rendering_feat_ch = 56
- anti_alias_pooling = True
-
- }
-
-
- trainer {
- n_samples_lod0 = 64
- n_importance_lod0 = 64
- n_samples_lod1 = 64
- n_importance_lod1 = 64
- n_outside = 0 # 128 if render_outside_uniform_sampling
- perturb = 1.0
- alpha_type = div
- }
-}
diff --git a/One-2-3-45-master 2/reconstruction/data/One2345_eval_new_data.py b/One-2-3-45-master 2/reconstruction/data/One2345_eval_new_data.py
deleted file mode 100644
index 5aa70f2c3ff4cb7002bc7897179a37490bd40de2..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/data/One2345_eval_new_data.py
+++ /dev/null
@@ -1,377 +0,0 @@
-from torch.utils.data import Dataset
-import os
-import json
-import numpy as np
-import cv2
-from PIL import Image
-import torch
-from torchvision import transforms as T
-from data.scene import get_boundingbox
-
-from models.rays import gen_rays_from_single_image, gen_random_rays_from_single_image
-from kornia import create_meshgrid
-
-def get_ray_directions(H, W, focal, center=None):
- """
- Get ray directions for all pixels in camera coordinate.
- Reference: https://www.scratchapixel.com/lessons/3d-basic-rendering/
- ray-tracing-generating-camera-rays/standard-coordinate-systems
- Inputs:
- H, W, focal: image height, width and focal length
- Outputs:
- directions: (H, W, 3), the direction of the rays in camera coordinate
- """
- grid = create_meshgrid(H, W, normalized_coordinates=False)[0] + 0.5 # 1xHxWx2
-
- i, j = grid.unbind(-1)
-    # +0.5 pixel centering is already applied in the meshgrid above
-    # see https://github.com/bmild/nerf/issues/24
- cent = center if center is not None else [W / 2, H / 2]
- directions = torch.stack([(i - cent[0]) / focal[0], (j - cent[1]) / focal[1], torch.ones_like(i)], -1) # (H, W, 3)
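-    # each direction is the pinhole back-projection of a pixel onto the z = 1 plane:
-    # ((i - cx) / fx, (j - cy) / fy, 1) in camera coordinates (not normalized to unit length)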
-
- return directions
-
-def load_K_Rt_from_P(filename, P=None):
- if P is None:
- lines = open(filename).read().splitlines()
- if len(lines) == 4:
- lines = lines[1:]
- lines = [[x[0], x[1], x[2], x[3]] for x in (x.split(" ") for x in lines)]
- P = np.asarray(lines).astype(np.float32).squeeze()
-
- out = cv2.decomposeProjectionMatrix(P)
- K = out[0]
- R = out[1]
- t = out[2]
-
- K = K / K[2, 2]
- intrinsics = np.eye(4)
- intrinsics[:3, :3] = K
-
- pose = np.eye(4, dtype=np.float32)
- pose[:3, :3] = R.transpose()
- pose[:3, 3] = (t[:3] / t[3])[:, 0]
-
- return intrinsics, pose # ! return cam2world matrix here
-
-
-# ! load one ref-image with multiple src-images in camera coordinate system
-class BlenderPerView(Dataset):
- def __init__(self, root_dir, split, img_wh=(256, 256), downSample=1.0,
- N_rays=512,
- vol_dims=[128, 128, 128], batch_size=1,
- clean_image=False, importance_sample=False,
- specific_dataset_name = 'GSO'
- ):
-
-
- self.root_dir = root_dir
- self.split = split
-
- self.specific_dataset_name = specific_dataset_name
- self.N_rays = N_rays
- self.batch_size = batch_size # - used for construct new metas for gru fusion training
-
- self.clean_image = clean_image
- self.importance_sample = importance_sample
- self.scale_factor = 1.0
- self.scale_mat = np.float32(np.diag([1, 1, 1, 1.0]))
-        assert self.split in ('val', 'export_mesh'), 'only support val or export_mesh'
- # find all subfolders
- main_folder = os.path.join(root_dir, self.specific_dataset_name)
- self.shape_list = [""] # os.listdir(main_folder) # MODIFIED
- self.shape_list.sort()
-
- self.lvis_paths = []
- for shape_name in self.shape_list:
- self.lvis_paths.append(os.path.join(main_folder, shape_name))
-
- if img_wh is not None:
- assert img_wh[0] % 32 == 0 and img_wh[1] % 32 == 0, \
- 'img_wh must both be multiples of 32!'
-
- # * bounding box for rendering
- self.bbox_min = np.array([-1.0, -1.0, -1.0])
- self.bbox_max = np.array([1.0, 1.0, 1.0])
-
- # - used for cost volume regularization
- self.voxel_dims = torch.tensor(vol_dims, dtype=torch.float32)
- self.partial_vol_origin = torch.tensor([-1., -1., -1.], dtype=torch.float32)
-
-
- def define_transforms(self):
- self.transform = T.Compose([T.ToTensor()])
-
-
- def load_cam_info(self):
- for vid, img_id in enumerate(self.img_ids):
- intrinsic, extrinsic, near_far = self.intrinsic, np.linalg.inv(self.c2ws[vid]), self.near_far
- self.all_intrinsics.append(intrinsic)
- self.all_extrinsics.append(extrinsic)
- self.all_near_fars.append(near_far)
-
- def read_mask(self, filename):
- mask_h = cv2.imread(filename, 0)
- mask_h = cv2.resize(mask_h, None, fx=self.downSample, fy=self.downSample,
- interpolation=cv2.INTER_NEAREST)
- mask = cv2.resize(mask_h, None, fx=0.25, fy=0.25,
- interpolation=cv2.INTER_NEAREST)
-
- mask[mask > 0] = 1 # the masks stored in png are not binary
- mask_h[mask_h > 0] = 1
-
- return mask, mask_h
-
- def cal_scale_mat(self, img_hw, intrinsics, extrinsics, near_fars, factor=1.):
-
- center, radius, bounds = get_boundingbox(img_hw, intrinsics, extrinsics, near_fars)
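-        # scale_mat maps the unit sphere at the origin onto the bounding sphere of all view frusta;
-        # 1 / radius is returned so callers can rescale world-space quantities (e.g. depths) to unit-sphere space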
-
- radius = radius * factor
- scale_mat = np.diag([radius, radius, radius, 1.0])
- scale_mat[:3, 3] = center.cpu().numpy()
- scale_mat = scale_mat.astype(np.float32)
-
- return scale_mat, 1. / radius.cpu().numpy()
-
- def __len__(self):
- return len(self.lvis_paths)
-
- def __getitem__(self, idx):
- sample = {}
- origin_idx = idx
- imgs, depths_h, masks_h = [], [], [] # full size (256, 256)
- intrinsics, w2cs, c2ws, near_fars = [], [], [], [] # record proj-mats between views
-
- folder_path = self.lvis_paths[idx]
- target_idx = 0
- # last subdir name
- shape_name = os.path.split(folder_path)[-1]
-
- pose_json_path = os.path.join(folder_path, "pose.json")
- with open(pose_json_path, 'r') as f:
- meta = json.load(f)
-
- self.img_ids = list(meta["c2ws"].keys()) # e.g. "view_0", "view_7", "view_0_2_10"
- self.img_wh = (256, 256)
- self.input_poses = np.array(list(meta["c2ws"].values()))
- intrinsic = np.eye(4)
- intrinsic[:3, :3] = np.array(meta["intrinsics"])
- self.intrinsic = intrinsic
- self.near_far = np.array(meta["near_far"])
- self.define_transforms()
- self.blender2opencv = np.array(
- [[1, 0, 0, 0], [0, -1, 0, 0], [0, 0, -1, 0], [0, 0, 0, 1]]
- )
-
- self.c2ws = []
- self.w2cs = []
- self.all_intrinsics = [] # the cam info of the whole scene
- self.all_extrinsics = []
- self.all_near_fars = []
-
- for idx, img_id in enumerate(self.img_ids):
- pose = self.input_poses[idx]
- c2w = pose @ self.blender2opencv
- self.c2ws.append(c2w)
- self.all_intrinsics.append(self.intrinsic)
- self.all_near_fars.append(self.near_far)
- self.all_extrinsics.append(np.linalg.inv(c2w))
- self.w2cs.append(np.linalg.inv(c2w))
- self.c2ws = np.stack(self.c2ws, axis=0)
- self.w2cs = np.stack(self.w2cs, axis=0)
-
-
- # target view
- c2w = self.c2ws[target_idx]
- w2c = np.linalg.inv(c2w)
- w2c_ref = w2c
- w2c_ref_inv = np.linalg.inv(w2c_ref)
-
- w2cs.append(w2c @ w2c_ref_inv)
- c2ws.append(np.linalg.inv(w2c @ w2c_ref_inv))
-
- img_filename = os.path.join(folder_path, 'stage1_8', f'{self.img_ids[target_idx]}')
-
- img = Image.open(img_filename)
- img = self.transform(img) # (4, h, w)
-
-
- if img.shape[0] == 4:
- img = img[:3] * img[-1:] + (1 - img[-1:]) # blend A to RGB
- imgs += [img]
-
-
- depth_h = torch.ones((img.shape[1], img.shape[2]), dtype=torch.float32)
- depth_h = depth_h.fill_(-1.0)
- mask_h = torch.ones((img.shape[1], img.shape[2]), dtype=torch.int32)
-
-
- depths_h.append(depth_h)
- masks_h.append(mask_h)
-
- intrinsic = self.intrinsic
- intrinsics.append(intrinsic)
-
- near_fars.append(self.all_near_fars[target_idx])
- image_perm = 0 # only supervised on reference view
-
- mask_dilated = None
-
- src_views = range(8, 8 + 8 * 4)
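-        # ids 0-7 are the reference views (stage1_8); the remaining 8 * 4 ids are the source views (stage2_8, 4 per reference view)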
-
- for vid in src_views:
-
- img_filename = os.path.join(folder_path, 'stage2_8', f'{self.img_ids[vid]}')
- img = Image.open(img_filename)
- img_wh = self.img_wh
-
- img = self.transform(img)
- if img.shape[0] == 4:
- img = img[:3] * img[-1:] + (1 - img[-1:]) # blend A to RGB
-
- imgs += [img]
- depth_h = np.ones(img.shape[1:], dtype=np.float32)
- depths_h.append(depth_h)
- masks_h.append(np.ones(img.shape[1:], dtype=np.int32))
-
- near_fars.append(self.all_near_fars[vid])
- intrinsics.append(self.all_intrinsics[vid])
-
- w2cs.append(self.all_extrinsics[vid] @ w2c_ref_inv)
-
-
- # ! estimate scale_mat
- scale_mat, scale_factor = self.cal_scale_mat(
- img_hw=[img_wh[1], img_wh[0]],
- intrinsics=intrinsics, extrinsics=w2cs,
- near_fars=near_fars, factor=1.1
- )
-
-
- new_near_fars = []
- new_w2cs = []
- new_c2ws = []
- new_affine_mats = []
- new_depths_h = []
- for intrinsic, extrinsic, near_far, depth in zip(intrinsics, w2cs, near_fars, depths_h):
-
- P = intrinsic @ extrinsic @ scale_mat
- P = P[:3, :4]
- # - should use load_K_Rt_from_P() to obtain c2w
- c2w = load_K_Rt_from_P(None, P)[1]
- w2c = np.linalg.inv(c2w)
- new_w2cs.append(w2c)
- new_c2ws.append(c2w)
- affine_mat = np.eye(4)
- affine_mat[:3, :4] = intrinsic[:3, :3] @ w2c[:3, :4]
- new_affine_mats.append(affine_mat)
-
- camera_o = c2w[:3, 3]
- dist = np.sqrt(np.sum(camera_o ** 2))
- near = dist - 1
- far = dist + 1
-
- new_near_fars.append([0.95 * near, 1.05 * far])
- new_depths_h.append(depth * scale_factor)
-
- imgs = torch.stack(imgs).float()
- depths_h = np.stack(new_depths_h)
- masks_h = np.stack(masks_h)
-
- affine_mats = np.stack(new_affine_mats)
- intrinsics, w2cs, c2ws, near_fars = np.stack(intrinsics), np.stack(new_w2cs), np.stack(new_c2ws), np.stack(
- new_near_fars)
-
- if self.split == 'train':
- start_idx = 0
- else:
- start_idx = 1
-
-
- target_w2cs = []
- target_intrinsics = []
- new_target_w2cs = []
- for i_idx in range(8):
- target_w2cs.append(self.all_extrinsics[i_idx] @ w2c_ref_inv)
- target_intrinsics.append(self.all_intrinsics[i_idx])
-
- for intrinsic, extrinsic in zip(target_intrinsics, target_w2cs):
-
- P = intrinsic @ extrinsic @ scale_mat
- P = P[:3, :4]
- # - should use load_K_Rt_from_P() to obtain c2w
- c2w = load_K_Rt_from_P(None, P)[1]
- w2c = np.linalg.inv(c2w)
- new_target_w2cs.append(w2c)
- target_w2cs = np.stack(new_target_w2cs)
-
-
-
- view_ids = [idx] + list(src_views)
- sample['origin_idx'] = origin_idx
- sample['images'] = imgs # (V, 3, H, W)
- sample['depths_h'] = torch.from_numpy(depths_h.astype(np.float32)) # (V, H, W)
- sample['masks_h'] = torch.from_numpy(masks_h.astype(np.float32)) # (V, H, W)
- sample['w2cs'] = torch.from_numpy(w2cs.astype(np.float32)) # (V, 4, 4)
- sample['c2ws'] = torch.from_numpy(c2ws.astype(np.float32)) # (V, 4, 4)
- sample['target_candidate_w2cs'] = torch.from_numpy(target_w2cs.astype(np.float32)) # (8, 4, 4)
- sample['near_fars'] = torch.from_numpy(near_fars.astype(np.float32)) # (V, 2)
- sample['intrinsics'] = torch.from_numpy(intrinsics.astype(np.float32))[:, :3, :3] # (V, 3, 3)
- sample['view_ids'] = torch.from_numpy(np.array(view_ids))
- sample['affine_mats'] = torch.from_numpy(affine_mats.astype(np.float32)) # ! in world space
-
- sample['scan'] = shape_name
-
- sample['scale_factor'] = torch.tensor(scale_factor)
- sample['img_wh'] = torch.from_numpy(np.array(img_wh))
- sample['render_img_idx'] = torch.tensor(image_perm)
- sample['partial_vol_origin'] = self.partial_vol_origin
- sample['meta'] = str(self.specific_dataset_name) + '_' + str(shape_name) + "_refview" + str(view_ids[0])
- # print("meta: ", sample['meta'])
-
- # - image to render
- sample['query_image'] = sample['images'][0]
- sample['query_c2w'] = sample['c2ws'][0]
- sample['query_w2c'] = sample['w2cs'][0]
- sample['query_intrinsic'] = sample['intrinsics'][0]
- sample['query_depth'] = sample['depths_h'][0]
- sample['query_mask'] = sample['masks_h'][0]
- sample['query_near_far'] = sample['near_fars'][0]
-
- sample['images'] = sample['images'][start_idx:] # (V, 3, H, W)
- sample['depths_h'] = sample['depths_h'][start_idx:] # (V, H, W)
- sample['masks_h'] = sample['masks_h'][start_idx:] # (V, H, W)
- sample['w2cs'] = sample['w2cs'][start_idx:] # (V, 4, 4)
- sample['c2ws'] = sample['c2ws'][start_idx:] # (V, 4, 4)
- sample['intrinsics'] = sample['intrinsics'][start_idx:] # (V, 3, 3)
- sample['view_ids'] = sample['view_ids'][start_idx:]
- sample['affine_mats'] = sample['affine_mats'][start_idx:] # ! in world space
-
- sample['scale_mat'] = torch.from_numpy(scale_mat)
- sample['trans_mat'] = torch.from_numpy(w2c_ref_inv)
-
- # - generate rays
- if ('val' in self.split) or ('test' in self.split):
- sample_rays = gen_rays_from_single_image(
- img_wh[1], img_wh[0],
- sample['query_image'],
- sample['query_intrinsic'],
- sample['query_c2w'],
- depth=sample['query_depth'],
- mask=sample['query_mask'] if self.clean_image else None)
- else:
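-            # crop a central 188x188 face region (rows 35:223, cols 32:220) before pooling down to 112x112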
- sample_rays = gen_random_rays_from_single_image(
- img_wh[1], img_wh[0],
- self.N_rays,
- sample['query_image'],
- sample['query_intrinsic'],
- sample['query_c2w'],
- depth=sample['query_depth'],
- mask=sample['query_mask'] if self.clean_image else None,
- dilated_mask=mask_dilated,
- importance_sample=self.importance_sample)
-
-
- sample['rays'] = sample_rays
-
- return sample
diff --git a/One-2-3-45-master 2/reconstruction/data/One2345_train.py b/One-2-3-45-master 2/reconstruction/data/One2345_train.py
deleted file mode 100644
index 0e3cbe37d82ba026f24b12c9a47d29f8999fb827..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/data/One2345_train.py
+++ /dev/null
@@ -1,393 +0,0 @@
-from torch.utils.data import Dataset
-import os
-import numpy as np
-import cv2
-from PIL import Image
-import torch
-from torchvision import transforms as T
-from data.scene import get_boundingbox
-from models.rays import gen_rays_from_single_image, gen_random_rays_from_single_image
-import json
-
-from kornia import create_meshgrid
-def get_ray_directions(H, W, focal, center=None):
- """
- Get ray directions for all pixels in camera coordinate.
- Reference: https://www.scratchapixel.com/lessons/3d-basic-rendering/
- ray-tracing-generating-camera-rays/standard-coordinate-systems
- Inputs:
- H, W, focal: image height, width and focal length
- Outputs:
- directions: (H, W, 3), the direction of the rays in camera coordinate
- """
- grid = create_meshgrid(H, W, normalized_coordinates=False)[0] + 0.5 # 1xHxWx2
-
- i, j = grid.unbind(-1)
-    # +0.5 pixel centering is already applied in the meshgrid above
-    # see https://github.com/bmild/nerf/issues/24
- cent = center if center is not None else [W / 2, H / 2]
- directions = torch.stack([(i - cent[0]) / focal[0], (j - cent[1]) / focal[1], torch.ones_like(i)], -1) # (H, W, 3)
-
- return directions
-
-def load_K_Rt_from_P(filename, P=None):
- if P is None:
- lines = open(filename).read().splitlines()
- if len(lines) == 4:
- lines = lines[1:]
- lines = [[x[0], x[1], x[2], x[3]] for x in (x.split(" ") for x in lines)]
- P = np.asarray(lines).astype(np.float32).squeeze()
-
- out = cv2.decomposeProjectionMatrix(P)
- K = out[0]
- R = out[1]
- t = out[2]
-
- K = K / K[2, 2]
- intrinsics = np.eye(4)
- intrinsics[:3, :3] = K
-
- pose = np.eye(4, dtype=np.float32)
-    pose[:3, :3] = R.transpose() # R from decomposeProjectionMatrix is world-to-camera; its transpose is the camera-to-world rotation
- pose[:3, 3] = (t[:3] / t[3])[:, 0]
-
- return intrinsics, pose # ! return cam2world matrix here
-
-
-# ! load one ref-image with multiple src-images in camera coordinate system
-class BlenderPerView(Dataset):
- def __init__(self, root_dir, split, img_wh=(256, 256), downSample=1.0,
- N_rays=512,
- vol_dims=[128, 128, 128], batch_size=1,
- clean_image=False, importance_sample=False,):
-
- self.root_dir = root_dir
- self.split = split
-
- self.N_rays = N_rays
- self.batch_size = batch_size
-
- self.clean_image = clean_image
- self.importance_sample = importance_sample
- self.scale_factor = 1.0
- self.scale_mat = np.float32(np.diag([1, 1, 1, 1.0]))
-
- lvis_json_path = os.path.join(self.root_dir, 'lvis_split_cc_by.json') # you can define your own split
-
- with open(lvis_json_path, 'r') as f:
- lvis_paths = json.load(f)
- if self.split == 'train':
- self.lvis_paths = lvis_paths['train']
- else:
- self.lvis_paths = lvis_paths['val']
- if img_wh is not None:
- assert img_wh[0] % 32 == 0 and img_wh[1] % 32 == 0, \
- 'img_wh must both be multiples of 32!'
-
-
- pose_json_path = os.path.join(self.root_dir, 'One2345_training_pose.json')
- with open(pose_json_path, 'r') as f:
- meta = json.load(f)
-
- self.img_ids = list(meta["c2ws"].keys())
- self.img_wh = img_wh
- self.input_poses = np.array(list(meta["c2ws"].values()))
- intrinsic = np.eye(4)
- intrinsic[:3, :3] = np.array(meta["intrinsics"])
- self.intrinsic = intrinsic
- self.near_far = np.array(meta["near_far"])
- # self.near_far[1] = 1.8
- self.define_transforms()
- self.blender2opencv = np.array(
- [[1, 0, 0, 0], [0, -1, 0, 0], [0, 0, -1, 0], [0, 0, 0, 1]]
- )
-
-
- self.c2ws = []
- self.w2cs = []
- self.all_intrinsics = [] # the cam info of the whole scene
- self.all_extrinsics = []
- self.all_near_fars = []
-
- for idx, img_id in enumerate(self.img_ids):
- pose = self.input_poses[idx]
- c2w = pose @ self.blender2opencv
- self.c2ws.append(c2w)
- self.all_intrinsics.append(self.intrinsic)
- self.all_near_fars.append(self.near_far)
- self.all_extrinsics.append(np.linalg.inv(c2w))
- self.w2cs.append(np.linalg.inv(c2w))
- self.c2ws = np.stack(self.c2ws, axis=0)
- self.w2cs = np.stack(self.w2cs, axis=0)
-
- # * bounding box for rendering
- self.bbox_min = np.array([-1.0, -1.0, -1.0])
- self.bbox_max = np.array([1.0, 1.0, 1.0])
-
- # - used for cost volume regularization
- self.voxel_dims = torch.tensor(vol_dims, dtype=torch.float32)
- self.partial_vol_origin = torch.tensor([-1., -1., -1.], dtype=torch.float32)
-
-
- def define_transforms(self):
- self.transform = T.Compose([T.ToTensor()])
-
-
- def read_mask(self, filename):
- mask_h = cv2.imread(filename, 0)
- mask_h = cv2.resize(mask_h, None, fx=self.downSample, fy=self.downSample,
- interpolation=cv2.INTER_NEAREST)
- mask = cv2.resize(mask_h, None, fx=0.25, fy=0.25,
- interpolation=cv2.INTER_NEAREST)
-
- mask[mask > 0] = 1 # the masks stored in png are not binary
- mask_h[mask_h > 0] = 1
-
- return mask, mask_h
-
- def cal_scale_mat(self, img_hw, intrinsics, extrinsics, near_fars, factor=1.):
-
- center, radius, bounds = get_boundingbox(img_hw, intrinsics, extrinsics, near_fars)
-
- radius = radius * factor
- scale_mat = np.diag([radius, radius, radius, 1.0])
- scale_mat[:3, 3] = center.cpu().numpy()
- scale_mat = scale_mat.astype(np.float32)
-
- return scale_mat, 1. / radius.cpu().numpy()
-
- def __len__(self):
- return 8 * len(self.lvis_paths)
-
-
- def __getitem__(self, idx):
- sample = {}
- origin_idx = idx
- imgs, depths_h, masks_h = [], [], [] # full size (256, 256)
- intrinsics, w2cs, c2ws, near_fars = [], [], [], [] # record proj mats between views
-
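-        # each object contributes 8 samples (see __len__): idx // 8 selects the object, idx % 8 the reference view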
- folder_uid_dict = self.lvis_paths[idx//8]
- idx = idx % 8 # [0, 7]
- folder_id = folder_uid_dict['folder_id']
- uid = folder_uid_dict['uid']
-
- # target view
- c2w = self.c2ws[idx]
- w2c = np.linalg.inv(c2w)
- w2c_ref = w2c
- w2c_ref_inv = np.linalg.inv(w2c_ref)
-
- w2cs.append(w2c @ w2c_ref_inv)
- c2ws.append(np.linalg.inv(w2c @ w2c_ref_inv))
-
- img_filename = os.path.join(self.root_dir, 'zero12345_narrow', folder_id, uid, f'view_{idx}.png')
- depth_filename = os.path.join(os.path.join(self.root_dir, 'zero12345_narrow', folder_id, uid, f'view_{idx}_depth_mm.png'))
-
- img = Image.open(img_filename)
- img = self.transform(img) # (4, h, w)
-
- if img.shape[0] == 4:
- img = img[:3] * img[-1:] + (1 - img[-1:]) # blend A to RGB
- imgs += [img]
-
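-        # depth is stored as a 16-bit PNG in millimeters; divide by 1000 to convert to meters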
- depth_h = cv2.imread(depth_filename, cv2.IMREAD_UNCHANGED).astype(np.uint16) / 1000.0
- mask_h = depth_h > 0
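-        # convert per-pixel z-depth to Euclidean distance along each ray by scaling the (unit-z) ray directions and taking the norm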
- directions = get_ray_directions(self.img_wh[1], self.img_wh[0], [self.intrinsic[0, 0], self.intrinsic[1, 1]]) # [H, W, 3]
- surface_points = directions * depth_h[..., None] # [H, W, 3]
- distance = np.linalg.norm(surface_points, axis=-1) # [H, W]
- depth_h = distance
-
- depths_h.append(depth_h)
- masks_h.append(mask_h)
-
- intrinsic = self.intrinsic
- intrinsics.append(intrinsic)
-
- near_fars.append(self.all_near_fars[idx])
- image_perm = 0 # only supervised on reference view
-
- mask_dilated = None
-
- src_views = range(8, 8 + 8 * 4)
-
- for vid in src_views:
- img_filename = os.path.join(self.root_dir, "zero12345_narrow", folder_id, uid, f'view_{(vid - 8) // 4}_{vid%4}_10.png')
-
- img = Image.open(img_filename)
- img_wh = self.img_wh
-
- img = self.transform(img)
- if img.shape[0] == 4:
- img = img[:3] * img[-1:] + (1 - img[-1:]) # blend A to RGB
-
- imgs += [img]
- depth_h = np.ones(img.shape[1:], dtype=np.float32)
- depths_h.append(depth_h)
- masks_h.append(np.ones(img.shape[1:], dtype=np.int32))
-
- near_fars.append(self.all_near_fars[vid])
- intrinsics.append(self.all_intrinsics[vid])
-
- w2cs.append(self.all_extrinsics[vid] @ w2c_ref_inv)
-
-
- # ! estimate scale_mat
- scale_mat, scale_factor = self.cal_scale_mat(
- img_hw=[img_wh[1], img_wh[0]],
- intrinsics=intrinsics, extrinsics=w2cs,
- near_fars=near_fars, factor=1.1
- )
-
-
- new_near_fars = []
- new_w2cs = []
- new_c2ws = []
- new_affine_mats = []
- new_depths_h = []
- for intrinsic, extrinsic, near_far, depth in zip(intrinsics, w2cs, near_fars, depths_h):
-
- P = intrinsic @ extrinsic @ scale_mat
- P = P[:3, :4]
- # - should use load_K_Rt_from_P() to obtain c2w
- c2w = load_K_Rt_from_P(None, P)[1]
- w2c = np.linalg.inv(c2w)
- new_w2cs.append(w2c)
- new_c2ws.append(c2w)
- affine_mat = np.eye(4)
- affine_mat[:3, :4] = intrinsic[:3, :3] @ w2c[:3, :4]
- new_affine_mats.append(affine_mat)
-
- camera_o = c2w[:3, 3]
- dist = np.sqrt(np.sum(camera_o ** 2))
- near = (dist - 1).clip(min=0.02)
- far = dist + 1
-
- new_near_fars.append([0.95 * near, 1.05 * far])
- new_depths_h.append(depth * scale_factor)
-
- if self.split == 'train':
- # randomly select one view from eight views as reference view
- idx_to_select = np.random.randint(0, 8)
-
- img_filename = os.path.join(self.root_dir, 'zero12345_narrow', folder_id, uid, f'view_{idx_to_select}.png')
- img = Image.open(img_filename)
- img = self.transform(img) # (4, h, w)
-
- if img.shape[0] == 4:
- img = img[:3] * img[-1:] + (1 - img[-1:]) # blend A to RGB
-
- imgs[0] = img
-
- w2c_selected = self.all_extrinsics[idx_to_select] @ w2c_ref_inv
- P = self.all_intrinsics[idx_to_select] @ w2c_selected @ scale_mat
- P = P[:3, :4]
-
- c2w = load_K_Rt_from_P(None, P)[1]
- w2c = np.linalg.inv(c2w)
- affine_mat = np.eye(4)
- affine_mat[:3, :4] = self.all_intrinsics[idx_to_select][:3, :3] @ w2c[:3, :4]
- new_affine_mats[0] = affine_mat
- camera_o = c2w[:3, 3]
- dist = np.sqrt(np.sum(camera_o ** 2))
- near = (dist - 1).clip(min=0.02)
- far = dist + 1
- new_near_fars[0] = [0.95 * near, 1.05 * far]
-
- new_w2cs[0] = w2c
- new_c2ws[0] = c2w
-
- depth_filename = os.path.join(os.path.join(self.root_dir, 'zero12345_narrow', folder_id, uid, f'view_{idx_to_select}_depth_mm.png'))
- depth_h = cv2.imread(depth_filename, cv2.IMREAD_UNCHANGED).astype(np.uint16) / 1000.0
- mask_h = depth_h > 0
- directions = get_ray_directions(self.img_wh[1], self.img_wh[0], [self.intrinsic[0, 0], self.intrinsic[1, 1]]) # [H, W, 3]
- surface_points = directions * depth_h[..., None] # [H, W, 3]
- distance = np.linalg.norm(surface_points, axis=-1) # [H, W]
- depth_h = distance * scale_factor
-
- new_depths_h[0] = depth_h
- masks_h[0] = mask_h
-
-
- imgs = torch.stack(imgs).float()
- depths_h = np.stack(new_depths_h)
- masks_h = np.stack(masks_h)
-
- affine_mats = np.stack(new_affine_mats)
- intrinsics, w2cs, c2ws, near_fars = np.stack(intrinsics), np.stack(new_w2cs), np.stack(new_c2ws), np.stack(
- new_near_fars)
-
- if self.split == 'train':
- start_idx = 0
- else:
- start_idx = 1
-
-
- view_ids = [idx] + list(src_views)
- sample['origin_idx'] = origin_idx
- sample['images'] = imgs # (V, 3, H, W)
- sample['depths_h'] = torch.from_numpy(depths_h.astype(np.float32)) # (V, H, W)
- sample['masks_h'] = torch.from_numpy(masks_h.astype(np.float32)) # (V, H, W)
- sample['w2cs'] = torch.from_numpy(w2cs.astype(np.float32)) # (V, 4, 4)
- sample['c2ws'] = torch.from_numpy(c2ws.astype(np.float32)) # (V, 4, 4)
- sample['near_fars'] = torch.from_numpy(near_fars.astype(np.float32)) # (V, 2)
- sample['intrinsics'] = torch.from_numpy(intrinsics.astype(np.float32))[:, :3, :3] # (V, 3, 3)
- sample['view_ids'] = torch.from_numpy(np.array(view_ids))
- sample['affine_mats'] = torch.from_numpy(affine_mats.astype(np.float32)) # ! in world space
-
- # sample['light_idx'] = torch.tensor(light_idx)
- sample['scan'] = folder_id
-
- sample['scale_factor'] = torch.tensor(scale_factor)
- sample['img_wh'] = torch.from_numpy(np.array(img_wh))
- sample['render_img_idx'] = torch.tensor(image_perm)
- sample['partial_vol_origin'] = self.partial_vol_origin
- sample['meta'] = str(folder_id) + "_" + str(uid) + "_refview" + str(view_ids[0])
-
-
- # - image to render
- sample['query_image'] = sample['images'][0]
- sample['query_c2w'] = sample['c2ws'][0]
- sample['query_w2c'] = sample['w2cs'][0]
- sample['query_intrinsic'] = sample['intrinsics'][0]
- sample['query_depth'] = sample['depths_h'][0]
- sample['query_mask'] = sample['masks_h'][0]
- sample['query_near_far'] = sample['near_fars'][0]
-
-
- sample['images'] = sample['images'][start_idx:] # (V, 3, H, W)
- sample['depths_h'] = sample['depths_h'][start_idx:] # (V, H, W)
- sample['masks_h'] = sample['masks_h'][start_idx:] # (V, H, W)
- sample['w2cs'] = sample['w2cs'][start_idx:] # (V, 4, 4)
- sample['c2ws'] = sample['c2ws'][start_idx:] # (V, 4, 4)
- sample['intrinsics'] = sample['intrinsics'][start_idx:] # (V, 3, 3)
- sample['view_ids'] = sample['view_ids'][start_idx:]
- sample['affine_mats'] = sample['affine_mats'][start_idx:] # ! in world space
-
- sample['scale_mat'] = torch.from_numpy(scale_mat)
- sample['trans_mat'] = torch.from_numpy(w2c_ref_inv)
-
- # - generate rays
- if ('val' in self.split) or ('test' in self.split):
- sample_rays = gen_rays_from_single_image(
- img_wh[1], img_wh[0],
- sample['query_image'],
- sample['query_intrinsic'],
- sample['query_c2w'],
- depth=sample['query_depth'],
- mask=sample['query_mask'] if self.clean_image else None)
- else:
- sample_rays = gen_random_rays_from_single_image(
- img_wh[1], img_wh[0],
- self.N_rays,
- sample['query_image'],
- sample['query_intrinsic'],
- sample['query_c2w'],
- depth=sample['query_depth'],
- mask=sample['query_mask'] if self.clean_image else None,
- dilated_mask=mask_dilated,
- importance_sample=self.importance_sample)
-
-
- sample['rays'] = sample_rays
-
- return sample
diff --git a/One-2-3-45-master 2/reconstruction/data/scene.py b/One-2-3-45-master 2/reconstruction/data/scene.py
deleted file mode 100644
index 5f34f4abf9977fba8a3f8785ef4f0c95dbd9fa1b..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/data/scene.py
+++ /dev/null
@@ -1,101 +0,0 @@
-import numpy as np
-import torch
-
-
-def rigid_transform(xyz, transform):
- """Applies a rigid transform (c2w) to an (N, 3) pointcloud.
- """
- device = xyz.device
- xyz_h = torch.cat([xyz, torch.ones((len(xyz), 1)).to(device)], dim=1) # (N, 4)
- xyz_t_h = (transform @ xyz_h.T).T # * checked: the same with the below
-
- return xyz_t_h[:, :3]
-
-
-def get_view_frustum(min_depth, max_depth, size, cam_intr, c2w):
-    """Get the corners of the 3D camera view frustum for a depth image
- """
- device = cam_intr.device
- im_h, im_w = size
- im_h = int(im_h)
- im_w = int(im_w)
- view_frust_pts = torch.stack([
- (torch.tensor([0, 0, im_w, im_w, 0, 0, im_w, im_w]).to(device) - cam_intr[0, 2]) * torch.tensor(
- [min_depth, min_depth, min_depth, min_depth, max_depth, max_depth, max_depth, max_depth]).to(device) /
- cam_intr[0, 0],
- (torch.tensor([0, im_h, 0, im_h, 0, im_h, 0, im_h]).to(device) - cam_intr[1, 2]) * torch.tensor(
- [min_depth, min_depth, min_depth, min_depth, max_depth, max_depth, max_depth, max_depth]).to(device) /
- cam_intr[1, 1],
- torch.tensor([min_depth, min_depth, min_depth, min_depth, max_depth, max_depth, max_depth, max_depth]).to(
- device)
- ])
- view_frust_pts = view_frust_pts.type(torch.float32)
- c2w = c2w.type(torch.float32)
- view_frust_pts = rigid_transform(view_frust_pts.T, c2w).T
- return view_frust_pts
-
-
-def set_pixel_coords(h, w):
- i_range = torch.arange(0, h).view(1, h, 1).expand(1, h, w).type(torch.float32) # [1, H, W]
- j_range = torch.arange(0, w).view(1, 1, w).expand(1, h, w).type(torch.float32) # [1, H, W]
- ones = torch.ones(1, h, w).type(torch.float32)
-
- pixel_coords = torch.stack((j_range, i_range, ones), dim=1) # [1, 3, H, W]
-
- return pixel_coords
-
-
-def get_boundingbox(img_hw, intrinsics, extrinsics, near_fars):
-    """
-    Get the minimum axis-aligned bounding box enclosing the view frusta of all cameras.
-    :param img_hw: (H, W) image size shared by all views
-    :param intrinsics: per-view camera intrinsics
-    :param extrinsics: per-view world-to-camera matrices
-    :param near_fars: per-view (near, far) depth range
-    :return: (center, radius, bnds) of the bounding box
-    """
-
- bnds = torch.zeros((3, 2))
- bnds[:, 0] = np.inf
- bnds[:, 1] = -np.inf
-
- if isinstance(intrinsics, list):
- num = len(intrinsics)
- else:
- num = intrinsics.shape[0]
- # print("num: ", num)
- view_frust_pts_list = []
- for i in range(num):
- if not isinstance(intrinsics[i], torch.Tensor):
- cam_intr = torch.tensor(intrinsics[i])
- w2c = torch.tensor(extrinsics[i])
- c2w = torch.inverse(w2c)
- else:
- cam_intr = intrinsics[i]
- w2c = extrinsics[i]
- c2w = torch.inverse(w2c)
- min_depth, max_depth = near_fars[i][0], near_fars[i][1]
-        # todo: check that the corresponding points are matched
-
- view_frust_pts = get_view_frustum(min_depth, max_depth, img_hw, cam_intr, c2w)
- bnds[:, 0] = torch.min(bnds[:, 0], torch.min(view_frust_pts, dim=1)[0])
- bnds[:, 1] = torch.max(bnds[:, 1], torch.max(view_frust_pts, dim=1)[0])
- view_frust_pts_list.append(view_frust_pts)
- all_view_frust_pts = torch.cat(view_frust_pts_list, dim=1)
-
- # print("all_view_frust_pts: ", all_view_frust_pts.shape)
- # distance = torch.norm(all_view_frust_pts, dim=0)
- # print("distance: ", distance)
-
- # print("all_view_frust_pts_z: ", all_view_frust_pts[2, :])
-
- center = torch.tensor(((bnds[0, 1] + bnds[0, 0]) / 2, (bnds[1, 1] + bnds[1, 0]) / 2,
- (bnds[2, 1] + bnds[2, 0]) / 2))
-
- lengths = bnds[:, 1] - bnds[:, 0]
-
- max_length, _ = torch.max(lengths, dim=0)
- radius = max_length / 2
-
- # print("radius: ", radius)
- return center, radius, bnds
diff --git a/One-2-3-45-master 2/reconstruction/exp/lod0/.gitignore b/One-2-3-45-master 2/reconstruction/exp/lod0/.gitignore
deleted file mode 100644
index 35c54109136367b098bb5112c0b87cee09444c0b..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/exp/lod0/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-checkpoints_*/
\ No newline at end of file
diff --git a/One-2-3-45-master 2/reconstruction/exp/lod0/checkpoints/.gitkeep b/One-2-3-45-master 2/reconstruction/exp/lod0/checkpoints/.gitkeep
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/One-2-3-45-master 2/reconstruction/exp_runner_generic_blender_train.py b/One-2-3-45-master 2/reconstruction/exp_runner_generic_blender_train.py
deleted file mode 100644
index a72e49be96d88ed2ab6677e17a26685a2c46e65e..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/exp_runner_generic_blender_train.py
+++ /dev/null
@@ -1,627 +0,0 @@
-import torch
-from torch.utils.data import DataLoader
-import argparse
-import os
-import logging
-import numpy as np
-from shutil import copyfile
-from torch.utils.tensorboard import SummaryWriter
-from icecream import ic
-from tqdm import tqdm
-from pyhocon import ConfigFactory
-
-from models.fields import SingleVarianceNetwork
-
-from models.featurenet import FeatureNet
-
-from models.trainer_generic import GenericTrainer
-
-from models.sparse_sdf_network import SparseSdfNetwork
-
-from models.rendering_network import GeneralRenderingNetwork
-
-from datetime import datetime
-
-from data.One2345_train import BlenderPerView
-from termcolor import colored
-
-from datetime import datetime
-
-class Runner:
- def __init__(self, conf_path, mode='train', is_continue=False,
- is_restore=False, restore_lod0=False, local_rank=0):
-
- # Initial setting
- self.device = torch.device('cuda:%d' % local_rank)
- # self.device = torch.device('cuda')
- self.num_devices = torch.cuda.device_count()
- self.is_continue = is_continue
- self.is_restore = is_restore
- self.restore_lod0 = restore_lod0
- self.mode = mode
- self.model_list = []
- self.logger = logging.getLogger('exp_logger')
-
- print(colored("detected %d GPUs" % self.num_devices, "red"))
-
- self.conf_path = conf_path
- self.conf = ConfigFactory.parse_file(conf_path)
- self.timestamp = None
- if not self.is_continue:
- self.timestamp = '_{:%Y_%m_%d_%H_%M_%S}'.format(datetime.now())
- self.base_exp_dir = self.conf['general.base_exp_dir'] + self.timestamp
- else:
- self.base_exp_dir = self.conf['general.base_exp_dir']
- self.conf['general.base_exp_dir'] = self.base_exp_dir
- print(colored("base_exp_dir: " + self.base_exp_dir, 'yellow'))
- os.makedirs(self.base_exp_dir, exist_ok=True)
- self.iter_step = 0
- self.val_step = 0
-
- # training parameters
- self.end_iter = self.conf.get_int('train.end_iter')
- self.save_freq = self.conf.get_int('train.save_freq')
- self.report_freq = self.conf.get_int('train.report_freq')
- self.val_freq = self.conf.get_int('train.val_freq')
- self.val_mesh_freq = self.conf.get_int('train.val_mesh_freq')
- self.batch_size = self.num_devices # one sample per GPU; the trainer is wrapped with DataParallel
- self.validate_resolution_level = self.conf.get_int('train.validate_resolution_level')
- self.learning_rate = self.conf.get_float('train.learning_rate')
- self.learning_rate_milestone = self.conf.get_list('train.learning_rate_milestone')
- self.learning_rate_factor = self.conf.get_float('train.learning_rate_factor')
- self.use_white_bkgd = self.conf.get_bool('train.use_white_bkgd')
- self.N_rays = self.conf.get_int('train.N_rays')
-
- # warmup params for sdf gradient
- self.anneal_start_lod0 = self.conf.get_float('train.anneal_start', default=0)
- self.anneal_end_lod0 = self.conf.get_float('train.anneal_end', default=0)
- self.anneal_start_lod1 = self.conf.get_float('train.anneal_start_lod1', default=0)
- self.anneal_end_lod1 = self.conf.get_float('train.anneal_end_lod1', default=0)
-
- self.writer = None
-
- # Networks
- self.num_lods = self.conf.get_int('model.num_lods')
-
- self.rendering_network_outside = None
- self.sdf_network_lod0 = None
- self.sdf_network_lod1 = None
- self.variance_network_lod0 = None
- self.variance_network_lod1 = None
- self.rendering_network_lod0 = None
- self.rendering_network_lod1 = None
- self.pyramid_feature_network = None # extracts 2D pyramid feature maps from the images, used for geometry
- self.pyramid_feature_network_lod1 = None # a different feature network may be used for each LoD
-
- # * pyramid_feature_network
- self.pyramid_feature_network = FeatureNet().to(self.device)
- self.sdf_network_lod0 = SparseSdfNetwork(**self.conf['model.sdf_network_lod0']).to(self.device)
- self.variance_network_lod0 = SingleVarianceNetwork(**self.conf['model.variance_network']).to(self.device)
-
- if self.num_lods > 1:
- self.sdf_network_lod1 = SparseSdfNetwork(**self.conf['model.sdf_network_lod1']).to(self.device)
- self.variance_network_lod1 = SingleVarianceNetwork(**self.conf['model.variance_network']).to(self.device)
-
- self.rendering_network_lod0 = GeneralRenderingNetwork(**self.conf['model.rendering_network']).to(
- self.device)
-
- if self.num_lods > 1:
- self.pyramid_feature_network_lod1 = FeatureNet().to(self.device)
- self.rendering_network_lod1 = GeneralRenderingNetwork(
- **self.conf['model.rendering_network_lod1']).to(self.device)
- if self.mode == 'export_mesh' or self.mode == 'val':
- base_exp_dir_to_store = os.path.join(self.base_exp_dir, '{:%Y_%m_%d_%H_%M_%S}'.format(datetime.now()))
- else:
- base_exp_dir_to_store = self.base_exp_dir
-
- print(colored(f"Store in: {base_exp_dir_to_store}", "blue"))
- # Renderer model
- self.trainer = GenericTrainer(
- self.rendering_network_outside,
- self.pyramid_feature_network,
- self.pyramid_feature_network_lod1,
- self.sdf_network_lod0,
- self.sdf_network_lod1,
- self.variance_network_lod0,
- self.variance_network_lod1,
- self.rendering_network_lod0,
- self.rendering_network_lod1,
- **self.conf['model.trainer'],
- timestamp=self.timestamp,
- base_exp_dir=base_exp_dir_to_store,
- conf=self.conf)
-
- self.data_setup() # * data setup
-
- self.optimizer_setup()
-
- # Load checkpoint
- latest_model_name = None
- if is_continue:
- model_list_raw = os.listdir(os.path.join(self.base_exp_dir, 'checkpoints'))
- model_list = []
- for model_name in model_list_raw:
- if model_name.startswith('ckpt'):
- if model_name[-3:] == 'pth': # and int(model_name[5:-4]) <= self.end_iter:
- model_list.append(model_name)
- model_list.sort()
- latest_model_name = model_list[-1]
-
- if latest_model_name is not None:
- self.logger.info('Find checkpoint: {}'.format(latest_model_name))
- self.load_checkpoint(latest_model_name)
-
- self.trainer = torch.nn.DataParallel(self.trainer).to(self.device)
-
- if self.mode[:5] == 'train':
- self.file_backup()
-
- def optimizer_setup(self):
- self.params_to_train = self.trainer.get_trainable_params()
- self.optimizer = torch.optim.Adam(self.params_to_train, lr=self.learning_rate)
-
- def data_setup(self):
- """
- If using DDP, use setup() rather than prepare_data();
- prepare_data() is only called on one GPU/TPU in distributed training.
- :return:
- """
-
- self.train_dataset = BlenderPerView(
- root_dir=self.conf['dataset.trainpath'],
- split=self.conf.get_string('dataset.train_split', default='train'),
- downSample=self.conf['dataset.imgScale_train'],
- N_rays=self.N_rays,
- batch_size=self.batch_size,
- clean_image=True, # True for training
- importance_sample=self.conf.get_bool('dataset.importance_sample', default=False),
- )
-
- self.val_dataset = BlenderPerView(
- root_dir=self.conf['dataset.valpath'],
- split=self.conf.get_string('dataset.test_split', default='test'),
- downSample=self.conf['dataset.imgScale_test'],
- N_rays=self.N_rays,
- batch_size=self.batch_size,
- clean_image=self.conf.get_bool('dataset.mask_out_image',
- default=False) if self.mode != 'train' else False,
- importance_sample=self.conf.get_bool('dataset.importance_sample', default=False),
- )
-
- # item = self.train_dataset.__getitem__(0)
- self.train_dataloader = DataLoader(self.train_dataset,
- shuffle=True,
- num_workers=4 * self.batch_size,
- batch_size=self.batch_size,
- pin_memory=True,
- drop_last=True
- )
-
- self.val_dataloader = DataLoader(self.val_dataset,
- shuffle=False,
- num_workers=4 * self.batch_size,
- batch_size=self.batch_size,
- pin_memory=True,
- drop_last=False
- )
-
- self.val_dataloader_iterator = iter(self.val_dataloader) # - should be after "reconstruct_metas_for_gru_fusion"
-
- def train(self):
- self.writer = SummaryWriter(log_dir=os.path.join(self.base_exp_dir, 'logs'))
-
- dataloader = self.train_dataloader
-
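- # extend end_iter so that training runs for a whole number of epochs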
- epochs_needed = int(1 + self.end_iter // len(dataloader))
- self.end_iter = epochs_needed * len(dataloader)
- self.adjust_learning_rate()
- print(colored("starting training learning rate: {:.5f}".format(self.optimizer.param_groups[0]['lr']), "yellow"))
-
- background_rgb = None
- if self.use_white_bkgd:
- background_rgb = 1.0
-
- for epoch_i in range(epochs_needed):
-
- print(colored("current epoch %d" % epoch_i, 'red'))
- dataloader = tqdm(dataloader)
-
- for batch in dataloader:
- batch['batch_idx'] = torch.tensor([x for x in range(self.batch_size)]) # used to get meta
-
- if self.iter_step > self.end_iter:
- break
-
- # - warmup params
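- # with a single LoD the SDF-gradient annealing ratio ramps up over training; with two LoDs, lod0 is kept fully annealed and only lod1 warms up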
- if self.num_lods == 1:
- alpha_inter_ratio_lod0 = self.get_alpha_inter_ratio(self.anneal_start_lod0, self.anneal_end_lod0)
- else:
- alpha_inter_ratio_lod0 = 1.
- alpha_inter_ratio_lod1 = self.get_alpha_inter_ratio(self.anneal_start_lod1, self.anneal_end_lod1)
-
- losses = self.trainer(
- batch,
- background_rgb=background_rgb,
- alpha_inter_ratio_lod0=alpha_inter_ratio_lod0,
- alpha_inter_ratio_lod1=alpha_inter_ratio_lod1,
- iter_step=self.iter_step,
- mode='train',
- )
-
- loss_types = ['loss_lod0', 'loss_lod1']
-
- losses_lod0 = losses['losses_lod0']
- losses_lod1 = losses['losses_lod1']
- loss = 0
- for loss_type in loss_types:
- if losses[loss_type] is not None:
- loss = loss + losses[loss_type].mean()
- self.optimizer.zero_grad()
- loss.backward()
- torch.nn.utils.clip_grad_norm_(self.params_to_train, 1.0)
- self.optimizer.step()
- self.iter_step += 1
-
- if self.iter_step % self.report_freq == 0:
- self.writer.add_scalar('Loss/loss', loss, self.iter_step)
- self.writer.add_scalar('Loss/loss_fg_bg_loss', losses_lod0['fg_bg_loss'].mean() if losses_lod0 is not None else 0, self.iter_step)
- if losses_lod0 is not None:
- self.writer.add_scalar('Loss/d_loss_lod0',
- losses_lod0['depth_loss'].mean() if losses_lod0 is not None else 0,
- self.iter_step)
- self.writer.add_scalar('Loss/sparse_loss_lod0',
- losses_lod0[
- 'sparse_loss'].mean() if losses_lod0 is not None else 0,
- self.iter_step)
- self.writer.add_scalar('Loss/color_loss_lod0',
- losses_lod0['color_fine_loss'].mean()
- if losses_lod0['color_fine_loss'] is not None else 0,
- self.iter_step)
-
- self.writer.add_scalar('statis/psnr_lod0',
- losses_lod0['psnr'].mean()
- if losses_lod0['psnr'] is not None else 0,
- self.iter_step)
-
- self.writer.add_scalar('param/variance_lod0',
- 1. / torch.exp(self.variance_network_lod0.variance * 10),
- self.iter_step)
- self.writer.add_scalar('param/eikonal_loss', losses_lod0['gradient_error_loss'].mean() if losses_lod0 is not None else 0,
- self.iter_step)
-
- ######## - lod 1
- if self.num_lods > 1:
- self.writer.add_scalar('Loss/d_loss_lod1',
- losses_lod1['depth_loss'].mean() if losses_lod1 is not None else 0,
- self.iter_step)
- self.writer.add_scalar('Loss/sparse_loss_lod1',
- losses_lod1[
- 'sparse_loss'].mean() if losses_lod1 is not None else 0,
- self.iter_step)
- self.writer.add_scalar('Loss/color_loss_lod1',
- losses_lod1['color_fine_loss'].mean()
- if losses_lod1['color_fine_loss'] is not None else 0,
- self.iter_step)
- self.writer.add_scalar('statis/sdf_mean_lod1',
- losses_lod1['sdf_mean'].mean() if losses_lod1 is not None else 0,
- self.iter_step)
- self.writer.add_scalar('statis/psnr_lod1',
- losses_lod1['psnr'].mean()
- if losses_lod1['psnr'] is not None else 0,
- self.iter_step)
- self.writer.add_scalar('statis/sparseness_0.01_lod1',
- losses_lod1['sparseness_1'].mean()
- if losses_lod1['sparseness_1'] is not None else 0,
- self.iter_step)
- self.writer.add_scalar('statis/sparseness_0.02_lod1',
- losses_lod1['sparseness_2'].mean()
- if losses_lod1['sparseness_2'] is not None else 0,
- self.iter_step)
- self.writer.add_scalar('param/variance_lod1',
- 1. / torch.exp(self.variance_network_lod1.variance * 10),
- self.iter_step)
-
- print(self.base_exp_dir)
- print(
- 'iter:{:>8d} '
- 'loss = {:.4f} '
- 'd_loss_lod0 = {:.4f} '
- 'color_loss_lod0 = {:.4f} '
- 'sparse_loss_lod0= {:.4f} '
- 'd_loss_lod1 = {:.4f} '
- 'color_loss_lod1 = {:.4f} '
- ' lr = {:.5f}'.format(
- self.iter_step, loss,
- losses_lod0['depth_loss'].mean() if losses_lod0 is not None else 0,
- losses_lod0['color_fine_loss'].mean() if losses_lod0 is not None else 0,
- losses_lod0['sparse_loss'].mean() if losses_lod0 is not None else 0,
- losses_lod1['depth_loss'].mean() if losses_lod1 is not None else 0,
- losses_lod1['color_fine_loss'].mean() if losses_lod1 is not None else 0,
- self.optimizer.param_groups[0]['lr']))
-
- print(colored('alpha_inter_ratio_lod0 = {:.4f} alpha_inter_ratio_lod1 = {:.4f}\n'.format(
- alpha_inter_ratio_lod0, alpha_inter_ratio_lod1), 'green'))
-
- if losses_lod0 is not None:
- # print("[TEST]: weights_sum in print", losses_lod0['weights_sum'].mean())
- # import ipdb; ipdb.set_trace()
- print(
- 'iter:{:>8d} '
- 'variance = {:.5f} '
- 'weights_sum = {:.4f} '
- 'weights_sum_fg = {:.4f} '
- 'alpha_sum = {:.4f} '
- 'sparse_weight= {:.4f} '
- 'background_loss = {:.4f} '
- 'background_weight = {:.4f} '
- .format(
- self.iter_step,
- losses_lod0['variance'].mean(),
- losses_lod0['weights_sum'].mean(),
- losses_lod0['weights_sum_fg'].mean(),
- losses_lod0['alpha_sum'].mean(),
- losses_lod0['sparse_weight'].mean(),
- losses_lod0['fg_bg_loss'].mean(),
- losses_lod0['fg_bg_weight'].mean(),
- ))
-
- if losses_lod1 is not None:
- print(
- 'iter:{:>8d} '
- 'variance = {:.5f} '
- 'weights_sum = {:.4f} '
- 'alpha_sum = {:.4f} '
- 'fg_bg_loss = {:.4f} '
- 'fg_bg_weight = {:.4f} '
- 'sparse_weight = {:.4f} '
- .format(
- self.iter_step,
- losses_lod1['variance'].mean(),
- losses_lod1['weights_sum'].mean(),
- losses_lod1['alpha_sum'].mean(),
- losses_lod1['fg_bg_loss'].mean(),
- losses_lod1['fg_bg_weight'].mean(),
- losses_lod1['sparse_weight'].mean(),
- ))
-
- if self.iter_step % self.save_freq == 0:
- self.save_checkpoint()
-
- if self.iter_step % self.val_freq == 0:
- self.validate()
-
- # - adjust learning rate
- self.adjust_learning_rate()
-
- def adjust_learning_rate(self):
- # - adjust learning rate, cosine schedule
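- # the base learning rate is scaled by a factor that decays from 1.0 to 0.1 along a half cosine over end_iter iterations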
- learning_rate = (np.cos(np.pi * self.iter_step / self.end_iter) + 1.0) * 0.5 * 0.9 + 0.1
- learning_rate = self.learning_rate * learning_rate
- for g in self.optimizer.param_groups:
- g['lr'] = learning_rate
-
- def get_alpha_inter_ratio(self, start, end):
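- # linear warm-up: 0 before start, ramps to 1 between start and end, and fixed at 1 when no end iteration is set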
- if end == 0.0:
- return 1.0
- elif self.iter_step < start:
- return 0.0
- else:
- return np.min([1.0, (self.iter_step - start) / (end - start)])
-
- def file_backup(self):
- # copy python file
- dir_lis = self.conf['general.recording']
- os.makedirs(os.path.join(self.base_exp_dir, 'recording'), exist_ok=True)
- for dir_name in dir_lis:
- cur_dir = os.path.join(self.base_exp_dir, 'recording', dir_name)
- os.makedirs(cur_dir, exist_ok=True)
- files = os.listdir(dir_name)
- for f_name in files:
- if f_name[-3:] == '.py':
- copyfile(os.path.join(dir_name, f_name), os.path.join(cur_dir, f_name))
-
- # copy configs
- copyfile(self.conf_path, os.path.join(self.base_exp_dir, 'recording', 'config.conf'))
-
- def load_checkpoint(self, checkpoint_name):
-
- def load_state_dict(network, checkpoint, comment):
- if network is not None:
- try:
- pretrained_dict = checkpoint[comment]
-
- model_dict = network.state_dict()
-
- # 1. filter out unnecessary keys
- pretrained_dict = {k: v for k, v in pretrained_dict.items() if k in model_dict}
- # 2. overwrite entries in the existing state dict
- model_dict.update(pretrained_dict)
- # 3. load the new state dict
- network.load_state_dict(model_dict)
- except:
- print(colored(comment + " load fails", 'yellow'))
-
- checkpoint = torch.load(os.path.join(self.base_exp_dir, 'checkpoints', checkpoint_name),
- map_location=self.device)
-
- load_state_dict(self.rendering_network_outside, checkpoint, 'rendering_network_outside')
-
- load_state_dict(self.sdf_network_lod0, checkpoint, 'sdf_network_lod0')
- load_state_dict(self.sdf_network_lod1, checkpoint, 'sdf_network_lod1')
-
- load_state_dict(self.pyramid_feature_network, checkpoint, 'pyramid_feature_network')
- load_state_dict(self.pyramid_feature_network_lod1, checkpoint, 'pyramid_feature_network_lod1')
-
- load_state_dict(self.variance_network_lod0, checkpoint, 'variance_network_lod0')
- load_state_dict(self.variance_network_lod1, checkpoint, 'variance_network_lod1')
-
- load_state_dict(self.rendering_network_lod0, checkpoint, 'rendering_network_lod0')
- load_state_dict(self.rendering_network_lod1, checkpoint, 'rendering_network_lod1')
-
- if self.restore_lod0: # use the trained lod0 networks to initialize lod1 networks
- load_state_dict(self.sdf_network_lod1, checkpoint, 'sdf_network_lod0')
- load_state_dict(self.pyramid_feature_network_lod1, checkpoint, 'pyramid_feature_network')
- load_state_dict(self.rendering_network_lod1, checkpoint, 'rendering_network_lod0')
-
- if self.is_continue and (not self.restore_lod0):
- try:
- self.optimizer.load_state_dict(checkpoint['optimizer'])
- except:
- print(colored("load optimizer fails", "yellow"))
- self.iter_step = checkpoint['iter_step']
- self.val_step = checkpoint['val_step'] if 'val_step' in checkpoint.keys() else 0
-
- self.logger.info('End')
-
- def save_checkpoint(self):
-
- def save_state_dict(network, checkpoint, comment):
- if network is not None:
- checkpoint[comment] = network.state_dict()
-
- checkpoint = {
- 'optimizer': self.optimizer.state_dict(),
- 'iter_step': self.iter_step,
- 'val_step': self.val_step,
- }
-
- save_state_dict(self.sdf_network_lod0, checkpoint, "sdf_network_lod0")
- save_state_dict(self.sdf_network_lod1, checkpoint, "sdf_network_lod1")
-
- save_state_dict(self.rendering_network_outside, checkpoint, 'rendering_network_outside')
- save_state_dict(self.rendering_network_lod0, checkpoint, "rendering_network_lod0")
- save_state_dict(self.rendering_network_lod1, checkpoint, "rendering_network_lod1")
-
- save_state_dict(self.variance_network_lod0, checkpoint, 'variance_network_lod0')
- save_state_dict(self.variance_network_lod1, checkpoint, 'variance_network_lod1')
-
- save_state_dict(self.pyramid_feature_network, checkpoint, 'pyramid_feature_network')
- save_state_dict(self.pyramid_feature_network_lod1, checkpoint, 'pyramid_feature_network_lod1')
-
- os.makedirs(os.path.join(self.base_exp_dir, 'checkpoints'), exist_ok=True)
- torch.save(checkpoint,
- os.path.join(self.base_exp_dir, 'checkpoints', 'ckpt_{:0>6d}.pth'.format(self.iter_step)))
-
- def validate(self, idx=-1, resolution_level=-1):
- # validate image
-
- ic(self.iter_step, idx)
- self.logger.info('Validate begin')
- if idx < 0:
- idx = self.val_step
- self.val_step += 1
-
- try:
- batch = next(self.val_dataloader_iterator)
- # batch = self.val_dataloader_iterator.next()
- except:
- self.val_dataloader_iterator = iter(self.val_dataloader) # reset
-
- batch = next(self.val_dataloader_iterator)
-
-
- background_rgb = None
- if self.use_white_bkgd:
- background_rgb = 1.0
-
- batch['batch_idx'] = torch.tensor([x for x in range(self.batch_size)])
-
- # - warmup params
- if self.num_lods == 1:
- alpha_inter_ratio_lod0 = self.get_alpha_inter_ratio(self.anneal_start_lod0, self.anneal_end_lod0)
- else:
- alpha_inter_ratio_lod0 = 1.
- alpha_inter_ratio_lod1 = self.get_alpha_inter_ratio(self.anneal_start_lod1, self.anneal_end_lod1)
-
- self.trainer(
- batch,
- background_rgb=background_rgb,
- alpha_inter_ratio_lod0=alpha_inter_ratio_lod0,
- alpha_inter_ratio_lod1=alpha_inter_ratio_lod1,
- iter_step=self.iter_step,
- save_vis=True,
- mode='val',
- )
-
-
- def export_mesh(self, idx=-1, resolution_level=-1):
- # validate image
-
- ic(self.iter_step, idx)
- self.logger.info('Validate begin')
- import time
- start1 = time.time()
- if idx < 0:
- idx = self.val_step
- # idx = np.random.randint(len(self.val_dataset))
- self.val_step += 1
-
- try:
- batch = next(self.val_dataloader_iterator)
- except:
- self.val_dataloader_iterator = iter(self.val_dataloader) # reset
-
- batch = next(self.val_dataloader_iterator)
-
-
- background_rgb = None
- if self.use_white_bkgd:
- background_rgb = 1.0
-
- batch['batch_idx'] = torch.tensor([x for x in range(self.batch_size)])
-
- # - warmup params
- if self.num_lods == 1:
- alpha_inter_ratio_lod0 = self.get_alpha_inter_ratio(self.anneal_start_lod0, self.anneal_end_lod0)
- else:
- alpha_inter_ratio_lod0 = 1.
- alpha_inter_ratio_lod1 = self.get_alpha_inter_ratio(self.anneal_start_lod1, self.anneal_end_lod1)
- end1 = time.time()
- print("time for getting data", end1 - start1)
- self.trainer(
- batch,
- background_rgb=background_rgb,
- alpha_inter_ratio_lod0=alpha_inter_ratio_lod0,
- alpha_inter_ratio_lod1=alpha_inter_ratio_lod1,
- iter_step=self.iter_step,
- save_vis=True,
- mode='export_mesh',
- )
-
-
-if __name__ == '__main__':
- # torch.set_default_tensor_type('torch.cuda.FloatTensor')
- torch.set_default_dtype(torch.float32)
- FORMAT = "[%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s"
- logging.basicConfig(level=logging.INFO, format=FORMAT)
-
- parser = argparse.ArgumentParser()
- parser.add_argument('--conf', type=str, default='./confs/base.conf')
- parser.add_argument('--mode', type=str, default='train')
- parser.add_argument('--threshold', type=float, default=0.0)
- parser.add_argument('--is_continue', default=False, action="store_true")
- parser.add_argument('--is_restore', default=False, action="store_true")
- parser.add_argument('--is_finetune', default=False, action="store_true")
- parser.add_argument('--train_from_scratch', default=False, action="store_true")
- parser.add_argument('--restore_lod0', default=False, action="store_true")
- parser.add_argument('--local_rank', type=int, default=0)
- args = parser.parse_args()
-
- torch.cuda.set_device(args.local_rank)
- torch.backends.cudnn.benchmark = True # ! make training 2x faster
-
- runner = Runner(args.conf, args.mode, args.is_continue, args.is_restore, args.restore_lod0,
- args.local_rank)
-
- if args.mode == 'train':
- runner.train()
- elif args.mode == 'val':
- for i in range(len(runner.val_dataset)):
- runner.validate()
- elif args.mode == 'export_mesh':
- for i in range(len(runner.val_dataset)):
- runner.export_mesh()
diff --git a/One-2-3-45-master 2/reconstruction/exp_runner_generic_blender_val.py b/One-2-3-45-master 2/reconstruction/exp_runner_generic_blender_val.py
deleted file mode 100644
index 7485fdfc315ddfebe0462ef79da8da0073b639df..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/exp_runner_generic_blender_val.py
+++ /dev/null
@@ -1,625 +0,0 @@
-import os
-import logging
-import argparse
-import numpy as np
-from shutil import copyfile
-import torch
-from torch.utils.data import DataLoader
-from torch.utils.tensorboard import SummaryWriter
-from rich import print
-from tqdm import tqdm
-from pyhocon import ConfigFactory
-
-import sys
-sys.path.append(os.path.dirname(__file__))
-
-from models.fields import SingleVarianceNetwork
-from models.featurenet import FeatureNet
-from models.trainer_generic import GenericTrainer
-from models.sparse_sdf_network import SparseSdfNetwork
-from models.rendering_network import GeneralRenderingNetwork
-from data.One2345_eval_new_data import BlenderPerView
-
-
-from datetime import datetime
-
-class Runner:
- def __init__(self, conf_path, mode='train', is_continue=False,
- is_restore=False, restore_lod0=False, local_rank=0):
-
- # Initial setting
- self.device = torch.device('cuda:%d' % local_rank)
- # self.device = torch.device('cuda')
- self.num_devices = torch.cuda.device_count()
- self.is_continue = is_continue or (mode == "export_mesh")
- self.is_restore = is_restore
- self.restore_lod0 = restore_lod0
- self.mode = mode
- self.model_list = []
- self.logger = logging.getLogger('exp_logger')
-
- print("detected %d GPUs" % self.num_devices)
-
- self.conf_path = conf_path
- self.conf = ConfigFactory.parse_file(conf_path)
- self.timestamp = None
- if not self.is_continue:
- self.timestamp = '_{:%Y_%m_%d_%H_%M_%S}'.format(datetime.now())
- self.base_exp_dir = self.conf['general.base_exp_dir'] + self.timestamp
- else:
- self.base_exp_dir = self.conf['general.base_exp_dir']
- self.conf['general.base_exp_dir'] = self.base_exp_dir
- print("base_exp_dir: " + self.base_exp_dir)
- os.makedirs(self.base_exp_dir, exist_ok=True)
- self.iter_step = 0
- self.val_step = 0
-
- # training parameters
- self.end_iter = self.conf.get_int('train.end_iter')
- self.save_freq = self.conf.get_int('train.save_freq')
- self.report_freq = self.conf.get_int('train.report_freq')
- self.val_freq = self.conf.get_int('train.val_freq')
- self.val_mesh_freq = self.conf.get_int('train.val_mesh_freq')
- self.batch_size = self.num_devices # one sample per GPU; the trainer is wrapped with DataParallel
- self.validate_resolution_level = self.conf.get_int('train.validate_resolution_level')
- self.learning_rate = self.conf.get_float('train.learning_rate')
- self.learning_rate_milestone = self.conf.get_list('train.learning_rate_milestone')
- self.learning_rate_factor = self.conf.get_float('train.learning_rate_factor')
- self.use_white_bkgd = self.conf.get_bool('train.use_white_bkgd')
- self.N_rays = self.conf.get_int('train.N_rays')
-
- # warmup params for sdf gradient
- self.anneal_start_lod0 = self.conf.get_float('train.anneal_start', default=0)
- self.anneal_end_lod0 = self.conf.get_float('train.anneal_end', default=0)
- self.anneal_start_lod1 = self.conf.get_float('train.anneal_start_lod1', default=0)
- self.anneal_end_lod1 = self.conf.get_float('train.anneal_end_lod1', default=0)
-
- self.writer = None
-
- # Networks
- self.num_lods = self.conf.get_int('model.num_lods')
-
- self.rendering_network_outside = None
- self.sdf_network_lod0 = None
- self.sdf_network_lod1 = None
- self.variance_network_lod0 = None
- self.variance_network_lod1 = None
- self.rendering_network_lod0 = None
- self.rendering_network_lod1 = None
- self.pyramid_feature_network = None # extracts 2D pyramid feature maps from the images, used for geometry
- self.pyramid_feature_network_lod1 = None # a different feature network may be used for each LoD
-
- # * pyramid_feature_network
- self.pyramid_feature_network = FeatureNet().to(self.device)
- self.sdf_network_lod0 = SparseSdfNetwork(**self.conf['model.sdf_network_lod0']).to(self.device)
- self.variance_network_lod0 = SingleVarianceNetwork(**self.conf['model.variance_network']).to(self.device)
-
- if self.num_lods > 1:
- self.sdf_network_lod1 = SparseSdfNetwork(**self.conf['model.sdf_network_lod1']).to(self.device)
- self.variance_network_lod1 = SingleVarianceNetwork(**self.conf['model.variance_network']).to(self.device)
-
- self.rendering_network_lod0 = GeneralRenderingNetwork(**self.conf['model.rendering_network']).to(
- self.device)
-
- if self.num_lods > 1:
- self.pyramid_feature_network_lod1 = FeatureNet().to(self.device)
- self.rendering_network_lod1 = GeneralRenderingNetwork(
- **self.conf['model.rendering_network_lod1']).to(self.device)
- if self.mode == 'export_mesh' or self.mode == 'val':
- # base_exp_dir_to_store = os.path.join(self.base_exp_dir, '{:%Y_%m_%d_%H_%M_%S}'.format(datetime.now()))
- base_exp_dir_to_store = os.path.join("../", args.specific_dataset_name) #"../gradio_tmp" # MODIFIED
- else:
- base_exp_dir_to_store = self.base_exp_dir
-
- print(f"Store in: {base_exp_dir_to_store}")
- # Renderer model
- self.trainer = GenericTrainer(
- self.rendering_network_outside,
- self.pyramid_feature_network,
- self.pyramid_feature_network_lod1,
- self.sdf_network_lod0,
- self.sdf_network_lod1,
- self.variance_network_lod0,
- self.variance_network_lod1,
- self.rendering_network_lod0,
- self.rendering_network_lod1,
- **self.conf['model.trainer'],
- timestamp=self.timestamp,
- base_exp_dir=base_exp_dir_to_store,
- conf=self.conf)
-
- self.data_setup() # * data setup
-
- self.optimizer_setup()
-
- # Load checkpoint
- latest_model_name = None
- if self.is_continue:
- model_list_raw = os.listdir(os.path.join(self.base_exp_dir, 'checkpoints'))
- model_list = []
- for model_name in model_list_raw:
- if model_name.startswith('ckpt'):
- if model_name[-3:] == 'pth': # and int(model_name[5:-4]) <= self.end_iter:
- model_list.append(model_name)
- model_list.sort()
- latest_model_name = model_list[-1]
-
- if latest_model_name is not None:
- self.logger.info('Find checkpoint: {}'.format(latest_model_name))
- self.load_checkpoint(latest_model_name)
-
- self.trainer = torch.nn.DataParallel(self.trainer).to(self.device)
-
- if self.mode[:5] == 'train':
- self.file_backup()
-
- def optimizer_setup(self):
- self.params_to_train = self.trainer.get_trainable_params()
- self.optimizer = torch.optim.Adam(self.params_to_train, lr=self.learning_rate)
-
- def data_setup(self):
- """
- If using DDP, use setup() rather than prepare_data();
- prepare_data() is only called on one GPU/TPU in distributed training.
- :return:
- """
-
- self.train_dataset = BlenderPerView(
- root_dir=self.conf['dataset.trainpath'],
- split=self.conf.get_string('dataset.train_split', default='train'),
- downSample=self.conf['dataset.imgScale_train'],
- N_rays=self.N_rays,
- batch_size=self.batch_size,
- clean_image=True, # True for training
- importance_sample=self.conf.get_bool('dataset.importance_sample', default=False),
- specific_dataset_name=args.specific_dataset_name
- )
-
- self.val_dataset = BlenderPerView(
- root_dir=self.conf['dataset.valpath'],
- split=self.conf.get_string('dataset.test_split', default='test'),
- downSample=self.conf['dataset.imgScale_test'],
- N_rays=self.N_rays,
- batch_size=self.batch_size,
- clean_image=self.conf.get_bool('dataset.mask_out_image',
- default=False) if self.mode != 'train' else False,
- importance_sample=self.conf.get_bool('dataset.importance_sample', default=False),
- specific_dataset_name=args.specific_dataset_name
- )
-
- self.train_dataloader = DataLoader(self.train_dataset,
- shuffle=True,
- num_workers=4 * self.batch_size,
- # num_workers=1,
- batch_size=self.batch_size,
- pin_memory=True,
- drop_last=True
- )
-
- self.val_dataloader = DataLoader(self.val_dataset,
- # shuffle=False if self.mode == 'train' else True,
- shuffle=False,
- num_workers=4 * self.batch_size,
- # num_workers=1,
- batch_size=self.batch_size,
- pin_memory=True,
- drop_last=False
- )
-
- self.val_dataloader_iterator = iter(self.val_dataloader) # - should be after "reconstruct_metas_for_gru_fusion"
-
- def train(self):
- self.writer = SummaryWriter(log_dir=os.path.join(self.base_exp_dir, 'logs'))
- res_step = self.end_iter - self.iter_step
-
- dataloader = self.train_dataloader
-
- epochs = int(1 + res_step // len(dataloader))
-
- self.adjust_learning_rate()
- print("starting training learning rate: {:.5f}".format(self.optimizer.param_groups[0]['lr']))
-
- background_rgb = None
- if self.use_white_bkgd:
- # background_rgb = torch.ones([1, 3]).to(self.device)
- background_rgb = 1.0
-
- for epoch_i in range(epochs):
-
- print("current epoch %d" % epoch_i)
- dataloader = tqdm(dataloader)
-
- for batch in dataloader:
- # print("Checker1:, fetch data")
- batch['batch_idx'] = torch.tensor([x for x in range(self.batch_size)]) # used to get meta
-
- # - warmup params
- if self.num_lods == 1:
- alpha_inter_ratio_lod0 = self.get_alpha_inter_ratio(self.anneal_start_lod0, self.anneal_end_lod0)
- else:
- alpha_inter_ratio_lod0 = 1.
- alpha_inter_ratio_lod1 = self.get_alpha_inter_ratio(self.anneal_start_lod1, self.anneal_end_lod1)
-
- losses = self.trainer(
- batch,
- background_rgb=background_rgb,
- alpha_inter_ratio_lod0=alpha_inter_ratio_lod0,
- alpha_inter_ratio_lod1=alpha_inter_ratio_lod1,
- iter_step=self.iter_step,
- mode='train',
- )
-
- loss_types = ['loss_lod0', 'loss_lod1']
- # print("[TEST]: weights_sum in trainer return", losses['losses_lod0']['weights_sum'].mean())
-
- losses_lod0 = losses['losses_lod0']
- losses_lod1 = losses['losses_lod1']
- # import ipdb; ipdb.set_trace()
- loss = 0
- for loss_type in loss_types:
- if losses[loss_type] is not None:
- loss = loss + losses[loss_type].mean()
- # print("Checker4:, begin BP")
- self.optimizer.zero_grad()
- loss.backward()
- torch.nn.utils.clip_grad_norm_(self.params_to_train, 1.0)
- self.optimizer.step()
- # print("Checker5:, end BP")
- self.iter_step += 1
-
- if self.iter_step % self.report_freq == 0:
- self.writer.add_scalar('Loss/loss', loss, self.iter_step)
-
- if losses_lod0 is not None:
- self.writer.add_scalar('Loss/d_loss_lod0',
- losses_lod0['depth_loss'].mean() if losses_lod0 is not None else 0,
- self.iter_step)
- self.writer.add_scalar('Loss/sparse_loss_lod0',
- losses_lod0[
- 'sparse_loss'].mean() if losses_lod0 is not None else 0,
- self.iter_step)
- self.writer.add_scalar('Loss/color_loss_lod0',
- losses_lod0['color_fine_loss'].mean()
- if losses_lod0['color_fine_loss'] is not None else 0,
- self.iter_step)
-
- self.writer.add_scalar('statis/psnr_lod0',
- losses_lod0['psnr'].mean()
- if losses_lod0['psnr'] is not None else 0,
- self.iter_step)
-
- self.writer.add_scalar('param/variance_lod0',
- 1. / torch.exp(self.variance_network_lod0.variance * 10),
- self.iter_step)
- self.writer.add_scalar('param/eikonal_loss', losses_lod0['gradient_error_loss'].mean() if losses_lod0 is not None else 0,
- self.iter_step)
-
- ######## - lod 1
- if self.num_lods > 1:
- self.writer.add_scalar('Loss/d_loss_lod1',
- losses_lod1['depth_loss'].mean() if losses_lod1 is not None else 0,
- self.iter_step)
- self.writer.add_scalar('Loss/sparse_loss_lod1',
- losses_lod1[
- 'sparse_loss'].mean() if losses_lod1 is not None else 0,
- self.iter_step)
- self.writer.add_scalar('Loss/color_loss_lod1',
- losses_lod1['color_fine_loss'].mean()
- if losses_lod1['color_fine_loss'] is not None else 0,
- self.iter_step)
- self.writer.add_scalar('statis/sdf_mean_lod1',
- losses_lod1['sdf_mean'].mean() if losses_lod1 is not None else 0,
- self.iter_step)
- self.writer.add_scalar('statis/psnr_lod1',
- losses_lod1['psnr'].mean()
- if losses_lod1['psnr'] is not None else 0,
- self.iter_step)
- self.writer.add_scalar('statis/sparseness_0.01_lod1',
- losses_lod1['sparseness_1'].mean()
- if losses_lod1['sparseness_1'] is not None else 0,
- self.iter_step)
- self.writer.add_scalar('statis/sparseness_0.02_lod1',
- losses_lod1['sparseness_2'].mean()
- if losses_lod1['sparseness_2'] is not None else 0,
- self.iter_step)
- self.writer.add_scalar('param/variance_lod1',
- 1. / torch.exp(self.variance_network_lod1.variance * 10),
- self.iter_step)
-
- print(self.base_exp_dir)
- print(
- 'iter:{:>8d} '
- 'loss = {:.4f} '
- 'd_loss_lod0 = {:.4f} '
- 'color_loss_lod0 = {:.4f} '
- 'sparse_loss_lod0= {:.4f} '
- 'd_loss_lod1 = {:.4f} '
- 'color_loss_lod1 = {:.4f} '
- ' lr = {:.5f}'.format(
- self.iter_step, loss,
- losses_lod0['depth_loss'].mean() if losses_lod0 is not None else 0,
- losses_lod0['color_fine_loss'].mean() if losses_lod0 is not None else 0,
- losses_lod0['sparse_loss'].mean() if losses_lod0 is not None else 0,
- losses_lod1['depth_loss'].mean() if losses_lod1 is not None else 0,
- losses_lod1['color_fine_loss'].mean() if losses_lod1 is not None else 0,
- self.optimizer.param_groups[0]['lr']))
-
- print('alpha_inter_ratio_lod0 = {:.4f} alpha_inter_ratio_lod1 = {:.4f}\n'.format(
- alpha_inter_ratio_lod0, alpha_inter_ratio_lod1))
-
- if losses_lod0 is not None:
- # print("[TEST]: weights_sum in print", losses_lod0['weights_sum'].mean())
- # import ipdb; ipdb.set_trace()
- print(
- 'iter:{:>8d} '
- 'variance = {:.5f} '
- 'weights_sum = {:.4f} '
- 'weights_sum_fg = {:.4f} '
- 'alpha_sum = {:.4f} '
- 'sparse_weight= {:.4f} '
- 'background_loss = {:.4f} '
- 'background_weight = {:.4f} '
- .format(
- self.iter_step,
- losses_lod0['variance'].mean(),
- losses_lod0['weights_sum'].mean(),
- losses_lod0['weights_sum_fg'].mean(),
- losses_lod0['alpha_sum'].mean(),
- losses_lod0['sparse_weight'].mean(),
- losses_lod0['fg_bg_loss'].mean(),
- losses_lod0['fg_bg_weight'].mean(),
- ))
-
- if losses_lod1 is not None:
- print(
- 'iter:{:>8d} '
- 'variance = {:.5f} '
- 'weights_sum = {:.4f} '
- 'alpha_sum = {:.4f} '
- 'fg_bg_loss = {:.4f} '
- 'fg_bg_weight = {:.4f} '
- 'sparse_weight = {:.4f} '
- .format(
- self.iter_step,
- losses_lod1['variance'].mean(),
- losses_lod1['weights_sum'].mean(),
- losses_lod1['alpha_sum'].mean(),
- losses_lod1['fg_bg_loss'].mean(),
- losses_lod1['fg_bg_weight'].mean(),
- losses_lod1['sparse_weight'].mean(),
- ))
-
- if self.iter_step % self.save_freq == 0:
- self.save_checkpoint()
-
- if self.iter_step % self.val_freq == 0:
- self.validate()
-
- # - adjust learning rate
- self.adjust_learning_rate()
-
- def adjust_learning_rate(self):
- # - adjust learning rate, cosine schedule
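- # the base learning rate is scaled by a factor that decays from 1.0 to 0.1 along a half cosine over end_iter iterations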
- learning_rate = (np.cos(np.pi * self.iter_step / self.end_iter) + 1.0) * 0.5 * 0.9 + 0.1
- learning_rate = self.learning_rate * learning_rate
- for g in self.optimizer.param_groups:
- g['lr'] = learning_rate
-
- def get_alpha_inter_ratio(self, start, end):
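- # linear warm-up: 0 before start, ramps to 1 between start and end, and fixed at 1 when no end iteration is set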
- if end == 0.0:
- return 1.0
- elif self.iter_step < start:
- return 0.0
- else:
- return np.min([1.0, (self.iter_step - start) / (end - start)])
-
- def file_backup(self):
- # copy python file
- dir_lis = self.conf['general.recording']
- os.makedirs(os.path.join(self.base_exp_dir, 'recording'), exist_ok=True)
- for dir_name in dir_lis:
- cur_dir = os.path.join(self.base_exp_dir, 'recording', dir_name)
- os.makedirs(cur_dir, exist_ok=True)
- files = os.listdir(dir_name)
- for f_name in files:
- if f_name[-3:] == '.py':
- copyfile(os.path.join(dir_name, f_name), os.path.join(cur_dir, f_name))
-
- # copy configs
- copyfile(self.conf_path, os.path.join(self.base_exp_dir, 'recording', 'config.conf'))
-
- def load_checkpoint(self, checkpoint_name):
-
- def load_state_dict(network, checkpoint, comment):
- if network is not None:
- try:
- pretrained_dict = checkpoint[comment]
-
- model_dict = network.state_dict()
-
- # 1. filter out unnecessary keys
- pretrained_dict = {k: v for k, v in pretrained_dict.items() if k in model_dict}
- # 2. overwrite entries in the existing state dict
- model_dict.update(pretrained_dict)
- # 3. load the new state dict
- network.load_state_dict(model_dict)
- except:
- print(comment + " load fails")
-
- checkpoint = torch.load(os.path.join(self.base_exp_dir, 'checkpoints', checkpoint_name),
- map_location=self.device)
-
- load_state_dict(self.rendering_network_outside, checkpoint, 'rendering_network_outside')
-
- load_state_dict(self.sdf_network_lod0, checkpoint, 'sdf_network_lod0')
- load_state_dict(self.sdf_network_lod1, checkpoint, 'sdf_network_lod1')
-
- load_state_dict(self.pyramid_feature_network, checkpoint, 'pyramid_feature_network')
- load_state_dict(self.pyramid_feature_network_lod1, checkpoint, 'pyramid_feature_network_lod1')
-
- load_state_dict(self.variance_network_lod0, checkpoint, 'variance_network_lod0')
- load_state_dict(self.variance_network_lod1, checkpoint, 'variance_network_lod1')
-
- load_state_dict(self.rendering_network_lod0, checkpoint, 'rendering_network_lod0')
- load_state_dict(self.rendering_network_lod1, checkpoint, 'rendering_network_lod1')
-
- if self.restore_lod0: # use the trained lod0 networks to initialize lod1 networks
- load_state_dict(self.sdf_network_lod1, checkpoint, 'sdf_network_lod0')
- load_state_dict(self.pyramid_feature_network_lod1, checkpoint, 'pyramid_feature_network')
- load_state_dict(self.rendering_network_lod1, checkpoint, 'rendering_network_lod0')
-
- if self.is_continue and (not self.restore_lod0):
- try:
- self.optimizer.load_state_dict(checkpoint['optimizer'])
- except:
- print("load optimizer fails")
- self.iter_step = checkpoint['iter_step']
- self.val_step = checkpoint['val_step'] if 'val_step' in checkpoint.keys() else 0
-
- self.logger.info('End')
-
- def save_checkpoint(self):
-
- def save_state_dict(network, checkpoint, comment):
- if network is not None:
- checkpoint[comment] = network.state_dict()
-
- checkpoint = {
- 'optimizer': self.optimizer.state_dict(),
- 'iter_step': self.iter_step,
- 'val_step': self.val_step,
- }
-
- save_state_dict(self.sdf_network_lod0, checkpoint, "sdf_network_lod0")
- save_state_dict(self.sdf_network_lod1, checkpoint, "sdf_network_lod1")
-
- save_state_dict(self.rendering_network_outside, checkpoint, 'rendering_network_outside')
- save_state_dict(self.rendering_network_lod0, checkpoint, "rendering_network_lod0")
- save_state_dict(self.rendering_network_lod1, checkpoint, "rendering_network_lod1")
-
- save_state_dict(self.variance_network_lod0, checkpoint, 'variance_network_lod0')
- save_state_dict(self.variance_network_lod1, checkpoint, 'variance_network_lod1')
-
- save_state_dict(self.pyramid_feature_network, checkpoint, 'pyramid_feature_network')
- save_state_dict(self.pyramid_feature_network_lod1, checkpoint, 'pyramid_feature_network_lod1')
-
- os.makedirs(os.path.join(self.base_exp_dir, 'checkpoints'), exist_ok=True)
- torch.save(checkpoint,
- os.path.join(self.base_exp_dir, 'checkpoints', 'ckpt_{:0>6d}.pth'.format(self.iter_step)))
-
- def validate(self, resolution_level=-1):
- # validate image
- print("iter_step: ", self.iter_step)
- self.logger.info('Validate begin')
- self.val_step += 1
-
- try:
- batch = next(self.val_dataloader_iterator)
- except:
- self.val_dataloader_iterator = iter(self.val_dataloader) # reset
-
- batch = next(self.val_dataloader_iterator)
-
-
- background_rgb = None
- if self.use_white_bkgd:
- # background_rgb = torch.ones([1, 3]).to(self.device)
- background_rgb = 1.0
-
- batch['batch_idx'] = torch.tensor([x for x in range(self.batch_size)])
-
- # - warmup params
- if self.num_lods == 1:
- alpha_inter_ratio_lod0 = self.get_alpha_inter_ratio(self.anneal_start_lod0, self.anneal_end_lod0)
- else:
- alpha_inter_ratio_lod0 = 1.
- alpha_inter_ratio_lod1 = self.get_alpha_inter_ratio(self.anneal_start_lod1, self.anneal_end_lod1)
-
- self.trainer(
- batch,
- background_rgb=background_rgb,
- alpha_inter_ratio_lod0=alpha_inter_ratio_lod0,
- alpha_inter_ratio_lod1=alpha_inter_ratio_lod1,
- iter_step=self.iter_step,
- save_vis=True,
- mode='val',
- )
-
-
- def export_mesh(self, resolution=360):
- print("iter_step: ", self.iter_step)
- self.logger.info('Validate begin')
- self.val_step += 1
-
- try:
- batch = next(self.val_dataloader_iterator)
- except:
- self.val_dataloader_iterator = iter(self.val_dataloader) # reset
-
- batch = next(self.val_dataloader_iterator)
-
-
- background_rgb = None
- if self.use_white_bkgd:
- background_rgb = 1.0
-
- batch['batch_idx'] = torch.tensor([x for x in range(self.batch_size)])
-
- # - warmup params
- if self.num_lods == 1:
- alpha_inter_ratio_lod0 = self.get_alpha_inter_ratio(self.anneal_start_lod0, self.anneal_end_lod0)
- else:
- alpha_inter_ratio_lod0 = 1.
- alpha_inter_ratio_lod1 = self.get_alpha_inter_ratio(self.anneal_start_lod1, self.anneal_end_lod1)
- self.trainer(
- batch,
- background_rgb=background_rgb,
- alpha_inter_ratio_lod0=alpha_inter_ratio_lod0,
- alpha_inter_ratio_lod1=alpha_inter_ratio_lod1,
- iter_step=self.iter_step,
- save_vis=True,
- mode='export_mesh',
- resolution=resolution,
- )
-
-
-if __name__ == '__main__':
- # torch.set_default_tensor_type('torch.cuda.FloatTensor')
- torch.set_default_dtype(torch.float32)
- FORMAT = "[%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s"
- logging.basicConfig(level=logging.INFO, format=FORMAT)
-
- parser = argparse.ArgumentParser()
- parser.add_argument('--conf', type=str, default='./confs/base.conf')
- parser.add_argument('--mode', type=str, default='train')
- parser.add_argument('--threshold', type=float, default=0.0)
- parser.add_argument('--is_continue', default=False, action="store_true")
- parser.add_argument('--is_restore', default=False, action="store_true")
- parser.add_argument('--is_finetune', default=False, action="store_true")
- parser.add_argument('--train_from_scratch', default=False, action="store_true")
- parser.add_argument('--restore_lod0', default=False, action="store_true")
- parser.add_argument('--local_rank', type=int, default=0)
- parser.add_argument('--specific_dataset_name', type=str, default='GSO')
- parser.add_argument('--resolution', type=int, default=360)
-
-
- args = parser.parse_args()
-
- torch.cuda.set_device(args.local_rank)
- torch.backends.cudnn.benchmark = True # ! make training 2x faster
-
- runner = Runner(args.conf, args.mode, args.is_continue, args.is_restore, args.restore_lod0,
- args.local_rank)
-
- if args.mode == 'train':
- runner.train()
- elif args.mode == 'val':
- for i in range(len(runner.val_dataset)):
- runner.validate()
- elif args.mode == 'export_mesh':
- for i in range(len(runner.val_dataset)):
- runner.export_mesh(resolution=args.resolution)
diff --git a/One-2-3-45-master 2/reconstruction/loss/__init__.py b/One-2-3-45-master 2/reconstruction/loss/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/One-2-3-45-master 2/reconstruction/loss/color_loss.py b/One-2-3-45-master 2/reconstruction/loss/color_loss.py
deleted file mode 100644
index abf3f0eb51c6ed29799a870d5833b23c4c41dde8..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/loss/color_loss.py
+++ /dev/null
@@ -1,152 +0,0 @@
-import torch
-import torch.nn as nn
-from loss.ncc import NCC
-
-
-class Normalize(nn.Module):
- def __init__(self):
- super(Normalize, self).__init__()
-
- def forward(self, bottom):
- qn = torch.norm(bottom, p=2, dim=1).unsqueeze(dim=1) + 1e-12
- top = bottom.div(qn)
-
- return top
-
-
-class OcclusionColorLoss(nn.Module):
- def __init__(self, alpha=1, beta=0.025, gama=0.01, occlusion_aware=True, weight_thred=[0.6]):
- super(OcclusionColorLoss, self).__init__()
- self.alpha = alpha
- self.beta = beta
- self.gama = gama
- self.occlusion_aware = occlusion_aware
- self.eps = 1e-4
-
- self.weight_thred = weight_thred
- self.adjuster = ParamAdjuster(self.weight_thred, self.beta)
-
- def forward(self, pred, gt, weight, mask, detach=False, occlusion_aware=True):
- """
-
- :param pred: [N_pts, 3]
- :param gt: [N_pts, 3]
- :param weight: [N_pts]
- :param mask: [N_pts]
- :return:
- """
- if detach:
- weight = weight.detach()
-
- error = torch.abs(pred - gt).sum(dim=-1, keepdim=False) # [N_pts]
- error = error[mask]
-
- if not (self.occlusion_aware and occlusion_aware):
- return torch.mean(error), torch.mean(error)
-
- beta = self.adjuster(weight.mean())
-
- # weight = weight[mask]
- weight = weight.clamp(0.0, 1.0)
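- # term1: visibility-weighted photometric error
- # term2: pushes weights toward 1 (log(1 - w) decreases as w grows), i.e. discourages marking points as occluded
- # term3: pushes weights toward 0; with the default beta > gama the net bias is toward larger weights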
- term1 = self.alpha * torch.mean(weight[mask] * error)
- term2 = beta * torch.log(1 - weight + self.eps).mean()
- term3 = self.gama * torch.log(weight + self.eps).mean()
-
- return term1 + term2 + term3, term1
-
-
-class OcclusionColorPatchLoss(nn.Module):
- def __init__(self, alpha=1, beta=0.025, gama=0.015,
- occlusion_aware=True, type='l1', h_patch_size=3, weight_thred=[0.6]):
- super(OcclusionColorPatchLoss, self).__init__()
- self.alpha = alpha
- self.beta = beta
- self.gama = gama
- self.occlusion_aware = occlusion_aware
- self.type = type # 'l1' or 'ncc' loss
- self.ncc = NCC(h_patch_size=h_patch_size)
- self.eps = 1e-4
- self.weight_thred = weight_thred
-
- self.adjuster = ParamAdjuster(self.weight_thred, self.beta)
-
- print("type {} patch_size {} beta {} gama {} weight_thred {}".format(type, h_patch_size, beta, gama,
- weight_thred))
-
- def forward(self, pred, gt, weight, mask, penalize_ratio=0.9, detach=False, occlusion_aware=True):
- """
-
- :param pred: [N_pts, Npx, 3]
- :param gt: [N_pts, Npx, 3]
- :param weight: [N_pts]
- :param mask: [N_pts]
- :return:
- """
-
- if detach:
- weight = weight.detach()
-
- if self.type == 'l1':
- error = torch.abs(pred - gt).mean(dim=-1, keepdim=False).sum(dim=-1, keepdim=False) # [N_pts]
- elif self.type == 'ncc':
- error = 1 - self.ncc(pred[:, None, :, :], gt)[:, 0] # ncc 1 positive, -1 negative
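- # sort patches by error and drop the worst (1 - penalize_ratio) fraction, which are typically occluded or boundary pixels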
- error, indices = torch.sort(error)
- mask = torch.index_select(mask, 0, index=indices)
- mask[int(penalize_ratio * mask.shape[0]):] = False # can help boundaries
- elif self.type == 'ssd':
- error = ((pred - gt) ** 2).mean(dim=-1, keepdim=False).sum(dim=-1, keepdim=False)
-
- error = error[mask]
- if not (self.occlusion_aware and occlusion_aware):
- return torch.mean(error), torch.mean(error), 0.
-
- # * weight adjuster
- beta = self.adjuster(weight.mean())
-
- # weight = weight[mask]
- weight = weight.clamp(0.0, 1.0)
-
- term1 = self.alpha * torch.mean(weight[mask] * error)
- term2 = beta * torch.log(1 - weight + self.eps).mean()
- term3 = self.gama * torch.log(weight + self.eps).mean()
-
- return term1 + term2 + term3, term1, beta
-
-
-class ParamAdjuster(nn.Module):
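- # Slowly increases the occlusion regularizer weight: if the mean visibility weight stays below the
- # current threshold for more than 30% of a 100-iteration window, the parameter is bumped by 0.005.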
- def __init__(self, weight_thred, param):
- super(ParamAdjuster, self).__init__()
- self.weight_thred = weight_thred
- self.thred_num = len(weight_thred)
- self.param = param
- self.global_step = 0
- self.statis_window = 100
- self.counter = 0
- self.adjusted = False
- self.adjusted_step = 0
- self.thred_idx = 0
-
- def reset(self):
- self.counter = 0
- self.adjusted = False
-
- def adjust(self):
- if (self.counter / self.statis_window) > 0.3:
- self.param = self.param + 0.005
- self.adjusted = True
- self.adjusted_step = self.global_step
- self.thred_idx += 1
- print("adjusted param, now {}".format(self.param))
-
- def forward(self, weight_mean):
- self.global_step += 1
-
- if (self.global_step % self.statis_window == 0) and self.adjusted is False:
- self.adjust()
- self.reset()
-
- if self.thred_idx < self.thred_num:
- if weight_mean < self.weight_thred[self.thred_idx] and (not self.adjusted):
- self.counter += 1
-
- return self.param
diff --git a/One-2-3-45-master 2/reconstruction/loss/depth_loss.py b/One-2-3-45-master 2/reconstruction/loss/depth_loss.py
deleted file mode 100644
index cba92851a79857ff6edd5c2f2eb12a2972b85bdc..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/loss/depth_loss.py
+++ /dev/null
@@ -1,71 +0,0 @@
-import torch
-import torch.nn as nn
-import torch.nn.functional as F
-
-
-class DepthLoss(nn.Module):
- def __init__(self, type='l1'):
- super(DepthLoss, self).__init__()
- self.type = type
-
-
- def forward(self, depth_pred, depth_gt, mask=None):
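- # skip the depth loss entirely if the batch contains invalid (negative) ground-truth depths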
- if (depth_gt < 0).sum() > 0:
- # print("no depth loss")
- return torch.tensor(0.0).to(depth_pred.device)
- if mask is not None:
- mask_d = (depth_gt > 0).float()
-
- mask = mask * mask_d
-
- mask_sum = mask.sum() + 1e-5
- depth_error = (depth_pred - depth_gt) * mask
- depth_loss = F.l1_loss(depth_error, torch.zeros_like(depth_error).to(depth_error.device),
- reduction='sum') / mask_sum
- else:
- depth_error = depth_pred - depth_gt
- depth_loss = F.l1_loss(depth_error, torch.zeros_like(depth_error).to(depth_error.device),
- reduction='mean')
- return depth_loss
-
-
-class DepthSmoothLoss(nn.Module):
- def __init__(self):
- super(DepthSmoothLoss, self).__init__()
-
- def forward(self, disp, img, mask):
- """
- Computes the smoothness loss for a disparity image
- The color image is used for edge-aware smoothness
- :param disp: [B, 1, H, W]
- :param img: [B, 1, H, W]
- :param mask: [B, 1, H, W]
- :return:
- """
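- # edge-aware weighting: disparity gradients are down-weighted where the image itself has strong gradients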
- grad_disp_x = torch.abs(disp[:, :, :, :-1] - disp[:, :, :, 1:])
- grad_disp_y = torch.abs(disp[:, :, :-1, :] - disp[:, :, 1:, :])
-
- grad_img_x = torch.mean(torch.abs(img[:, :, :, :-1] - img[:, :, :, 1:]), 1, keepdim=True)
- grad_img_y = torch.mean(torch.abs(img[:, :, :-1, :] - img[:, :, 1:, :]), 1, keepdim=True)
-
- grad_disp_x *= torch.exp(-grad_img_x)
- grad_disp_y *= torch.exp(-grad_img_y)
-
- grad_disp = (grad_disp_x * mask[:, :, :, :-1]).mean() + (grad_disp_y * mask[:, :, :-1, :]).mean()
-
- return grad_disp
diff --git a/One-2-3-45-master 2/reconstruction/loss/depth_metric.py b/One-2-3-45-master 2/reconstruction/loss/depth_metric.py
deleted file mode 100644
index e8b6249ac6a06906e20a344f468fc1c6e4b992ae..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/loss/depth_metric.py
+++ /dev/null
@@ -1,240 +0,0 @@
-import numpy as np
-
-
-def l1(depth1, depth2):
- """
- Computes the l1 errors between the two depth maps.
- Takes preprocessed depths (no nans, infs and non-positive values)
-
- depth1: one depth map
- depth2: another depth map
-
- Returns:
- L1
-
- """
- assert (np.all(np.isfinite(depth1) & np.isfinite(depth2) & (depth1 >= 0) & (depth2 >= 0)))
- diff = depth1 - depth2
- num_pixels = float(diff.size)
-
- if num_pixels == 0:
- return np.nan
- else:
- return np.sum(np.absolute(diff)) / num_pixels
-
-
-def l1_inverse(depth1, depth2):
- """
- Computes the l1 errors between inverses of two depth maps.
- Takes preprocessed depths (no nans, infs and non-positive values)
-
- depth1: one depth map
- depth2: another depth map
-
- Returns:
- L1 of the inverse depths
-
- """
- assert (np.all(np.isfinite(depth1) & np.isfinite(depth2) & (depth1 >= 0) & (depth2 >= 0)))
- diff = np.reciprocal(depth1) - np.reciprocal(depth2)
- num_pixels = float(diff.size)
-
- if num_pixels == 0:
- return np.nan
- else:
- return np.sum(np.absolute(diff)) / num_pixels
-
-
-def rmse_log(depth1, depth2):
- """
- Computes the root mean square error between the logs of two depth maps.
- Takes preprocessed depths (no nans, infs and non-positive values)
-
- depth1: one depth map
- depth2: another depth map
-
- Returns:
- RMSE(log)
-
- """
- assert (np.all(np.isfinite(depth1) & np.isfinite(depth2) & (depth1 >= 0) & (depth2 >= 0)))
- log_diff = np.log(depth1) - np.log(depth2)
- num_pixels = float(log_diff.size)
-
- if num_pixels == 0:
- return np.nan
- else:
- return np.sqrt(np.sum(np.square(log_diff)) / num_pixels)
-
-
-def rmse(depth1, depth2):
- """
- Computes the root mean square error between the two depth maps.
- Takes preprocessed depths (no nans, infs and non-positive values)
-
- depth1: one depth map
- depth2: another depth map
-
- Returns:
- RMSE
-
- """
- assert (np.all(np.isfinite(depth1) & np.isfinite(depth2) & (depth1 >= 0) & (depth2 >= 0)))
- diff = depth1 - depth2
- num_pixels = float(diff.size)
-
- if num_pixels == 0:
- return np.nan
- else:
- return np.sqrt(np.sum(np.square(diff)) / num_pixels)
-
-
-def scale_invariant(depth1, depth2):
- """
- Computes the scale invariant loss based on differences of logs of depth maps.
- Takes preprocessed depths (no nans, infs and non-positive values)
-
- depth1: one depth map
- depth2: another depth map
-
- Returns:
- scale_invariant_distance
-
- """
- # sqrt(Eq. 3)
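- # equivalently, the (population) standard deviation of the per-pixel log-depth differences, which is invariant to a global scale factor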
- assert (np.all(np.isfinite(depth1) & np.isfinite(depth2) & (depth1 >= 0) & (depth2 >= 0)))
- log_diff = np.log(depth1) - np.log(depth2)
- num_pixels = float(log_diff.size)
-
- if num_pixels == 0:
- return np.nan
- else:
- return np.sqrt(np.sum(np.square(log_diff)) / num_pixels - np.square(np.sum(log_diff)) / np.square(num_pixels))
-
-
-def abs_relative(depth_pred, depth_gt):
- """
- Computes relative absolute distance.
- Takes preprocessed depths (no nans, infs and non-positive values)
-
- depth_pred: depth map prediction
- depth_gt: depth map ground truth
-
- Returns:
- abs_relative_distance
-
- """
- assert (np.all(np.isfinite(depth_pred) & np.isfinite(depth_gt) & (depth_pred >= 0) & (depth_gt >= 0)))
- diff = depth_pred - depth_gt
- num_pixels = float(diff.size)
-
- if num_pixels == 0:
- return np.nan
- else:
- return np.sum(np.absolute(diff) / depth_gt) / num_pixels
-
-
-def avg_log10(depth1, depth2):
- """
- Computes average log_10 error (Liu, Neural Fields, 2015).
- Takes preprocessed depths (no nans, infs and non-positive values)
-
- depth1: one depth map
- depth2: another depth map
-
- Returns:
- average log10 error
-
- """
- assert (np.all(np.isfinite(depth1) & np.isfinite(depth2) & (depth1 >= 0) & (depth2 >= 0)))
- log_diff = np.log10(depth1) - np.log10(depth2)
- num_pixels = float(log_diff.size)
-
- if num_pixels == 0:
- return np.nan
- else:
- return np.sum(np.absolute(log_diff)) / num_pixels
-
-
-def sq_relative(depth_pred, depth_gt):
- """
- Computes relative squared distance.
- Takes preprocessed depths (no nans, infs and non-positive values)
-
- depth_pred: depth map prediction
- depth_gt: depth map ground truth
-
- Returns:
- squared_relative_distance
-
- """
- assert (np.all(np.isfinite(depth_pred) & np.isfinite(depth_gt) & (depth_pred >= 0) & (depth_gt >= 0)))
- diff = depth_pred - depth_gt
- num_pixels = float(diff.size)
-
- if num_pixels == 0:
- return np.nan
- else:
- return np.sum(np.square(diff) / depth_gt) / num_pixels
-
-
-def ratio_threshold(depth1, depth2, threshold):
- """
- Computes the percentage of pixels for which the ratio between the two depth maps (in either direction) is below a given threshold.
- Takes preprocessed depths (no nans, infs and non-positive values)
-
- depth1: one depth map
- depth2: another depth map
-
- Returns:
- percentage of pixels with ratio less than the threshold
-
- """
- assert (threshold > 0.)
- assert (np.all(np.isfinite(depth1) & np.isfinite(depth2) & (depth1 >= 0) & (depth2 >= 0)))
- log_diff = np.log(depth1) - np.log(depth2)
- num_pixels = float(log_diff.size)
-
- if num_pixels == 0:
- return np.nan
- else:
- return float(np.sum(np.absolute(log_diff) < np.log(threshold))) / num_pixels
-
-
-def compute_depth_errors(depth_pred, depth_gt, valid_mask):
- """
- Computes different distance measures between two depth maps.
-
- depth_pred: depth map prediction
- depth_gt: depth map ground truth
- valid_mask: boolean mask selecting the pixels to evaluate
-
- Returns:
- a dictionary with computed distances, and the number of valid pixels
-
- """
- depth_pred = depth_pred[valid_mask]
- depth_gt = depth_gt[valid_mask]
- num_valid = np.sum(valid_mask)
-
- distances_to_compute = ['l1',
- 'l1_inverse',
- 'scale_invariant',
- 'abs_relative',
- 'sq_relative',
- 'avg_log10',
- 'rmse_log',
- 'rmse',
- 'ratio_threshold_1.25',
- 'ratio_threshold_1.5625',
- 'ratio_threshold_1.953125']
-
- results = {'num_valid': num_valid}
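- # ratio thresholds carry their threshold value in the name; all other metrics are dispatched by name to the module-level functions above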
- for dist in distances_to_compute:
- if dist.startswith('ratio_threshold'):
- threshold = float(dist.split('_')[-1])
- results[dist] = ratio_threshold(depth_pred, depth_gt, threshold)
- else:
- results[dist] = globals()[dist](depth_pred, depth_gt)
-
- return results
diff --git a/One-2-3-45-master 2/reconstruction/loss/ncc.py b/One-2-3-45-master 2/reconstruction/loss/ncc.py
deleted file mode 100644
index 768fcefc3aab55d8e3fed49f23ffb4a974eec4ec..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/loss/ncc.py
+++ /dev/null
@@ -1,65 +0,0 @@
-import torch
-import torch.nn.functional as F
-import numpy as np
-from math import exp, sqrt
-
-
-class NCC(torch.nn.Module):
- def __init__(self, h_patch_size, mode='rgb'):
- super(NCC, self).__init__()
- self.window_size = 2 * h_patch_size + 1
- self.mode = mode # 'rgb' or 'gray'
- self.channel = 3
- self.register_buffer("window", create_window(self.window_size, self.channel))
-
- def forward(self, img_pred, img_gt):
- """
- :param img_pred: [Npx, nviews, npatch, c]
- :param img_gt: [Npx, npatch, c]
- :return:
- """
- ntotpx, nviews, npatch, channels = img_pred.shape
-
- patch_size = int(sqrt(npatch))
- patch_img_pred = img_pred.reshape(ntotpx, nviews, patch_size, patch_size, channels).permute(0, 1, 4, 2,
- 3).contiguous()
- patch_img_gt = img_gt.reshape(ntotpx, patch_size, patch_size, channels).permute(0, 3, 1, 2)
-
- return _ncc(patch_img_pred, patch_img_gt, self.window, self.channel)
-
-
-def gaussian(window_size, sigma):
- gauss = torch.Tensor([exp(-(x - window_size // 2) ** 2 / float(2 * sigma ** 2)) for x in range(window_size)])
- return gauss / gauss.sum()
-
-
-def create_window(window_size, channel, std=1.5):
- _1D_window = gaussian(window_size, std).unsqueeze(1)
- _2D_window = _1D_window.mm(_1D_window.t()).unsqueeze(0).unsqueeze(0)
- window = _2D_window.expand(channel, 1, window_size, window_size).contiguous()
- return window
-
-
-def _ncc(pred, gt, window, channel):
- ntotpx, nviews, nc, h, w = pred.shape
- flat_pred = pred.view(-1, nc, h, w)
- mu1 = F.conv2d(flat_pred, window, padding=0, groups=channel).view(ntotpx, nviews, nc)
- mu2 = F.conv2d(gt, window, padding=0, groups=channel).view(ntotpx, nc)
-
- mu1_sq = mu1.pow(2)
- mu2_sq = mu2.pow(2).unsqueeze(1) # (ntotpx, 1, nc)
-
- sigma1_sq = F.conv2d(flat_pred * flat_pred, window, padding=0, groups=channel).view(ntotpx, nviews, nc) - mu1_sq
- sigma2_sq = F.conv2d(gt * gt, window, padding=0, groups=channel).view(ntotpx, 1, 3) - mu2_sq
-
- sigma1 = torch.sqrt(sigma1_sq + 1e-4)
- sigma2 = torch.sqrt(sigma2_sq + 1e-4)
-
- pred_norm = (pred - mu1[:, :, :, None, None]) / (sigma1[:, :, :, None, None] + 1e-8) # [ntotpx, nviews, nc, h, w]
- gt_norm = (gt[:, None, :, :, :] - mu2[:, None, :, None, None]) / (
- sigma2[:, :, :, None, None] + 1e-8) # ntotpx, nc, h, w
-
- ncc = F.conv2d((pred_norm * gt_norm).view(-1, nc, h, w), window, padding=0, groups=channel).view(
- ntotpx, nviews, nc)
-
- return torch.mean(ncc, dim=2)
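The deleted `NCC` module scores warped patches with a Gaussian-windowed, per-channel normalized cross-correlation. The sketch below only illustrates the underlying statistic — the NCC of two flattened patches is invariant to affine intensity changes — and is not the repository's windowed implementation.

```python
import torch

def ncc(a, b, eps=1e-8):
    """Normalized cross-correlation between two flattened patches."""
    a = (a - a.mean()) / (a.std(unbiased=False) + eps)
    b = (b - b.mean()) / (b.std(unbiased=False) + eps)
    return (a * b).mean()

torch.manual_seed(0)
patch = torch.rand(11, 11)
brighter = 2.0 * patch + 0.3                 # affine intensity change (gain + offset)
noisy = patch + 0.5 * torch.randn_like(patch)

print(ncc(patch.flatten(), brighter.flatten()))  # ~1.0: NCC ignores gain/offset
print(ncc(patch.flatten(), noisy.flatten()))     # lower: structure partly destroyed
```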
diff --git a/One-2-3-45-master 2/reconstruction/models/__init__.py b/One-2-3-45-master 2/reconstruction/models/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/One-2-3-45-master 2/reconstruction/models/embedder.py b/One-2-3-45-master 2/reconstruction/models/embedder.py
deleted file mode 100644
index d327d92d9f64c0b32908dbee864160b65daa450e..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/models/embedder.py
+++ /dev/null
@@ -1,101 +0,0 @@
-import torch
-import torch.nn as nn
-
-""" Positional encoding embedding. Code was taken from https://github.com/bmild/nerf. """
-
-
-class Embedder:
- def __init__(self, **kwargs):
- self.kwargs = kwargs
- self.create_embedding_fn()
-
- def create_embedding_fn(self):
- embed_fns = []
- d = self.kwargs['input_dims']
- out_dim = 0
- if self.kwargs['include_input']:
- embed_fns.append(lambda x: x)
- out_dim += d
-
- max_freq = self.kwargs['max_freq_log2']
- N_freqs = self.kwargs['num_freqs']
-
- if self.kwargs['log_sampling']:
- freq_bands = 2. ** torch.linspace(0., max_freq, N_freqs)
- else:
- freq_bands = torch.linspace(2. ** 0., 2. ** max_freq, N_freqs)
-
- for freq in freq_bands:
- for p_fn in self.kwargs['periodic_fns']:
- if self.kwargs['normalize']:
- embed_fns.append(lambda x, p_fn=p_fn,
- freq=freq: p_fn(x * freq) / freq)
- else:
- embed_fns.append(lambda x, p_fn=p_fn,
- freq=freq: p_fn(x * freq))
- out_dim += d
-
- self.embed_fns = embed_fns
- self.out_dim = out_dim
-
- def embed(self, inputs):
- return torch.cat([fn(inputs) for fn in self.embed_fns], -1)
-
-
-def get_embedder(multires, normalize=False, input_dims=3):
- embed_kwargs = {
- 'include_input': True,
- 'input_dims': input_dims,
- 'max_freq_log2': multires - 1,
- 'num_freqs': multires,
- 'normalize': normalize,
- 'log_sampling': True,
- 'periodic_fns': [torch.sin, torch.cos],
- }
-
- embedder_obj = Embedder(**embed_kwargs)
-
- def embed(x, eo=embedder_obj): return eo.embed(x)
-
- return embed, embedder_obj.out_dim
-
-
-class Embedding(nn.Module):
- def __init__(self, in_channels, N_freqs, logscale=True, normalize=False):
- """
- Defines a function that embeds x to (x, sin(2^k x), cos(2^k x), ...)
- in_channels: number of input channels (3 for both xyz and direction)
- """
- super(Embedding, self).__init__()
- self.N_freqs = N_freqs
- self.in_channels = in_channels
- self.funcs = [torch.sin, torch.cos]
- self.out_channels = in_channels * (len(self.funcs) * N_freqs + 1)
- self.normalize = normalize
-
- if logscale:
- self.freq_bands = 2 ** torch.linspace(0, N_freqs - 1, N_freqs)
- else:
- self.freq_bands = torch.linspace(1, 2 ** (N_freqs - 1), N_freqs)
-
- def forward(self, x):
- """
- Embeds x to (x, sin(2^k x), cos(2^k x), ...)
- Different from the paper, "x" is also in the output
- See https://github.com/bmild/nerf/issues/12
-
- Inputs:
- x: (B, self.in_channels)
-
- Outputs:
- out: (B, self.out_channels)
- """
- out = [x]
- for freq in self.freq_bands:
- for func in self.funcs:
- if self.normalize:
- out += [func(freq * x) / freq]
- else:
- out += [func(freq * x)]
-
- return torch.cat(out, -1)
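Both `Embedder` and `Embedding` above implement the NeRF-style positional encoding x -> (x, sin(2^0 x), cos(2^0 x), ..., sin(2^(L-1) x), cos(2^(L-1) x)). The following compact stand-alone sketch reproduces the same mapping with our own helper name and without the optional 1/freq normalization used above.

```python
import torch

def positional_encode(x, num_freqs=4, include_input=True):
    """Map x to (x, sin(2^k x), cos(2^k x), ...) for k = 0 .. num_freqs-1."""
    freqs = 2.0 ** torch.arange(num_freqs, dtype=x.dtype)
    parts = [x] if include_input else []
    for f in freqs:
        parts += [torch.sin(f * x), torch.cos(f * x)]
    return torch.cat(parts, dim=-1)

pts = torch.rand(5, 3)                       # e.g. five xyz points
enc = positional_encode(pts, num_freqs=4)
print(enc.shape)                             # (5, 3 * (2*4 + 1)) = (5, 27)
```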
diff --git a/One-2-3-45-master 2/reconstruction/models/fast_renderer.py b/One-2-3-45-master 2/reconstruction/models/fast_renderer.py
deleted file mode 100644
index 1faeba85e5b156d0de12e430287d90f4a803aa92..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/models/fast_renderer.py
+++ /dev/null
@@ -1,316 +0,0 @@
-import torch
-import torch.nn.functional as F
-import torch.nn as nn
-from icecream import ic
-
-
-# - neus: use sphere tracing to speed up depth map extraction
-# This code snippet is heavily borrowed from IDR.
-class FastRenderer(nn.Module):
- def __init__(self):
- super(FastRenderer, self).__init__()
-
- self.sdf_threshold = 5e-5
- self.line_search_step = 0.5
- self.line_step_iters = 1
- self.sphere_tracing_iters = 10
- self.n_steps = 100
- self.n_secant_steps = 8
-
-        # - use sdf_network to infer the sdf value, or directly interpolate it from the precomputed sdf_volume
- self.network_inference = False
-
- def extract_depth_maps(self, rays_o, rays_d, near, far, sdf_network, conditional_volume):
- with torch.no_grad():
- curr_start_points, network_object_mask, acc_start_dis = self.get_intersection(
- rays_o, rays_d, near, far,
- sdf_network, conditional_volume)
-
- network_object_mask = network_object_mask.reshape(-1)
-
- return network_object_mask, acc_start_dis
-
- def get_intersection(self, rays_o, rays_d, near, far, sdf_network, conditional_volume):
- device = rays_o.device
- num_pixels, _ = rays_d.shape
-
- curr_start_points, unfinished_mask_start, acc_start_dis, acc_end_dis, min_dis, max_dis = \
- self.sphere_tracing(rays_o, rays_d, near, far, sdf_network, conditional_volume)
-
- network_object_mask = (acc_start_dis < acc_end_dis)
-
-        # The non-convergent rays should be handled by the sampler
- sampler_mask = unfinished_mask_start
- sampler_net_obj_mask = torch.zeros_like(sampler_mask).bool().to(device)
- if sampler_mask.sum() > 0:
- # sampler_min_max = torch.zeros((num_pixels, 2)).to(device)
- # sampler_min_max[sampler_mask, 0] = acc_start_dis[sampler_mask]
- # sampler_min_max[sampler_mask, 1] = acc_end_dis[sampler_mask]
-
- # ray_sampler(self, rays_o, rays_d, near, far, sampler_mask):
- sampler_pts, sampler_net_obj_mask, sampler_dists = self.ray_sampler(rays_o,
- rays_d,
- acc_start_dis,
- acc_end_dis,
- sampler_mask,
- sdf_network,
- conditional_volume
- )
-
- curr_start_points[sampler_mask] = sampler_pts[sampler_mask]
- acc_start_dis[sampler_mask] = sampler_dists[sampler_mask][:, None]
- network_object_mask[sampler_mask] = sampler_net_obj_mask[sampler_mask][:, None]
-
- # print('----------------------------------------------------------------')
- # print('RayTracing: object = {0}/{1}, secant on {2}/{3}.'
- # .format(network_object_mask.sum(), len(network_object_mask), sampler_net_obj_mask.sum(),
- # sampler_mask.sum()))
- # print('----------------------------------------------------------------')
-
- return curr_start_points, network_object_mask, acc_start_dis
-
- def sphere_tracing(self, rays_o, rays_d, near, far, sdf_network, conditional_volume):
- ''' Run sphere tracing algorithm for max iterations from both sides of unit sphere intersection '''
-
- device = rays_o.device
-
- unfinished_mask_start = (near < far).reshape(-1).clone()
- unfinished_mask_end = (near < far).reshape(-1).clone()
-
- # Initialize start current points
- curr_start_points = rays_o + rays_d * near
- acc_start_dis = near.clone()
-
- # Initialize end current points
- curr_end_points = rays_o + rays_d * far
- acc_end_dis = far.clone()
-
-        # Initialize min and max depth
- min_dis = acc_start_dis.clone()
- max_dis = acc_end_dis.clone()
-
- # Iterate on the rays (from both sides) till finding a surface
- iters = 0
-
- next_sdf_start = torch.zeros_like(acc_start_dis).to(device)
-
- if self.network_inference:
- sdf_func = sdf_network.sdf
- else:
- sdf_func = sdf_network.sdf_from_sdfvolume
-
- next_sdf_start[unfinished_mask_start] = sdf_func(
- curr_start_points[unfinished_mask_start],
- conditional_volume, lod=0, gru_fusion=False)['sdf_pts_scale%d' % 0]
-
- next_sdf_end = torch.zeros_like(acc_end_dis).to(device)
- next_sdf_end[unfinished_mask_end] = sdf_func(curr_end_points[unfinished_mask_end],
- conditional_volume, lod=0, gru_fusion=False)[
- 'sdf_pts_scale%d' % 0]
-
- while True:
- # Update sdf
- curr_sdf_start = torch.zeros_like(acc_start_dis).to(device)
- curr_sdf_start[unfinished_mask_start] = next_sdf_start[unfinished_mask_start]
- curr_sdf_start[curr_sdf_start <= self.sdf_threshold] = 0
-
- curr_sdf_end = torch.zeros_like(acc_end_dis).to(device)
- curr_sdf_end[unfinished_mask_end] = next_sdf_end[unfinished_mask_end]
- curr_sdf_end[curr_sdf_end <= self.sdf_threshold] = 0
-
- # Update masks
- unfinished_mask_start = unfinished_mask_start & (curr_sdf_start > self.sdf_threshold).reshape(-1)
- unfinished_mask_end = unfinished_mask_end & (curr_sdf_end > self.sdf_threshold).reshape(-1)
-
- if (
- unfinished_mask_start.sum() == 0 and unfinished_mask_end.sum() == 0) or iters == self.sphere_tracing_iters:
- break
- iters += 1
-
- # Make step
- # Update distance
- acc_start_dis = acc_start_dis + curr_sdf_start
- acc_end_dis = acc_end_dis - curr_sdf_end
-
- # Update points
- curr_start_points = rays_o + acc_start_dis * rays_d
- curr_end_points = rays_o + acc_end_dis * rays_d
-
- # Fix points which wrongly crossed the surface
- next_sdf_start = torch.zeros_like(acc_start_dis).to(device)
- if unfinished_mask_start.sum() > 0:
- next_sdf_start[unfinished_mask_start] = sdf_func(curr_start_points[unfinished_mask_start],
- conditional_volume, lod=0, gru_fusion=False)[
- 'sdf_pts_scale%d' % 0]
-
- next_sdf_end = torch.zeros_like(acc_end_dis).to(device)
- if unfinished_mask_end.sum() > 0:
- next_sdf_end[unfinished_mask_end] = sdf_func(curr_end_points[unfinished_mask_end],
- conditional_volume, lod=0, gru_fusion=False)[
- 'sdf_pts_scale%d' % 0]
-
- not_projected_start = (next_sdf_start < 0).reshape(-1)
- not_projected_end = (next_sdf_end < 0).reshape(-1)
- not_proj_iters = 0
-
- while (
- not_projected_start.sum() > 0 or not_projected_end.sum() > 0) and not_proj_iters < self.line_step_iters:
- # Step backwards
- if not_projected_start.sum() > 0:
- acc_start_dis[not_projected_start] -= ((1 - self.line_search_step) / (2 ** not_proj_iters)) * \
- curr_sdf_start[not_projected_start]
- curr_start_points[not_projected_start] = (rays_o + acc_start_dis * rays_d)[not_projected_start]
-
- next_sdf_start[not_projected_start] = sdf_func(
- curr_start_points[not_projected_start],
- conditional_volume, lod=0, gru_fusion=False)['sdf_pts_scale%d' % 0]
-
- if not_projected_end.sum() > 0:
- acc_end_dis[not_projected_end] += ((1 - self.line_search_step) / (2 ** not_proj_iters)) * \
- curr_sdf_end[
- not_projected_end]
- curr_end_points[not_projected_end] = (rays_o + acc_end_dis * rays_d)[not_projected_end]
-
- # Calc sdf
-
- next_sdf_end[not_projected_end] = sdf_func(
- curr_end_points[not_projected_end],
- conditional_volume, lod=0, gru_fusion=False)['sdf_pts_scale%d' % 0]
-
- # Update mask
- not_projected_start = (next_sdf_start < 0).reshape(-1)
- not_projected_end = (next_sdf_end < 0).reshape(-1)
- not_proj_iters += 1
-
- unfinished_mask_start = unfinished_mask_start & (acc_start_dis < acc_end_dis).reshape(-1)
- unfinished_mask_end = unfinished_mask_end & (acc_start_dis < acc_end_dis).reshape(-1)
-
- return curr_start_points, unfinished_mask_start, acc_start_dis, acc_end_dis, min_dis, max_dis
-
- def ray_sampler(self, rays_o, rays_d, near, far, sampler_mask, sdf_network, conditional_volume):
- ''' Sample the ray in a given range and run secant on rays which have sign transition '''
- device = rays_o.device
- num_pixels, _ = rays_d.shape
- sampler_pts = torch.zeros(num_pixels, 3).to(device).float()
- sampler_dists = torch.zeros(num_pixels).to(device).float()
-
- intervals_dist = torch.linspace(0, 1, steps=self.n_steps).to(device).view(1, -1)
-
- pts_intervals = near + intervals_dist * (far - near)
- points = rays_o[:, None, :] + pts_intervals[:, :, None] * rays_d[:, None, :]
-
-        # Get the non-convergent rays
- mask_intersect_idx = torch.nonzero(sampler_mask).flatten()
- points = points.reshape((-1, self.n_steps, 3))[sampler_mask, :, :]
- pts_intervals = pts_intervals.reshape((-1, self.n_steps))[sampler_mask]
-
- if self.network_inference:
- sdf_func = sdf_network.sdf
- else:
- sdf_func = sdf_network.sdf_from_sdfvolume
-
- sdf_val_all = []
- for pnts in torch.split(points.reshape(-1, 3), 100000, dim=0):
- sdf_val_all.append(sdf_func(pnts,
- conditional_volume, lod=0, gru_fusion=False)['sdf_pts_scale%d' % 0])
- sdf_val = torch.cat(sdf_val_all).reshape(-1, self.n_steps)
-
- tmp = torch.sign(sdf_val) * torch.arange(self.n_steps, 0, -1).to(device).float().reshape(
- (1, self.n_steps)) # Force argmin to return the first min value
- sampler_pts_ind = torch.argmin(tmp, -1)
- sampler_pts[mask_intersect_idx] = points[torch.arange(points.shape[0]), sampler_pts_ind, :]
- sampler_dists[mask_intersect_idx] = pts_intervals[torch.arange(pts_intervals.shape[0]), sampler_pts_ind]
-
- net_surface_pts = (sdf_val[torch.arange(sdf_val.shape[0]), sampler_pts_ind] < 0)
-
- # take points with minimal SDF value for P_out pixels
- p_out_mask = ~net_surface_pts
- n_p_out = p_out_mask.sum()
- if n_p_out > 0:
- out_pts_idx = torch.argmin(sdf_val[p_out_mask, :], -1)
- sampler_pts[mask_intersect_idx[p_out_mask]] = points[p_out_mask, :, :][torch.arange(n_p_out), out_pts_idx,
- :]
- sampler_dists[mask_intersect_idx[p_out_mask]] = pts_intervals[p_out_mask, :][
- torch.arange(n_p_out), out_pts_idx]
-
- # Get Network object mask
- sampler_net_obj_mask = sampler_mask.clone()
- sampler_net_obj_mask[mask_intersect_idx[~net_surface_pts]] = False
-
- # Run Secant method
- secant_pts = net_surface_pts
- n_secant_pts = secant_pts.sum()
- if n_secant_pts > 0:
- # Get secant z predictions
- z_high = pts_intervals[torch.arange(pts_intervals.shape[0]), sampler_pts_ind][secant_pts]
- sdf_high = sdf_val[torch.arange(sdf_val.shape[0]), sampler_pts_ind][secant_pts]
- z_low = pts_intervals[secant_pts][torch.arange(n_secant_pts), sampler_pts_ind[secant_pts] - 1]
- sdf_low = sdf_val[secant_pts][torch.arange(n_secant_pts), sampler_pts_ind[secant_pts] - 1]
-
- cam_loc_secant = rays_o[mask_intersect_idx[secant_pts]]
- ray_directions_secant = rays_d[mask_intersect_idx[secant_pts]]
- z_pred_secant = self.secant(sdf_low, sdf_high, z_low, z_high, cam_loc_secant, ray_directions_secant,
- sdf_network, conditional_volume)
-
- # Get points
- sampler_pts[mask_intersect_idx[secant_pts]] = cam_loc_secant + z_pred_secant[:,
- None] * ray_directions_secant
- sampler_dists[mask_intersect_idx[secant_pts]] = z_pred_secant
-
- return sampler_pts, sampler_net_obj_mask, sampler_dists
-
- def secant(self, sdf_low, sdf_high, z_low, z_high, rays_o, rays_d, sdf_network, conditional_volume):
- ''' Runs the secant method for interval [z_low, z_high] for n_secant_steps '''
-
- if self.network_inference:
- sdf_func = sdf_network.sdf
- else:
- sdf_func = sdf_network.sdf_from_sdfvolume
-
- z_pred = -sdf_low * (z_high - z_low) / (sdf_high - sdf_low) + z_low
- for i in range(self.n_secant_steps):
- p_mid = rays_o + z_pred[:, None] * rays_d
- sdf_mid = sdf_func(p_mid,
- conditional_volume, lod=0, gru_fusion=False)['sdf_pts_scale%d' % 0].reshape(-1)
- ind_low = (sdf_mid > 0).reshape(-1)
- if ind_low.sum() > 0:
- z_low[ind_low] = z_pred[ind_low]
- sdf_low[ind_low] = sdf_mid[ind_low]
- ind_high = sdf_mid < 0
- if ind_high.sum() > 0:
- z_high[ind_high] = z_pred[ind_high]
- sdf_high[ind_high] = sdf_mid[ind_high]
-
- z_pred = - sdf_low * (z_high - z_low) / (sdf_high - sdf_low) + z_low
-
- return z_pred # 1D tensor
-
- def minimal_sdf_points(self, num_pixels, sdf, cam_loc, ray_directions, mask, min_dis, max_dis):
- ''' Find points with minimal SDF value on rays for P_out pixels '''
- device = sdf.device
- n_mask_points = mask.sum()
-
- n = self.n_steps
- # steps = torch.linspace(0.0, 1.0,n).to(device)
- steps = torch.empty(n).uniform_(0.0, 1.0).to(device)
- mask_max_dis = max_dis[mask].unsqueeze(-1)
- mask_min_dis = min_dis[mask].unsqueeze(-1)
- steps = steps.unsqueeze(0).repeat(n_mask_points, 1) * (mask_max_dis - mask_min_dis) + mask_min_dis
-
- mask_points = cam_loc.unsqueeze(1).repeat(1, num_pixels, 1).reshape(-1, 3)[mask]
- mask_rays = ray_directions[mask, :]
-
- mask_points_all = mask_points.unsqueeze(1).repeat(1, n, 1) + steps.unsqueeze(-1) * mask_rays.unsqueeze(
- 1).repeat(1, n, 1)
- points = mask_points_all.reshape(-1, 3)
-
- mask_sdf_all = []
- for pnts in torch.split(points, 100000, dim=0):
- mask_sdf_all.append(sdf(pnts))
-
- mask_sdf_all = torch.cat(mask_sdf_all).reshape(-1, n)
- min_vals, min_idx = mask_sdf_all.min(-1)
- min_mask_points = mask_points_all.reshape(-1, n, 3)[torch.arange(0, n_mask_points), min_idx]
- min_mask_dist = steps.reshape(-1, n)[torch.arange(0, n_mask_points), min_idx]
-
- return min_mask_points, min_mask_dist
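`FastRenderer` combines sphere tracing from both ends of the near/far interval with uniform sampling plus a secant refinement for rays the tracer does not finish. The toy sketch below shows only the core sphere-tracing step against an analytic sphere SDF standing in for the network; it omits the two-sided tracing, the sampler, and the secant stage.

```python
import torch

def sphere_sdf(p, radius=0.5):
    """Signed distance to a sphere of the given radius centered at the origin."""
    return p.norm(dim=-1) - radius

def trace(rays_o, rays_d, sdf, n_iters=32, eps=1e-4):
    """March each ray forward by the SDF value until |sdf| < eps (basic sphere tracing)."""
    t = torch.zeros(rays_o.shape[0])
    for _ in range(n_iters):
        d = sdf(rays_o + t[:, None] * rays_d)
        t = t + d                                  # step exactly by the signed distance
        if (d.abs() < eps).all():
            break
    hit = sphere_sdf(rays_o + t[:, None] * rays_d).abs() < eps
    return t, hit

rays_o = torch.tensor([[0.0, 0.0, -2.0]]).repeat(3, 1)
rays_d = torch.tensor([[0.0, 0.0, 1.0], [0.0, 0.1, 1.0], [0.0, 1.0, 1.0]])
rays_d = rays_d / rays_d.norm(dim=-1, keepdim=True)
t, hit = trace(rays_o, rays_d, sphere_sdf)
print(t, hit)    # first two rays hit the radius-0.5 sphere, the third misses
```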
diff --git a/One-2-3-45-master 2/reconstruction/models/featurenet.py b/One-2-3-45-master 2/reconstruction/models/featurenet.py
deleted file mode 100644
index 652e65967708f57a1722c5951d53e72f05ddf1d3..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/models/featurenet.py
+++ /dev/null
@@ -1,91 +0,0 @@
-import torch
-
-# ! amazing!!!! autograd.grad with set_detect_anomaly(True) will cause memory leak
-# ! https://github.com/pytorch/pytorch/issues/51349
-# torch.autograd.set_detect_anomaly(True)
-import torch.nn as nn
-import torch.nn.functional as F
-from inplace_abn import InPlaceABN
-
-
-############################################# MVS Net models ################################################
-class ConvBnReLU(nn.Module):
- def __init__(self, in_channels, out_channels,
- kernel_size=3, stride=1, pad=1,
- norm_act=InPlaceABN):
- super(ConvBnReLU, self).__init__()
- self.conv = nn.Conv2d(in_channels, out_channels,
- kernel_size, stride=stride, padding=pad, bias=False)
- self.bn = norm_act(out_channels)
-
- def forward(self, x):
- return self.bn(self.conv(x))
-
-
-class ConvBnReLU3D(nn.Module):
- def __init__(self, in_channels, out_channels,
- kernel_size=3, stride=1, pad=1,
- norm_act=InPlaceABN):
- super(ConvBnReLU3D, self).__init__()
- self.conv = nn.Conv3d(in_channels, out_channels,
- kernel_size, stride=stride, padding=pad, bias=False)
- self.bn = norm_act(out_channels)
- # self.bn = nn.ReLU()
-
- def forward(self, x):
- return self.bn(self.conv(x))
-
-
-################################### feature net ######################################
-class FeatureNet(nn.Module):
- """
-    Outputs 3 levels of features using an FPN structure.
- """
-
- def __init__(self, norm_act=InPlaceABN):
- super(FeatureNet, self).__init__()
-
- self.conv0 = nn.Sequential(
- ConvBnReLU(3, 8, 3, 1, 1, norm_act=norm_act),
- ConvBnReLU(8, 8, 3, 1, 1, norm_act=norm_act))
-
- self.conv1 = nn.Sequential(
- ConvBnReLU(8, 16, 5, 2, 2, norm_act=norm_act),
- ConvBnReLU(16, 16, 3, 1, 1, norm_act=norm_act),
- ConvBnReLU(16, 16, 3, 1, 1, norm_act=norm_act))
-
- self.conv2 = nn.Sequential(
- ConvBnReLU(16, 32, 5, 2, 2, norm_act=norm_act),
- ConvBnReLU(32, 32, 3, 1, 1, norm_act=norm_act),
- ConvBnReLU(32, 32, 3, 1, 1, norm_act=norm_act))
-
- self.toplayer = nn.Conv2d(32, 32, 1)
- self.lat1 = nn.Conv2d(16, 32, 1)
- self.lat0 = nn.Conv2d(8, 32, 1)
-
- # to reduce channel size of the outputs from FPN
- self.smooth1 = nn.Conv2d(32, 16, 3, padding=1)
- self.smooth0 = nn.Conv2d(32, 8, 3, padding=1)
-
- def _upsample_add(self, x, y):
- return F.interpolate(x, scale_factor=2,
- mode="bilinear", align_corners=True) + y
-
- def forward(self, x):
- # x: (B, 3, H, W)
- conv0 = self.conv0(x) # (B, 8, H, W)
- conv1 = self.conv1(conv0) # (B, 16, H//2, W//2)
- conv2 = self.conv2(conv1) # (B, 32, H//4, W//4)
- feat2 = self.toplayer(conv2) # (B, 32, H//4, W//4)
- feat1 = self._upsample_add(feat2, self.lat1(conv1)) # (B, 32, H//2, W//2)
- feat0 = self._upsample_add(feat1, self.lat0(conv0)) # (B, 32, H, W)
-
- # reduce output channels
- feat1 = self.smooth1(feat1) # (B, 16, H//2, W//2)
- feat0 = self.smooth0(feat0) # (B, 8, H, W)
-
- # feats = {"level_0": feat0,
- # "level_1": feat1,
- # "level_2": feat2}
-
- return [feat2, feat1, feat0] # coarser to finer features
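`FeatureNet` is a small FPN: three strided convolution stages merged top-down through 1x1 lateral convolutions. Below is a rough stand-alone sketch of that wiring with plain `Conv2d` layers (no `InPlaceABN`, no smoothing convs), mainly to make the output resolutions explicit; it is not the original module.

```python
import torch
import torch.nn as nn
import torch.nn.functional as F

class TinyFPN(nn.Module):
    """Three conv stages (strides 1, 2, 2) merged top-down with 1x1 lateral convs."""
    def __init__(self):
        super().__init__()
        self.c0 = nn.Conv2d(3, 8, 3, 1, 1)
        self.c1 = nn.Conv2d(8, 16, 5, 2, 2)
        self.c2 = nn.Conv2d(16, 32, 5, 2, 2)
        self.top = nn.Conv2d(32, 32, 1)
        self.lat1 = nn.Conv2d(16, 32, 1)
        self.lat0 = nn.Conv2d(8, 32, 1)

    def forward(self, x):
        def up(t):  # upsample by 2 before adding the lateral feature
            return F.interpolate(t, scale_factor=2, mode="bilinear", align_corners=True)
        c0 = self.c0(x)
        c1 = self.c1(F.relu(c0))
        c2 = self.c2(F.relu(c1))
        f2 = self.top(c2)
        f1 = up(f2) + self.lat1(c1)
        f0 = up(f1) + self.lat0(c0)
        return f2, f1, f0      # coarse to fine, like the deleted FeatureNet

x = torch.rand(1, 3, 64, 64)
for f in TinyFPN()(x):
    print(f.shape)   # (1, 32, 16, 16), (1, 32, 32, 32), (1, 32, 64, 64)
```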
diff --git a/One-2-3-45-master 2/reconstruction/models/fields.py b/One-2-3-45-master 2/reconstruction/models/fields.py
deleted file mode 100644
index 184e4a55399f56f8f505379ce4a14add8821c4c4..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/models/fields.py
+++ /dev/null
@@ -1,333 +0,0 @@
-# The code is from NeuS
-
-import torch
-import torch.nn as nn
-import torch.nn.functional as F
-import numpy as np
-from models.embedder import get_embedder
-
-
-class SDFNetwork(nn.Module):
- def __init__(self,
- d_in,
- d_out,
- d_hidden,
- n_layers,
- skip_in=(4,),
- multires=0,
- bias=0.5,
- scale=1,
- geometric_init=True,
- weight_norm=True,
- activation='softplus',
- conditional_type='multiply'):
- super(SDFNetwork, self).__init__()
-
- dims = [d_in] + [d_hidden for _ in range(n_layers)] + [d_out]
-
- self.embed_fn_fine = None
-
- if multires > 0:
- embed_fn, input_ch = get_embedder(multires, input_dims=d_in, normalize=False)
- self.embed_fn_fine = embed_fn
- dims[0] = input_ch
-
- self.num_layers = len(dims)
- self.skip_in = skip_in
- self.scale = scale
-
- for l in range(0, self.num_layers - 1):
- if l + 1 in self.skip_in:
- out_dim = dims[l + 1] - dims[0]
- else:
- out_dim = dims[l + 1]
-
- lin = nn.Linear(dims[l], out_dim)
-
- if geometric_init:
- if l == self.num_layers - 2:
- torch.nn.init.normal_(lin.weight, mean=np.sqrt(np.pi) / np.sqrt(dims[l]), std=0.0001)
- torch.nn.init.constant_(lin.bias, -bias)
- elif multires > 0 and l == 0:
- torch.nn.init.constant_(lin.bias, 0.0)
- torch.nn.init.constant_(lin.weight[:, 3:], 0.0)
- torch.nn.init.normal_(lin.weight[:, :3], 0.0, np.sqrt(2) / np.sqrt(out_dim))
- elif multires > 0 and l in self.skip_in:
- torch.nn.init.constant_(lin.bias, 0.0)
- torch.nn.init.normal_(lin.weight, 0.0, np.sqrt(2) / np.sqrt(out_dim))
- torch.nn.init.constant_(lin.weight[:, -(dims[0] - 3):], 0.0) # ? why dims[0] - 3
- else:
- torch.nn.init.constant_(lin.bias, 0.0)
- torch.nn.init.normal_(lin.weight, 0.0, np.sqrt(2) / np.sqrt(out_dim))
-
- if weight_norm:
- lin = nn.utils.weight_norm(lin)
-
- setattr(self, "lin" + str(l), lin)
-
- if activation == 'softplus':
- self.activation = nn.Softplus(beta=100)
- else:
- assert activation == 'relu'
- self.activation = nn.ReLU()
-
- def forward(self, inputs):
- inputs = inputs * self.scale
- if self.embed_fn_fine is not None:
- inputs = self.embed_fn_fine(inputs)
-
- x = inputs
- for l in range(0, self.num_layers - 1):
- lin = getattr(self, "lin" + str(l))
-
- if l in self.skip_in:
- x = torch.cat([x, inputs], 1) / np.sqrt(2)
-
- x = lin(x)
-
- if l < self.num_layers - 2:
- x = self.activation(x)
- return torch.cat([x[:, :1] / self.scale, x[:, 1:]], dim=-1)
-
- def sdf(self, x):
- return self.forward(x)[:, :1]
-
- def sdf_hidden_appearance(self, x):
- return self.forward(x)
-
- def gradient(self, x):
- x.requires_grad_(True)
- y = self.sdf(x)
- d_output = torch.ones_like(y, requires_grad=False, device=y.device)
- gradients = torch.autograd.grad(
- outputs=y,
- inputs=x,
- grad_outputs=d_output,
- create_graph=True,
- retain_graph=True,
- only_inputs=True)[0]
- return gradients.unsqueeze(1)
-
-
-class VarianceNetwork(nn.Module):
- def __init__(self, d_in, d_out, d_hidden, n_layers, skip_in=(4,), multires=0):
- super(VarianceNetwork, self).__init__()
-
- dims = [d_in] + [d_hidden for _ in range(n_layers)] + [d_out]
-
- self.embed_fn_fine = None
-
- if multires > 0:
- embed_fn, input_ch = get_embedder(multires, normalize=False)
- self.embed_fn_fine = embed_fn
- dims[0] = input_ch
-
- self.num_layers = len(dims)
- self.skip_in = skip_in
-
- for l in range(0, self.num_layers - 1):
- if l + 1 in self.skip_in:
- out_dim = dims[l + 1] - dims[0]
- else:
- out_dim = dims[l + 1]
-
- lin = nn.Linear(dims[l], out_dim)
- setattr(self, "lin" + str(l), lin)
-
- self.relu = nn.ReLU()
- self.softplus = nn.Softplus(beta=100)
-
- def forward(self, inputs):
- if self.embed_fn_fine is not None:
- inputs = self.embed_fn_fine(inputs)
-
- x = inputs
- for l in range(0, self.num_layers - 1):
- lin = getattr(self, "lin" + str(l))
-
- if l in self.skip_in:
- x = torch.cat([x, inputs], 1) / np.sqrt(2)
-
- x = lin(x)
-
- if l < self.num_layers - 2:
- x = self.relu(x)
-
- # return torch.exp(x)
- return 1.0 / (self.softplus(x + 0.5) + 1e-3)
-
- def coarse(self, inputs):
- return self.forward(inputs)[:, :1]
-
- def fine(self, inputs):
- return self.forward(inputs)[:, 1:]
-
-
-class FixVarianceNetwork(nn.Module):
- def __init__(self, base):
- super(FixVarianceNetwork, self).__init__()
- self.base = base
- self.iter_step = 0
-
- def set_iter_step(self, iter_step):
- self.iter_step = iter_step
-
- def forward(self, x):
- return torch.ones([len(x), 1]) * np.exp(-self.iter_step / self.base)
-
-
-class SingleVarianceNetwork(nn.Module):
- def __init__(self, init_val=1.0):
- super(SingleVarianceNetwork, self).__init__()
- self.register_parameter('variance', nn.Parameter(torch.tensor(init_val)))
-
- def forward(self, x):
- return torch.ones([len(x), 1]).to(x.device) * torch.exp(self.variance * 10.0)
-
-
-
-class RenderingNetwork(nn.Module):
- def __init__(
- self,
- d_feature,
- mode,
- d_in,
- d_out,
- d_hidden,
- n_layers,
- weight_norm=True,
- multires_view=0,
- squeeze_out=True,
- d_conditional_colors=0
- ):
- super().__init__()
-
- self.mode = mode
- self.squeeze_out = squeeze_out
- dims = [d_in + d_feature] + [d_hidden for _ in range(n_layers)] + [d_out]
-
- self.embedview_fn = None
- if multires_view > 0:
- embedview_fn, input_ch = get_embedder(multires_view)
- self.embedview_fn = embedview_fn
- dims[0] += (input_ch - 3)
-
- self.num_layers = len(dims)
-
- for l in range(0, self.num_layers - 1):
- out_dim = dims[l + 1]
- lin = nn.Linear(dims[l], out_dim)
-
- if weight_norm:
- lin = nn.utils.weight_norm(lin)
-
- setattr(self, "lin" + str(l), lin)
-
- self.relu = nn.ReLU()
-
- def forward(self, points, normals, view_dirs, feature_vectors):
- if self.embedview_fn is not None:
- view_dirs = self.embedview_fn(view_dirs)
-
- rendering_input = None
-
- if self.mode == 'idr':
- rendering_input = torch.cat([points, view_dirs, normals, feature_vectors], dim=-1)
- elif self.mode == 'no_view_dir':
- rendering_input = torch.cat([points, normals, feature_vectors], dim=-1)
- elif self.mode == 'no_normal':
- rendering_input = torch.cat([points, view_dirs, feature_vectors], dim=-1)
- elif self.mode == 'no_points':
- rendering_input = torch.cat([view_dirs, normals, feature_vectors], dim=-1)
- elif self.mode == 'no_points_no_view_dir':
- rendering_input = torch.cat([normals, feature_vectors], dim=-1)
-
- x = rendering_input
-
- for l in range(0, self.num_layers - 1):
- lin = getattr(self, "lin" + str(l))
-
- x = lin(x)
-
- if l < self.num_layers - 2:
- x = self.relu(x)
-
- if self.squeeze_out:
- x = torch.sigmoid(x)
- return x
-
-
-# Code from nerf-pytorch
-class NeRF(nn.Module):
- def __init__(self, D=8, W=256, d_in=3, d_in_view=3, multires=0, multires_view=0, output_ch=4, skips=[4],
- use_viewdirs=False):
- """
- """
- super(NeRF, self).__init__()
- self.D = D
- self.W = W
- self.d_in = d_in
- self.d_in_view = d_in_view
- self.input_ch = 3
- self.input_ch_view = 3
- self.embed_fn = None
- self.embed_fn_view = None
-
- if multires > 0:
- embed_fn, input_ch = get_embedder(multires, input_dims=d_in, normalize=False)
- self.embed_fn = embed_fn
- self.input_ch = input_ch
-
- if multires_view > 0:
- embed_fn_view, input_ch_view = get_embedder(multires_view, input_dims=d_in_view, normalize=False)
- self.embed_fn_view = embed_fn_view
- self.input_ch_view = input_ch_view
-
- self.skips = skips
- self.use_viewdirs = use_viewdirs
-
- self.pts_linears = nn.ModuleList(
- [nn.Linear(self.input_ch, W)] + [nn.Linear(W, W) if i not in self.skips else nn.Linear(W + self.input_ch, W)
- for i in
- range(D - 1)])
-
- ### Implementation according to the official code release (https://github.com/bmild/nerf/blob/master/run_nerf_helpers.py#L104-L105)
- self.views_linears = nn.ModuleList([nn.Linear(self.input_ch_view + W, W // 2)])
-
- ### Implementation according to the paper
- # self.views_linears = nn.ModuleList(
- # [nn.Linear(input_ch_views + W, W//2)] + [nn.Linear(W//2, W//2) for i in range(D//2)])
-
- if use_viewdirs:
- self.feature_linear = nn.Linear(W, W)
- self.alpha_linear = nn.Linear(W, 1)
- self.rgb_linear = nn.Linear(W // 2, 3)
- else:
- self.output_linear = nn.Linear(W, output_ch)
-
- def forward(self, input_pts, input_views):
- if self.embed_fn is not None:
- input_pts = self.embed_fn(input_pts)
- if self.embed_fn_view is not None:
- input_views = self.embed_fn_view(input_views)
-
- h = input_pts
- for i, l in enumerate(self.pts_linears):
- h = self.pts_linears[i](h)
- h = F.relu(h)
- if i in self.skips:
- h = torch.cat([input_pts, h], -1)
-
- if self.use_viewdirs:
- alpha = self.alpha_linear(h)
- feature = self.feature_linear(h)
- h = torch.cat([feature, input_views], -1)
-
- for i, l in enumerate(self.views_linears):
- h = self.views_linears[i](h)
- h = F.relu(h)
-
- rgb = self.rgb_linear(h)
- return alpha + 1.0, rgb
- else:
- assert False
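`SDFNetwork.gradient` obtains surface normals by differentiating the predicted SDF with respect to its input via `torch.autograd.grad`. The same pattern on an analytic sphere SDF (standing in for the network) is sketched below; for a true SDF the gradient has unit norm, which is what NeuS-style eikonal regularization encourages.

```python
import torch

def sdf(x):
    """Analytic SDF of a unit sphere; stands in for SDFNetwork.sdf."""
    return x.norm(dim=-1, keepdim=True) - 1.0

x = torch.rand(4, 3) * 2 - 1
x.requires_grad_(True)
y = sdf(x)
grad = torch.autograd.grad(
    outputs=y, inputs=x,
    grad_outputs=torch.ones_like(y),   # sum the per-point SDF values
    create_graph=True)[0]

print(grad)                  # points radially outward from the origin
print(grad.norm(dim=-1))     # ~1 everywhere: the eikonal property |grad sdf| = 1
```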
diff --git a/One-2-3-45-master 2/reconstruction/models/patch_projector.py b/One-2-3-45-master 2/reconstruction/models/patch_projector.py
deleted file mode 100644
index 24bb64527a1f9a9a1c6db8cd290d38f65b63b6d4..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/models/patch_projector.py
+++ /dev/null
@@ -1,211 +0,0 @@
-"""
-Patch Projector
-"""
-import torch
-import torch.nn as nn
-import torch.nn.functional as F
-import numpy as np
-from models.render_utils import sample_ptsFeatures_from_featureMaps
-
-
-class PatchProjector():
- def __init__(self, patch_size):
- self.h_patch_size = patch_size
- self.offsets = build_patch_offset(patch_size) # the warping patch offsets index
-
- self.z_axis = torch.tensor([0, 0, 1]).float()
-
- self.plane_dist_thresh = 0.001
-
- # * correctness checked
- def pixel_warp(self, pts, imgs, intrinsics,
- w2cs, img_wh=None):
- """
-
- :param pts: [N_rays, n_samples, 3]
- :param imgs: [N_views, 3, H, W]
- :param intrinsics: [N_views, 4, 4]
-        :param w2cs: [N_views, 4, 4]
- :param img_wh:
- :return:
- """
- if img_wh is None:
- N_views, _, sizeH, sizeW = imgs.shape
- img_wh = [sizeW, sizeH]
-
- pts_color, valid_mask = sample_ptsFeatures_from_featureMaps(
- pts, imgs, w2cs, intrinsics, img_wh,
- proj_matrix=None, return_mask=True) # [N_views, c, N_rays, n_samples], [N_views, N_rays, n_samples]
-
- pts_color = pts_color.permute(2, 3, 0, 1)
- valid_mask = valid_mask.permute(1, 2, 0)
-
- return pts_color, valid_mask # [N_rays, n_samples, N_views, 3] , [N_rays, n_samples, N_views]
-
- def patch_warp(self, pts, uv, normals, src_imgs,
- ref_intrinsic, src_intrinsics,
- ref_c2w, src_c2ws, img_wh=None
- ):
- """
-
- :param pts: [N_rays, n_samples, 3]
- :param uv : [N_rays, 2] normalized in (-1, 1)
- :param normals: [N_rays, n_samples, 3] The normal of pt in world space
- :param src_imgs: [N_src, 3, h, w]
- :param ref_intrinsic: [4,4]
- :param src_intrinsics: [N_src, 4, 4]
- :param ref_c2w: [4,4]
- :param src_c2ws: [N_src, 4, 4]
- :return:
- """
- device = pts.device
-
- N_rays, n_samples, _ = pts.shape
- N_pts = N_rays * n_samples
-
- N_src, _, sizeH, sizeW = src_imgs.shape
-
- if img_wh is not None:
- sizeW, sizeH = img_wh[0], img_wh[1]
-
- # scale uv from (-1, 1) to (0, W/H)
- uv[:, 0] = (uv[:, 0] + 1) / 2. * (sizeW - 1)
- uv[:, 1] = (uv[:, 1] + 1) / 2. * (sizeH - 1)
-
- ref_intr = ref_intrinsic[:3, :3]
- inv_ref_intr = torch.inverse(ref_intr)
- src_intrs = src_intrinsics[:, :3, :3]
- inv_src_intrs = torch.inverse(src_intrs)
-
- ref_pose = ref_c2w
- inv_ref_pose = torch.inverse(ref_pose)
- src_poses = src_c2ws
- inv_src_poses = torch.inverse(src_poses)
-
- ref_cam_loc = ref_pose[:3, 3].unsqueeze(0) # [1, 3]
- sampled_dists = torch.norm(pts - ref_cam_loc, dim=-1) # [N_pts, 1]
-
- relative_proj = inv_src_poses @ ref_pose
- R_rel = relative_proj[:, :3, :3]
- t_rel = relative_proj[:, :3, 3:]
- R_ref = inv_ref_pose[:3, :3]
- t_ref = inv_ref_pose[:3, 3:]
-
- pts = pts.view(-1, 3)
- normals = normals.view(-1, 3)
-
- with torch.no_grad():
- rot_normals = R_ref @ normals.unsqueeze(-1) # [N_pts, 3, 1]
- points_in_ref = R_ref @ pts.unsqueeze(
-                -1) + t_ref  # [N_pts, 3, 1]    points in the reference frame coordinate system
- d1 = torch.sum(rot_normals * points_in_ref, dim=1).unsqueeze(
- 1) # distance from the plane to ref camera center
-
- d2 = torch.sum(rot_normals.unsqueeze(1) * (-R_rel.transpose(1, 2) @ t_rel).unsqueeze(0),
- dim=2) # distance from the plane to src camera center
- valid_hom = (torch.abs(d1) > self.plane_dist_thresh) & (
- torch.abs(d1 - d2) > self.plane_dist_thresh) & ((d2 / d1) < 1)
-
- d1 = d1.squeeze()
- sign = torch.sign(d1)
- sign[sign == 0] = 1
- d = torch.clamp(torch.abs(d1), 1e-8) * sign
-
- H = src_intrs.unsqueeze(1) @ (
- R_rel.unsqueeze(1) + t_rel.unsqueeze(1) @ rot_normals.view(1, N_pts, 1, 3) / d.view(1,
- N_pts,
- 1, 1)
- ) @ inv_ref_intr.view(1, 1, 3, 3)
-
- # replace invalid homs with fronto-parallel homographies
- H_invalid = src_intrs.unsqueeze(1) @ (
- R_rel.unsqueeze(1) + t_rel.unsqueeze(1) @ self.z_axis.to(device).view(1, 1, 1, 3).expand(-1, N_pts,
- -1,
- -1) / sampled_dists.view(
- 1, N_pts, 1, 1)
- ) @ inv_ref_intr.view(1, 1, 3, 3)
- tmp_m = ~valid_hom.view(-1, N_src).t()
- H[tmp_m] = H_invalid[tmp_m]
-
- pixels = uv.view(N_rays, 1, 2) + self.offsets.float().to(device)
- Npx = pixels.shape[1]
- grid, warp_mask_full = self.patch_homography(H, pixels)
-
- warp_mask_full = warp_mask_full & (grid[..., 0] < (sizeW - self.h_patch_size)) & (
- grid[..., 1] < (sizeH - self.h_patch_size)) & (grid >= self.h_patch_size).all(dim=-1)
- warp_mask_full = warp_mask_full.view(N_src, N_rays, n_samples, Npx)
-
- grid = torch.clamp(normalize(grid, sizeH, sizeW), -10, 10)
-
- sampled_rgb_val = F.grid_sample(src_imgs, grid.view(N_src, -1, 1, 2), align_corners=True).squeeze(
- -1).transpose(1, 2)
- sampled_rgb_val = sampled_rgb_val.view(N_src, N_rays, n_samples, Npx, 3)
-
- warp_mask_full = warp_mask_full.permute(1, 2, 0, 3).contiguous() # (N_rays, n_samples, N_src, Npx)
- sampled_rgb_val = sampled_rgb_val.permute(1, 2, 0, 3, 4).contiguous() # (N_rays, n_samples, N_src, Npx, 3)
-
- return sampled_rgb_val, warp_mask_full
-
- def patch_homography(self, H, uv):
- N, Npx = uv.shape[:2]
- Nsrc = H.shape[0]
- H = H.view(Nsrc, N, -1, 3, 3)
- hom_uv = add_hom(uv)
-
- # einsum is 30 times faster
- # tmp = (H.view(Nsrc, N, -1, 1, 3, 3) @ hom_uv.view(1, N, 1, -1, 3, 1)).squeeze(-1).view(Nsrc, -1, 3)
- tmp = torch.einsum("vprik,pok->vproi", H, hom_uv).reshape(Nsrc, -1, 3)
-
- grid = tmp[..., :2] / torch.clamp(tmp[..., 2:], 1e-8)
- mask = tmp[..., 2] > 0
- return grid, mask
-
-
-def add_hom(pts):
- try:
- dev = pts.device
- ones = torch.ones(pts.shape[:-1], device=dev).unsqueeze(-1)
- return torch.cat((pts, ones), dim=-1)
-
- except AttributeError:
- ones = np.ones((pts.shape[0], 1))
- return np.concatenate((pts, ones), axis=1)
-
-
-def normalize(flow, h, w, clamp=None):
- # either h and w are simple float or N torch.tensor where N batch size
- try:
- h.device
-
- except AttributeError:
- h = torch.tensor(h, device=flow.device).float().unsqueeze(0)
- w = torch.tensor(w, device=flow.device).float().unsqueeze(0)
-
- if len(flow.shape) == 4:
- w = w.unsqueeze(1).unsqueeze(2)
- h = h.unsqueeze(1).unsqueeze(2)
- elif len(flow.shape) == 3:
- w = w.unsqueeze(1)
- h = h.unsqueeze(1)
- elif len(flow.shape) == 5:
- w = w.unsqueeze(0).unsqueeze(2).unsqueeze(2)
- h = h.unsqueeze(0).unsqueeze(2).unsqueeze(2)
-
- res = torch.empty_like(flow)
- if res.shape[-1] == 3:
- res[..., 2] = 1
-
- # for grid_sample with align_corners=True
- # https://github.com/pytorch/pytorch/blob/c371542efc31b1abfe6f388042aa3ab0cef935f2/aten/src/ATen/native/GridSampler.h#L33
- res[..., 0] = 2 * flow[..., 0] / (w - 1) - 1
- res[..., 1] = 2 * flow[..., 1] / (h - 1) - 1
-
- if clamp:
- return torch.clamp(res, -clamp, clamp)
- else:
- return res
-
-
-def build_patch_offset(h_patch_size):
- offsets = torch.arange(-h_patch_size, h_patch_size + 1)
- return torch.stack(torch.meshgrid(offsets, offsets, indexing="ij")[::-1], dim=-1).view(1, -1, 2) # nb_pixels_patch * 2
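`PatchProjector.patch_warp` builds a plane-induced homography per sample and then bilinearly samples the source images over a square pixel patch around each ray. The sketch below covers only the bookkeeping parts: `build_patch_offset` (same logic as above) and the (-1, 1) normalization expected by `F.grid_sample(align_corners=True)`. The image size and pixel centers are made up for illustration.

```python
import torch

def build_patch_offset(h):
    """All integer (x, y) offsets inside a (2h+1) x (2h+1) window around a pixel."""
    o = torch.arange(-h, h + 1)
    return torch.stack(torch.meshgrid(o, o, indexing="ij")[::-1], dim=-1).view(1, -1, 2)

uv = torch.tensor([[10.0, 20.0], [30.0, 5.0]])                   # two pixel centers (x, y)
patch_px = uv.view(-1, 1, 2) + build_patch_offset(1).float()     # 3x3 patch per pixel
print(patch_px.shape)                                            # (2, 9, 2)

# normalize pixel coordinates to the (-1, 1) range used by F.grid_sample(align_corners=True)
W, H = 64, 48
grid = torch.stack([2 * patch_px[..., 0] / (W - 1) - 1,
                    2 * patch_px[..., 1] / (H - 1) - 1], dim=-1)
print(grid.min().item(), grid.max().item())                      # stays inside (-1, 1)
```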
diff --git a/One-2-3-45-master 2/reconstruction/models/projector.py b/One-2-3-45-master 2/reconstruction/models/projector.py
deleted file mode 100644
index aa58d3f896edefff25cbb6fa713e7342d9b84a1d..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/models/projector.py
+++ /dev/null
@@ -1,425 +0,0 @@
-# The code is partly from IBRNet
-
-import torch
-import torch.nn.functional as F
-from models.render_utils import sample_ptsFeatures_from_featureMaps, sample_ptsFeatures_from_featureVolume
-
-def safe_l2_normalize(x, dim=None, eps=1e-6):
- return F.normalize(x, p=2, dim=dim, eps=eps)
-
-class Projector():
- """
- Obtain features from geometryVolume and rendering_feature_maps for generalized rendering
- """
-
- def compute_angle(self, xyz, query_c2w, supporting_c2ws):
- """
-
- :param xyz: [N_rays, n_samples,3 ]
- :param query_c2w: [1,4,4]
- :param supporting_c2ws: [n,4,4]
- :return:
- """
- N_rays, n_samples, _ = xyz.shape
- num_views = supporting_c2ws.shape[0]
- xyz = xyz.reshape(-1, 3)
-
- ray2tar_pose = (query_c2w[:, :3, 3].unsqueeze(1) - xyz.unsqueeze(0))
- ray2tar_pose /= (torch.norm(ray2tar_pose, dim=-1, keepdim=True) + 1e-6)
- ray2support_pose = (supporting_c2ws[:, :3, 3].unsqueeze(1) - xyz.unsqueeze(0))
- ray2support_pose /= (torch.norm(ray2support_pose, dim=-1, keepdim=True) + 1e-6)
- ray_diff = ray2tar_pose - ray2support_pose
- ray_diff_norm = torch.norm(ray_diff, dim=-1, keepdim=True)
- ray_diff_dot = torch.sum(ray2tar_pose * ray2support_pose, dim=-1, keepdim=True)
- ray_diff_direction = ray_diff / torch.clamp(ray_diff_norm, min=1e-6)
- ray_diff = torch.cat([ray_diff_direction, ray_diff_dot], dim=-1)
- ray_diff = ray_diff.reshape((num_views, N_rays, n_samples, 4)) # the last dimension (4) is dot-product
- return ray_diff.detach()
-
-
- def compute_angle_view_independent(self, xyz, surface_normals, supporting_c2ws):
- """
-
- :param xyz: [N_rays, n_samples,3 ]
- :param surface_normals: [N_rays, n_samples,3 ]
- :param supporting_c2ws: [n,4,4]
- :return:
- """
- N_rays, n_samples, _ = xyz.shape
- num_views = supporting_c2ws.shape[0]
- xyz = xyz.reshape(-1, 3)
-
- ray2tar_pose = surface_normals
- ray2support_pose = (supporting_c2ws[:, :3, 3].unsqueeze(1) - xyz.unsqueeze(0))
- ray2support_pose /= (torch.norm(ray2support_pose, dim=-1, keepdim=True) + 1e-6)
- ray_diff = ray2tar_pose - ray2support_pose
- ray_diff_norm = torch.norm(ray_diff, dim=-1, keepdim=True)
- ray_diff_dot = torch.sum(ray2tar_pose * ray2support_pose, dim=-1, keepdim=True)
- ray_diff_direction = ray_diff / torch.clamp(ray_diff_norm, min=1e-6)
- ray_diff = torch.cat([ray_diff_direction, ray_diff_dot], dim=-1)
- ray_diff = ray_diff.reshape((num_views, N_rays, n_samples, 4)) # the last dimension (4) is dot-product,
-        # and the first three components are the normalized ray-difference vector
- return ray_diff.detach()
-
- @torch.no_grad()
- def compute_z_diff(self, xyz, w2cs, intrinsics, pred_depth_values):
- """
-        compute the difference between the depths of the query pts projected onto each image and that image's predicted depth values
- :param xyz: [N_rays, n_samples,3 ]
- :param w2cs: [N_views, 4, 4]
- :param intrinsics: [N_views, 3, 3]
- :param pred_depth_values: [N_views, N_rays, n_samples,1 ]
- :param pred_depth_masks: [N_views, N_rays, n_samples]
- :return:
- """
- device = xyz.device
- N_views = w2cs.shape[0]
- N_rays, n_samples, _ = xyz.shape
- proj_matrix = torch.matmul(intrinsics, w2cs[:, :3, :])
-
- proj_rot = proj_matrix[:, :3, :3]
- proj_trans = proj_matrix[:, :3, 3:]
-
- batch_xyz = xyz.permute(2, 0, 1).contiguous().view(1, 3, N_rays * n_samples).repeat(N_views, 1, 1)
-
- proj_xyz = proj_rot.bmm(batch_xyz) + proj_trans
-
- # X = proj_xyz[:, 0]
- # Y = proj_xyz[:, 1]
- Z = proj_xyz[:, 2].clamp(min=1e-3) # [N_views, N_rays*n_samples]
- proj_z = Z.view(N_views, N_rays, n_samples, 1)
-
- z_diff = proj_z - pred_depth_values # [N_views, N_rays, n_samples,1 ]
-
- return z_diff
-
- def compute(self,
- pts,
- # * 3d geometry feature volumes
- geometryVolume=None,
- geometryVolumeMask=None,
- vol_dims=None,
- partial_vol_origin=None,
- vol_size=None,
- # * 2d rendering feature maps
- rendering_feature_maps=None,
- color_maps=None,
- w2cs=None,
- intrinsics=None,
- img_wh=None,
- query_img_idx=0, # the index of the N_views dim for rendering
- query_c2w=None,
- pred_depth_maps=None, # no use here
- pred_depth_masks=None # no use here
- ):
- """
- extract features of pts for rendering
- :param pts:
- :param geometryVolume:
- :param vol_dims:
- :param partial_vol_origin:
- :param vol_size:
- :param rendering_feature_maps:
- :param color_maps:
- :param w2cs:
- :param intrinsics:
- :param img_wh:
-        :param query_img_idx: by default, we render the first view of w2cs
- :return:
- """
- device = pts.device
- c2ws = torch.inverse(w2cs)
-
- if len(pts.shape) == 2:
- pts = pts[None, :, :]
-
- N_rays, n_samples, _ = pts.shape
- N_views = rendering_feature_maps.shape[0] # shape (N_views, C, H, W)
-
- supporting_img_idxs = torch.LongTensor([x for x in range(N_views) if x != query_img_idx]).to(device)
- query_img_idx = torch.LongTensor([query_img_idx]).to(device)
-
- if query_c2w is None and query_img_idx > -1:
- query_c2w = torch.index_select(c2ws, 0, query_img_idx)
- supporting_c2ws = torch.index_select(c2ws, 0, supporting_img_idxs)
- supporting_w2cs = torch.index_select(w2cs, 0, supporting_img_idxs)
- supporting_rendering_feature_maps = torch.index_select(rendering_feature_maps, 0, supporting_img_idxs)
- supporting_color_maps = torch.index_select(color_maps, 0, supporting_img_idxs)
- supporting_intrinsics = torch.index_select(intrinsics, 0, supporting_img_idxs)
-
- if pred_depth_maps is not None:
- supporting_depth_maps = torch.index_select(pred_depth_maps, 0, supporting_img_idxs)
- supporting_depth_masks = torch.index_select(pred_depth_masks, 0, supporting_img_idxs)
- # print("N_supporting_views: ", N_views - 1)
- N_supporting_views = N_views - 1
- else:
- supporting_c2ws = c2ws
- supporting_w2cs = w2cs
- supporting_rendering_feature_maps = rendering_feature_maps
- supporting_color_maps = color_maps
- supporting_intrinsics = intrinsics
- supporting_depth_maps = pred_depth_masks
- supporting_depth_masks = pred_depth_masks
- # print("N_supporting_views: ", N_views)
- N_supporting_views = N_views
- # import ipdb; ipdb.set_trace()
- if geometryVolume is not None:
- # * sample feature of pts from 3D feature volume
- pts_geometry_feature, pts_geometry_masks_0 = sample_ptsFeatures_from_featureVolume(
- pts, geometryVolume, vol_dims,
- partial_vol_origin, vol_size) # [N_rays, n_samples, C], [N_rays, n_samples]
-
- if len(geometryVolumeMask.shape) == 3:
- geometryVolumeMask = geometryVolumeMask[None, :, :, :]
-
- pts_geometry_masks_1, _ = sample_ptsFeatures_from_featureVolume(
- pts, geometryVolumeMask.to(geometryVolume.dtype), vol_dims,
- partial_vol_origin, vol_size) # [N_rays, n_samples, C]
-
- pts_geometry_masks = pts_geometry_masks_0 & (pts_geometry_masks_1[..., 0] > 0)
- else:
- pts_geometry_feature = None
- pts_geometry_masks = None
-
- # * sample feature of pts from 2D feature maps
- pts_rendering_feats, pts_rendering_mask = sample_ptsFeatures_from_featureMaps(
- pts, supporting_rendering_feature_maps, supporting_w2cs,
- supporting_intrinsics, img_wh,
- return_mask=True) # [N_views, C, N_rays, n_samples], # [N_views, N_rays, n_samples]
- # import ipdb; ipdb.set_trace()
- # * size (N_views, N_rays*n_samples, c)
- pts_rendering_feats = pts_rendering_feats.permute(0, 2, 3, 1).contiguous()
-
- pts_rendering_colors = sample_ptsFeatures_from_featureMaps(pts, supporting_color_maps, supporting_w2cs,
- supporting_intrinsics, img_wh)
- # * size (N_views, N_rays*n_samples, c)
- pts_rendering_colors = pts_rendering_colors.permute(0, 2, 3, 1).contiguous()
-
- rgb_feats = torch.cat([pts_rendering_colors, pts_rendering_feats], dim=-1) # [N_views, N_rays, n_samples, 3+c]
-
-
- ray_diff = self.compute_angle(pts, query_c2w, supporting_c2ws) # [N_views, N_rays, n_samples, 4]
- # import ipdb; ipdb.set_trace()
- if pts_geometry_masks is not None:
- final_mask = pts_geometry_masks[None, :, :].repeat(N_supporting_views, 1, 1) & \
- pts_rendering_mask # [N_views, N_rays, n_samples]
- else:
- final_mask = pts_rendering_mask
- # import ipdb; ipdb.set_trace()
- z_diff, pts_pred_depth_masks = None, None
-
- if pred_depth_maps is not None:
- pts_pred_depth_values = sample_ptsFeatures_from_featureMaps(pts, supporting_depth_maps, supporting_w2cs,
- supporting_intrinsics, img_wh)
- pts_pred_depth_values = pts_pred_depth_values.permute(0, 2, 3,
- 1).contiguous() # (N_views, N_rays*n_samples, 1)
-
-            # - pts_pred_depth_masks is more critical than final_mask:
-            # - a ray containing a few invalid pts will be treated as invalid
- pts_pred_depth_masks = sample_ptsFeatures_from_featureMaps(pts, supporting_depth_masks.float(),
- supporting_w2cs,
- supporting_intrinsics, img_wh)
-
- pts_pred_depth_masks = pts_pred_depth_masks.permute(0, 2, 3, 1).contiguous()[:, :, :,
- 0] # (N_views, N_rays*n_samples)
-
- z_diff = self.compute_z_diff(pts, supporting_w2cs, supporting_intrinsics, pts_pred_depth_values)
- # import ipdb; ipdb.set_trace()
- return pts_geometry_feature, rgb_feats, ray_diff, final_mask, z_diff, pts_pred_depth_masks
-
-
- def compute_view_independent(
- self,
- pts,
- # * 3d geometry feature volumes
- geometryVolume=None,
- geometryVolumeMask=None,
- sdf_network=None,
- lod=0,
- vol_dims=None,
- partial_vol_origin=None,
- vol_size=None,
- # * 2d rendering feature maps
- rendering_feature_maps=None,
- color_maps=None,
- w2cs=None,
- target_candidate_w2cs=None,
- intrinsics=None,
- img_wh=None,
- query_img_idx=0, # the index of the N_views dim for rendering
- query_c2w=None,
- pred_depth_maps=None, # no use here
- pred_depth_masks=None # no use here
- ):
- """
- extract features of pts for rendering
- :param pts:
- :param geometryVolume:
- :param vol_dims:
- :param partial_vol_origin:
- :param vol_size:
- :param rendering_feature_maps:
- :param color_maps:
- :param w2cs:
- :param intrinsics:
- :param img_wh:
-        :param query_img_idx: by default, we render the first view of w2cs
- :return:
- """
- device = pts.device
- c2ws = torch.inverse(w2cs)
-
- if len(pts.shape) == 2:
- pts = pts[None, :, :]
-
- N_rays, n_samples, _ = pts.shape
- N_views = rendering_feature_maps.shape[0] # shape (N_views, C, H, W)
-
- supporting_img_idxs = torch.LongTensor([x for x in range(N_views) if x != query_img_idx]).to(device)
- query_img_idx = torch.LongTensor([query_img_idx]).to(device)
-
- if query_c2w is None and query_img_idx > -1:
- query_c2w = torch.index_select(c2ws, 0, query_img_idx)
- supporting_c2ws = torch.index_select(c2ws, 0, supporting_img_idxs)
- supporting_w2cs = torch.index_select(w2cs, 0, supporting_img_idxs)
- supporting_rendering_feature_maps = torch.index_select(rendering_feature_maps, 0, supporting_img_idxs)
- supporting_color_maps = torch.index_select(color_maps, 0, supporting_img_idxs)
- supporting_intrinsics = torch.index_select(intrinsics, 0, supporting_img_idxs)
-
- if pred_depth_maps is not None:
- supporting_depth_maps = torch.index_select(pred_depth_maps, 0, supporting_img_idxs)
- supporting_depth_masks = torch.index_select(pred_depth_masks, 0, supporting_img_idxs)
- # print("N_supporting_views: ", N_views - 1)
- N_supporting_views = N_views - 1
- else:
- supporting_c2ws = c2ws
- supporting_w2cs = w2cs
- supporting_rendering_feature_maps = rendering_feature_maps
- supporting_color_maps = color_maps
- supporting_intrinsics = intrinsics
- supporting_depth_maps = pred_depth_masks
- supporting_depth_masks = pred_depth_masks
- # print("N_supporting_views: ", N_views)
- N_supporting_views = N_views
- # import ipdb; ipdb.set_trace()
- if geometryVolume is not None:
- # * sample feature of pts from 3D feature volume
- pts_geometry_feature, pts_geometry_masks_0 = sample_ptsFeatures_from_featureVolume(
- pts, geometryVolume, vol_dims,
- partial_vol_origin, vol_size) # [N_rays, n_samples, C], [N_rays, n_samples]
-
- if len(geometryVolumeMask.shape) == 3:
- geometryVolumeMask = geometryVolumeMask[None, :, :, :]
-
- pts_geometry_masks_1, _ = sample_ptsFeatures_from_featureVolume(
- pts, geometryVolumeMask.to(geometryVolume.dtype), vol_dims,
- partial_vol_origin, vol_size) # [N_rays, n_samples, C]
-
- pts_geometry_masks = pts_geometry_masks_0 & (pts_geometry_masks_1[..., 0] > 0)
- else:
- pts_geometry_feature = None
- pts_geometry_masks = None
-
- # * sample feature of pts from 2D feature maps
- pts_rendering_feats, pts_rendering_mask = sample_ptsFeatures_from_featureMaps(
- pts, supporting_rendering_feature_maps, supporting_w2cs,
- supporting_intrinsics, img_wh,
- return_mask=True) # [N_views, C, N_rays, n_samples], # [N_views, N_rays, n_samples]
-
- # * size (N_views, N_rays*n_samples, c)
- pts_rendering_feats = pts_rendering_feats.permute(0, 2, 3, 1).contiguous()
-
- pts_rendering_colors = sample_ptsFeatures_from_featureMaps(pts, supporting_color_maps, supporting_w2cs,
- supporting_intrinsics, img_wh)
- # * size (N_views, N_rays*n_samples, c)
- pts_rendering_colors = pts_rendering_colors.permute(0, 2, 3, 1).contiguous()
-
- rgb_feats = torch.cat([pts_rendering_colors, pts_rendering_feats], dim=-1) # [N_views, N_rays, n_samples, 3+c]
-
- # import ipdb; ipdb.set_trace()
-
- gradients = sdf_network.gradient(
- pts.reshape(-1, 3), # pts.squeeze(0),
- geometryVolume.unsqueeze(0),
- lod=lod
- ).squeeze()
-
- surface_normals = safe_l2_normalize(gradients, dim=-1) # [npts, 3]
- # input normals
- ren_ray_diff = self.compute_angle_view_independent(
- xyz=pts,
- surface_normals=surface_normals,
- supporting_c2ws=supporting_c2ws
- )
-
- # # choose closest target view direction from 32 candidate views
- # # choose the closest source view as view direction instead of the normals vectors
- # pts2src_centers = safe_l2_normalize((supporting_c2ws[:, :3, 3].unsqueeze(1) - pts)) # [N_views, npts, 3]
-
- # cosine_distance = torch.sum(pts2src_centers * surface_normals, dim=-1, keepdim=True) # [N_views, npts, 1]
- # # choose the largest cosine distance as the view direction
- # max_idx = torch.argmax(cosine_distance, dim=0) # [npts, 1]
-
- # chosen_view_direction = pts2src_centers[max_idx.squeeze(), torch.arange(pts.shape[1]), :] # [npts, 3]
- # ren_ray_diff = self.compute_angle_view_independent(
- # xyz=pts,
- # surface_normals=chosen_view_direction,
- # supporting_c2ws=supporting_c2ws
- # )
-
-
-
- # # choose closest target view direction from 8 candidate views
- # # choose the closest source view as view direction instead of the normals vectors
- # target_candidate_c2ws = torch.inverse(target_candidate_w2cs)
- # pts2src_centers = safe_l2_normalize((target_candidate_c2ws[:, :3, 3].unsqueeze(1) - pts)) # [N_views, npts, 3]
-
- # cosine_distance = torch.sum(pts2src_centers * surface_normals, dim=-1, keepdim=True) # [N_views, npts, 1]
- # # choose the largest cosine distance as the view direction
- # max_idx = torch.argmax(cosine_distance, dim=0) # [npts, 1]
-
- # chosen_view_direction = pts2src_centers[max_idx.squeeze(), torch.arange(pts.shape[1]), :] # [npts, 3]
- # ren_ray_diff = self.compute_angle_view_independent(
- # xyz=pts,
- # surface_normals=chosen_view_direction,
- # supporting_c2ws=supporting_c2ws
- # )
-
-
- # ray_diff = self.compute_angle(pts, query_c2w, supporting_c2ws) # [N_views, N_rays, n_samples, 4]
- # import ipdb; ipdb.set_trace()
-
-
- # input_directions = safe_l2_normalize(pts)
- # ren_ray_diff = self.compute_angle_view_independent(
- # xyz=pts,
- # surface_normals=input_directions,
- # supporting_c2ws=supporting_c2ws
- # )
-
- if pts_geometry_masks is not None:
- final_mask = pts_geometry_masks[None, :, :].repeat(N_supporting_views, 1, 1) & \
- pts_rendering_mask # [N_views, N_rays, n_samples]
- else:
- final_mask = pts_rendering_mask
- # import ipdb; ipdb.set_trace()
- z_diff, pts_pred_depth_masks = None, None
-
- if pred_depth_maps is not None:
- pts_pred_depth_values = sample_ptsFeatures_from_featureMaps(pts, supporting_depth_maps, supporting_w2cs,
- supporting_intrinsics, img_wh)
- pts_pred_depth_values = pts_pred_depth_values.permute(0, 2, 3,
- 1).contiguous() # (N_views, N_rays*n_samples, 1)
-
-            # - pts_pred_depth_masks is more critical than final_mask:
-            # - a ray containing a few invalid pts will be treated as invalid
- pts_pred_depth_masks = sample_ptsFeatures_from_featureMaps(pts, supporting_depth_masks.float(),
- supporting_w2cs,
- supporting_intrinsics, img_wh)
-
- pts_pred_depth_masks = pts_pred_depth_masks.permute(0, 2, 3, 1).contiguous()[:, :, :,
- 0] # (N_views, N_rays*n_samples)
-
- z_diff = self.compute_z_diff(pts, supporting_w2cs, supporting_intrinsics, pts_pred_depth_values)
- # import ipdb; ipdb.set_trace()
- return pts_geometry_feature, rgb_feats, ren_ray_diff, final_mask, z_diff, pts_pred_depth_masks
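`Projector.compute_angle` encodes, for every sample point, how the query viewing direction differs from each supporting view: a normalized direction difference plus a dot product, packed into a 4-vector. A stand-alone sketch of that computation follows; the camera centers and points are synthetic, whereas in the deleted code they come from the c2w matrices.

```python
import torch
import torch.nn.functional as F

def ray_diff(xyz, query_cam_center, src_cam_centers):
    """Per-point difference between the query and source viewing directions.

    Returns a (V, P, 4) tensor: normalized direction difference (3) + dot product (1),
    the same layout the deleted Projector.compute_angle produces.
    """
    to_query = F.normalize(query_cam_center[None, :] - xyz, dim=-1)           # (P, 3)
    to_src = F.normalize(src_cam_centers[:, None, :] - xyz[None], dim=-1)     # (V, P, 3)
    diff = to_query[None] - to_src                                            # (V, P, 3)
    dot = (to_query[None] * to_src).sum(-1, keepdim=True)                     # (V, P, 1)
    direction = diff / diff.norm(dim=-1, keepdim=True).clamp(min=1e-6)
    return torch.cat([direction, dot], dim=-1)

pts = torch.rand(8, 3)
query_center = torch.tensor([0.0, 0.0, 2.0])
src_centers = torch.tensor([[0.0, 0.0, 2.0], [2.0, 0.0, 0.0]])
out = ray_diff(pts, query_center, src_centers)
print(out.shape)          # (2, 8, 4)
print(out[0, :, 3])       # ~1: the first source view coincides with the query view
```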
diff --git a/One-2-3-45-master 2/reconstruction/models/rays.py b/One-2-3-45-master 2/reconstruction/models/rays.py
deleted file mode 100644
index 98f871c951ade0edb53b8f377e22170817e342f8..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/models/rays.py
+++ /dev/null
@@ -1,320 +0,0 @@
-import os, torch
-import numpy as np
-
-import torch.nn.functional as F
-
-def build_patch_offset(h_patch_size):
- offsets = torch.arange(-h_patch_size, h_patch_size + 1)
- return torch.stack(torch.meshgrid(offsets, offsets, indexing="ij")[::-1], dim=-1).view(1, -1, 2) # nb_pixels_patch * 2
-
-
-def gen_rays_from_single_image(H, W, image, intrinsic, c2w, depth=None, mask=None):
- """
-    generate rays in world space for a single image
-    :param H:
-    :param W:
-    :param intrinsic: [3,3]
-    :param c2w: [4,4]
- :return:
- """
- device = image.device
- ys, xs = torch.meshgrid(torch.linspace(0, H - 1, H),
- torch.linspace(0, W - 1, W), indexing="ij") # pytorch's meshgrid has indexing='ij'
- p = torch.stack([xs, ys, torch.ones_like(ys)], dim=-1) # H, W, 3
-
- # normalized ndc uv coordinates, (-1, 1)
- ndc_u = 2 * xs / (W - 1) - 1
- ndc_v = 2 * ys / (H - 1) - 1
- rays_ndc_uv = torch.stack([ndc_u, ndc_v], dim=-1).view(-1, 2).float().to(device)
-
- intrinsic_inv = torch.inverse(intrinsic)
-
- p = p.view(-1, 3).float().to(device) # N_rays, 3
- p = torch.matmul(intrinsic_inv[None, :3, :3], p[:, :, None]).squeeze() # N_rays, 3
- rays_v = p / torch.linalg.norm(p, ord=2, dim=-1, keepdim=True) # N_rays, 3
- rays_v = torch.matmul(c2w[None, :3, :3], rays_v[:, :, None]).squeeze() # N_rays, 3
- rays_o = c2w[None, :3, 3].expand(rays_v.shape) # N_rays, 3
-
- image = image.permute(1, 2, 0)
- color = image.view(-1, 3)
- depth = depth.view(-1, 1) if depth is not None else None
- mask = mask.view(-1, 1) if mask is not None else torch.ones([H * W, 1]).to(device)
- sample = {
- 'rays_o': rays_o,
- 'rays_v': rays_v,
- 'rays_ndc_uv': rays_ndc_uv,
- 'rays_color': color,
- # 'rays_depth': depth,
- 'rays_mask': mask,
- 'rays_norm_XYZ_cam': p # - XYZ_cam, before multiply depth
- }
- if depth is not None:
- sample['rays_depth'] = depth
-
- return sample
-
-
-def gen_random_rays_from_single_image(H, W, N_rays, image, intrinsic, c2w, depth=None, mask=None, dilated_mask=None,
- importance_sample=False, h_patch_size=3):
- """
- generate random rays in world space, for a single image
- :param H:
- :param W:
- :param N_rays:
- :param image: [3, H, W]
- :param intrinsic: [3,3]
- :param c2w: [4,4]
- :param depth: [H, W]
- :param mask: [H, W]
- :return:
- """
- device = image.device
-
- if dilated_mask is None:
- dilated_mask = mask
-
- if not importance_sample:
- pixels_x = torch.randint(low=0, high=W, size=[N_rays])
- pixels_y = torch.randint(low=0, high=H, size=[N_rays])
- elif importance_sample and dilated_mask is not None: # sample more pts in the valid mask regions
- pixels_x_1 = torch.randint(low=0, high=W, size=[N_rays // 4])
- pixels_y_1 = torch.randint(low=0, high=H, size=[N_rays // 4])
-
- ys, xs = torch.meshgrid(torch.linspace(0, H - 1, H),
- torch.linspace(0, W - 1, W), indexing="ij") # pytorch's meshgrid has indexing='ij'
- p = torch.stack([xs, ys], dim=-1) # H, W, 2
-
- try:
- p_valid = p[dilated_mask > 0] # [num, 2]
- random_idx = torch.randint(low=0, high=p_valid.shape[0], size=[N_rays // 4 * 3])
-        except Exception:
-            print("dilated_mask.shape: ", dilated_mask.shape)
-            print("dilated_mask valid number", dilated_mask.sum())
-
-            raise ValueError("dilated_mask contains no valid pixels to sample rays from")
- p_select = p_valid[random_idx] # [N_rays//2, 2]
- pixels_x_2 = p_select[:, 0]
- pixels_y_2 = p_select[:, 1]
-
- pixels_x = torch.cat([pixels_x_1, pixels_x_2], dim=0).to(torch.int64)
- pixels_y = torch.cat([pixels_y_1, pixels_y_2], dim=0).to(torch.int64)
-
- # - crop patch from images
- offsets = build_patch_offset(h_patch_size).to(device)
- grid_patch = torch.stack([pixels_x, pixels_y], dim=-1).view(-1, 1, 2) + offsets.float() # [N_pts, Npx, 2]
- patch_mask = (pixels_x > h_patch_size) * (pixels_x < (W - h_patch_size)) * (pixels_y > h_patch_size) * (
- pixels_y < H - h_patch_size) # [N_pts]
- grid_patch_u = 2 * grid_patch[:, :, 0] / (W - 1) - 1
- grid_patch_v = 2 * grid_patch[:, :, 1] / (H - 1) - 1
- grid_patch_uv = torch.stack([grid_patch_u, grid_patch_v], dim=-1) # [N_pts, Npx, 2]
- patch_color = F.grid_sample(image[None, :, :, :], grid_patch_uv[None, :, :, :], mode='bilinear',
- padding_mode='zeros',align_corners=True)[0] # [3, N_pts, Npx]
- patch_color = patch_color.permute(1, 2, 0).contiguous()
-
- # normalized ndc uv coordinates, (-1, 1)
- ndc_u = 2 * pixels_x / (W - 1) - 1
- ndc_v = 2 * pixels_y / (H - 1) - 1
- rays_ndc_uv = torch.stack([ndc_u, ndc_v], dim=-1).view(-1, 2).float().to(device)
-
- image = image.permute(1, 2, 0) # H ,W, C
- color = image[(pixels_y, pixels_x)] # N_rays, 3
-
- if mask is not None:
- mask = mask[(pixels_y, pixels_x)] # N_rays
- patch_mask = patch_mask * mask # N_rays
- mask = mask.view(-1, 1)
- else:
-        mask = torch.ones([N_rays, 1]).to(device)
-
- if depth is not None:
- depth = depth[(pixels_y, pixels_x)] # N_rays
- depth = depth.view(-1, 1)
-
- intrinsic_inv = torch.inverse(intrinsic)
-
- p = torch.stack([pixels_x, pixels_y, torch.ones_like(pixels_y)], dim=-1).float().to(device) # N_rays, 3
- p = torch.matmul(intrinsic_inv[None, :3, :3], p[:, :, None]).squeeze() # N_rays, 3
- rays_v = p / torch.linalg.norm(p, ord=2, dim=-1, keepdim=True) # N_rays, 3
- rays_v = torch.matmul(c2w[None, :3, :3], rays_v[:, :, None]).squeeze() # N_rays, 3
- rays_o = c2w[None, :3, 3].expand(rays_v.shape) # N_rays, 3
-
- sample = {
- 'rays_o': rays_o,
- 'rays_v': rays_v,
- 'rays_ndc_uv': rays_ndc_uv,
- 'rays_color': color,
- # 'rays_depth': depth,
- 'rays_mask': mask,
-        'rays_norm_XYZ_cam': p,  # - XYZ_cam, before multiplying by depth
- 'rays_patch_color': patch_color,
- 'rays_patch_mask': patch_mask.view(-1, 1)
- }
-
- if depth is not None:
- sample['rays_depth'] = depth
-
- return sample
-
-
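A minimal sketch of the importance-sampling split used above, with a hypothetical toy mask: one quarter of the rays are drawn uniformly over the image and three quarters from pixels inside the (dilated) mask.

    import torch
    H, W, N_rays = 64, 64, 8
    mask = torch.zeros(H, W); mask[16:48, 16:48] = 1                       # hypothetical foreground mask
    n_uni, n_msk = N_rays // 4, N_rays // 4 * 3
    xy_uni = torch.stack([torch.randint(0, W, (n_uni,)), torch.randint(0, H, (n_uni,))], dim=-1)
    ys, xs = torch.meshgrid(torch.arange(H), torch.arange(W), indexing="ij")
    valid = torch.stack([xs, ys], dim=-1)[mask > 0]                        # (x, y) coords inside the mask
    xy_msk = valid[torch.randint(0, valid.shape[0], (n_msk,))]
    pixels = torch.cat([xy_uni, xy_msk], dim=0)                            # 1/4 uniform + 3/4 mask-guided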
-def gen_random_rays_of_patch_from_single_image(H, W, N_rays, num_neighboring_pts, patch_size,
- image, intrinsic, c2w, depth=None, mask=None):
- """
- generate random rays in world space, for a single image
- sample rays from local patches
- :param H:
- :param W:
- :param N_rays: the number of center rays of patches
- :param image: [3, H, W]
- :param intrinsic: [3,3]
- :param c2w: [4,4]
- :param depth: [H, W]
- :param mask: [H, W]
- :return:
- """
- device = image.device
- patch_radius_max = patch_size // 2
-
- unit_u = 2 / (W - 1)
- unit_v = 2 / (H - 1)
-
- pixels_x_center = torch.randint(low=patch_size, high=W - patch_size, size=[N_rays])
- pixels_y_center = torch.randint(low=patch_size, high=H - patch_size, size=[N_rays])
-
- # normalized ndc uv coordinates, (-1, 1)
- ndc_u_center = 2 * pixels_x_center / (W - 1) - 1
- ndc_v_center = 2 * pixels_y_center / (H - 1) - 1
- ndc_uv_center = torch.stack([ndc_u_center, ndc_v_center], dim=-1).view(-1, 2).float().to(device)[:, None,
- :] # [N_rays, 1, 2]
-
- shift_u, shift_v = torch.rand([N_rays, num_neighboring_pts, 1]), torch.rand(
- [N_rays, num_neighboring_pts, 1]) # uniform distribution of [0,1)
- shift_u = 2 * (shift_u - 0.5) # mapping to [-1, 1)
- shift_v = 2 * (shift_v - 0.5)
-
-    # - avoid sampling points that are too close to the center point
- shift_uv = torch.cat([(shift_u * patch_radius_max) * unit_u, (shift_v * patch_radius_max) * unit_v],
- dim=-1) # [N_rays, num_npts, 2]
- neighboring_pts_uv = ndc_uv_center + shift_uv # [N_rays, num_npts, 2]
-
- sampled_pts_uv = torch.cat([ndc_uv_center, neighboring_pts_uv], dim=1) # concat the center point
-
- # sample the gts
- color = F.grid_sample(image[None, :, :, :], sampled_pts_uv[None, :, :, :], mode='bilinear',
- align_corners=True)[0] # [3, N_rays, num_npts]
- depth = F.grid_sample(depth[None, None, :, :], sampled_pts_uv[None, :, :, :], mode='bilinear',
- align_corners=True)[0] # [1, N_rays, num_npts]
-
- mask = F.grid_sample(mask[None, None, :, :].to(torch.float32), sampled_pts_uv[None, :, :, :], mode='nearest',
- align_corners=True).to(torch.int64)[0] # [1, N_rays, num_npts]
-
- intrinsic_inv = torch.inverse(intrinsic)
-
- sampled_pts_uv = sampled_pts_uv.view(N_rays * (1 + num_neighboring_pts), 2)
- color = color.permute(1, 2, 0).contiguous().view(N_rays * (1 + num_neighboring_pts), 3)
- depth = depth.permute(1, 2, 0).contiguous().view(N_rays * (1 + num_neighboring_pts), 1)
- mask = mask.permute(1, 2, 0).contiguous().view(N_rays * (1 + num_neighboring_pts), 1)
-
- pixels_x = (sampled_pts_uv[:, 0] + 1) * (W - 1) / 2
- pixels_y = (sampled_pts_uv[:, 1] + 1) * (H - 1) / 2
- p = torch.stack([pixels_x, pixels_y, torch.ones_like(pixels_y)], dim=-1).float().to(device) # N_rays*num_pts, 3
- p = torch.matmul(intrinsic_inv[None, :3, :3], p[:, :, None]).squeeze() # N_rays*num_pts, 3
- rays_v = p / torch.linalg.norm(p, ord=2, dim=-1, keepdim=True) # N_rays*num_pts, 3
- rays_v = torch.matmul(c2w[None, :3, :3], rays_v[:, :, None]).squeeze() # N_rays*num_pts, 3
- rays_o = c2w[None, :3, 3].expand(rays_v.shape) # N_rays*num_pts, 3
-
- sample = {
- 'rays_o': rays_o,
- 'rays_v': rays_v,
- 'rays_ndc_uv': sampled_pts_uv,
- 'rays_color': color,
- 'rays_depth': depth,
- 'rays_mask': mask,
- # 'rays_norm_XYZ_cam': p # - XYZ_cam, before multiply depth
- }
-
- return sample
-
-
-def gen_random_rays_from_batch_images(H, W, N_rays, images, intrinsics, c2ws, depths=None, masks=None):
- """
-
- :param H:
- :param W:
- :param N_rays:
- :param images: [B,3,H,W]
- :param intrinsics: [B, 3, 3]
- :param c2ws: [B, 4, 4]
- :param depths: [B,H,W]
- :param masks: [B,H,W]
- :return:
- """
- assert len(images.shape) == 4
-
- rays_o = []
- rays_v = []
- rays_color = []
- rays_depth = []
- rays_mask = []
- for i in range(images.shape[0]):
- sample = gen_random_rays_from_single_image(H, W, N_rays, images[i], intrinsics[i], c2ws[i],
- depth=depths[i] if depths is not None else None,
- mask=masks[i] if masks is not None else None)
- rays_o.append(sample['rays_o'])
- rays_v.append(sample['rays_v'])
- rays_color.append(sample['rays_color'])
- if depths is not None:
- rays_depth.append(sample['rays_depth'])
- if masks is not None:
- rays_mask.append(sample['rays_mask'])
-
- sample = {
- 'rays_o': torch.stack(rays_o, dim=0), # [batch, N_rays, 3]
- 'rays_v': torch.stack(rays_v, dim=0),
- 'rays_color': torch.stack(rays_color, dim=0),
- 'rays_depth': torch.stack(rays_depth, dim=0) if depths is not None else None,
- 'rays_mask': torch.stack(rays_mask, dim=0) if masks is not None else None
- }
- return sample
-
-
-from scipy.spatial.transform import Rotation as Rot
-from scipy.spatial.transform import Slerp
-
-
-def gen_rays_between(c2w_0, c2w_1, intrinsic, ratio, H, W, resolution_level=1):
- device = c2w_0.device
-
- l = resolution_level
- tx = torch.linspace(0, W - 1, W // l)
- ty = torch.linspace(0, H - 1, H // l)
- pixels_x, pixels_y = torch.meshgrid(tx, ty, indexing="ij")
- p = torch.stack([pixels_x, pixels_y, torch.ones_like(pixels_y)], dim=-1).to(device) # W, H, 3
-
- intrinsic_inv = torch.inverse(intrinsic[:3, :3])
- p = torch.matmul(intrinsic_inv[None, None, :3, :3], p[:, :, :, None]).squeeze() # W, H, 3
- rays_v = p / torch.linalg.norm(p, ord=2, dim=-1, keepdim=True) # W, H, 3
- trans = c2w_0[:3, 3] * (1.0 - ratio) + c2w_1[:3, 3] * ratio
-
- pose_0 = c2w_0.detach().cpu().numpy()
- pose_1 = c2w_1.detach().cpu().numpy()
- pose_0 = np.linalg.inv(pose_0)
- pose_1 = np.linalg.inv(pose_1)
- rot_0 = pose_0[:3, :3]
- rot_1 = pose_1[:3, :3]
- rots = Rot.from_matrix(np.stack([rot_0, rot_1]))
- key_times = [0, 1]
- key_rots = [rot_0, rot_1]
- slerp = Slerp(key_times, rots)
- rot = slerp(ratio)
- pose = np.diag([1.0, 1.0, 1.0, 1.0])
- pose = pose.astype(np.float32)
- pose[:3, :3] = rot.as_matrix()
- pose[:3, 3] = ((1.0 - ratio) * pose_0 + ratio * pose_1)[:3, 3]
- pose = np.linalg.inv(pose)
-
- c2w = torch.from_numpy(pose).to(device)
-    rot = torch.from_numpy(pose[:3, :3]).to(device)
-    trans = torch.from_numpy(pose[:3, 3]).to(device)
- rays_v = torch.matmul(rot[None, None, :3, :3], rays_v[:, :, :, None]).squeeze() # W, H, 3
- rays_o = trans[None, None, :3].expand(rays_v.shape) # W, H, 3
- return c2w, rays_o.transpose(0, 1).contiguous().view(-1, 3), rays_v.transpose(0, 1).contiguous().view(-1, 3)
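A minimal sketch of the rotation interpolation used by gen_rays_between, with two hypothetical poses: scipy's Slerp interpolates on the rotation manifold while the translation is blended linearly.

    import numpy as np
    from scipy.spatial.transform import Rotation as Rot, Slerp
    rot_0 = np.eye(3)
    rot_1 = Rot.from_euler('y', 90, degrees=True).as_matrix()              # hypothetical second pose
    slerp = Slerp([0, 1], Rot.from_matrix(np.stack([rot_0, rot_1])))
    rot_half = slerp(0.5).as_matrix()                                      # rotation halfway between the poses
    t_half = 0.5 * np.zeros(3) + 0.5 * np.array([0., 0., 1.])              # linear blend of hypothetical translations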
diff --git a/One-2-3-45-master 2/reconstruction/models/render_utils.py b/One-2-3-45-master 2/reconstruction/models/render_utils.py
deleted file mode 100644
index c14d5761234a16a19ed10509f9f0972adaf04c9a..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/models/render_utils.py
+++ /dev/null
@@ -1,120 +0,0 @@
-import torch
-import torch.nn as nn
-import torch.nn.functional as F
-
-from ops.back_project import cam2pixel
-
-
-def sample_pdf(bins, weights, n_samples, det=False):
- '''
- :param bins: tensor of shape [N_rays, M+1], M is the number of bins
- :param weights: tensor of shape [N_rays, M]
-    :param n_samples: number of samples along each ray
-    :param det: if True, perform deterministic (evenly spaced) sampling
-    :return: [N_rays, n_samples]
- '''
- device = weights.device
-
- weights = weights + 1e-5 # prevent nans
- pdf = weights / torch.sum(weights, -1, keepdim=True)
- cdf = torch.cumsum(pdf, -1)
- cdf = torch.cat([torch.zeros_like(cdf[..., :1]).to(device), cdf], -1)
-
- # if bins.shape[1] != weights.shape[1]: # - minor modification, add this constraint
- # cdf = torch.cat([torch.zeros_like(cdf[..., :1]).to(device), cdf], -1)
- # Take uniform samples
- if det:
- u = torch.linspace(0. + 0.5 / n_samples, 1. - 0.5 / n_samples, steps=n_samples).to(device)
- u = u.expand(list(cdf.shape[:-1]) + [n_samples])
- else:
- u = torch.rand(list(cdf.shape[:-1]) + [n_samples]).to(device)
-
- # Invert CDF
- u = u.contiguous()
- # inds = searchsorted(cdf, u, side='right')
- inds = torch.searchsorted(cdf, u, right=True)
-
- below = torch.max(torch.zeros_like(inds - 1), inds - 1)
- above = torch.min((cdf.shape[-1] - 1) * torch.ones_like(inds), inds)
- inds_g = torch.stack([below, above], -1) # (batch, n_samples, 2)
-
- matched_shape = [inds_g.shape[0], inds_g.shape[1], cdf.shape[-1]]
- cdf_g = torch.gather(cdf.unsqueeze(1).expand(matched_shape), 2, inds_g)
- bins_g = torch.gather(bins.unsqueeze(1).expand(matched_shape), 2, inds_g)
-
- denom = (cdf_g[..., 1] - cdf_g[..., 0])
- denom = torch.where(denom < 1e-5, torch.ones_like(denom), denom)
- t = (u - cdf_g[..., 0]) / denom
- samples = bins_g[..., 0] + t * (bins_g[..., 1] - bins_g[..., 0])
-
- # pdb.set_trace()
- return samples
-
-
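A minimal usage sketch of sample_pdf above (assuming it is importable from this module): with larger weights on the far half of each ray, the inverse-CDF samples concentrate there.

    import torch
    bins = torch.linspace(0., 1., 9).expand(2, 9)                          # 2 rays, 8 bins each
    weights = torch.ones(2, 8); weights[:, 4:] = 4.0                       # more probability mass in the far half
    samples = sample_pdf(bins, weights, n_samples=16, det=True)
    # samples.shape == (2, 16); most depths fall in [0.5, 1.0], where the weights are larger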
-def sample_ptsFeatures_from_featureVolume(pts, featureVolume, vol_dims=None, partial_vol_origin=None, vol_size=None):
- """
- sample feature of pts_wrd from featureVolume, all in world space
- :param pts: [N_rays, n_samples, 3]
- :param featureVolume: [C,wX,wY,wZ]
- :param vol_dims: [3] "3" for dimX, dimY, dimZ
-    :param partial_vol_origin: [3]
-    :param vol_size: voxel size
-    :return: pts_feature: [N_rays, n_samples, C]
-    :return: valid_mask: [N_rays, n_samples]
- """
-
- N_rays, n_samples, _ = pts.shape
-
- if vol_dims is None:
- pts_normalized = pts
- else:
- # normalized to (-1, 1)
- pts_normalized = 2 * (pts - partial_vol_origin[None, None, :]) / (vol_size * (vol_dims[None, None, :] - 1)) - 1
-
- valid_mask = (torch.abs(pts_normalized[:, :, 0]) < 1.0) & (
- torch.abs(pts_normalized[:, :, 1]) < 1.0) & (
- torch.abs(pts_normalized[:, :, 2]) < 1.0) # (N_rays, n_samples)
-
- pts_normalized = torch.flip(pts_normalized, dims=[-1]) # ! reverse the xyz for grid_sample
-
- # ! checked grid_sample, (x,y,z) is for (D,H,W), reverse for (W,H,D)
- pts_feature = F.grid_sample(featureVolume[None, :, :, :, :], pts_normalized[None, None, :, :, :],
- padding_mode='zeros',
- align_corners=True).view(-1, N_rays, n_samples) # [C, N_rays, n_samples]
-
- pts_feature = pts_feature.permute(1, 2, 0) # [N_rays, n_samples, C]
- return pts_feature, valid_mask
-
-
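A minimal sketch of the coordinate convention above, with a hypothetical 2x2x2 volume: points already normalized to (-1, 1) are flipped from (x, y, z) to (z, y, x) order before F.grid_sample, because the last grid channel of grid_sample indexes the first spatial dimension of the volume.

    import torch
    import torch.nn.functional as F
    vol = torch.arange(8.).view(1, 1, 2, 2, 2)                             # [N=1, C=1, X=2, Y=2, Z=2]
    pts = torch.tensor([[1.0, -1.0, -1.0]])                                # one point in (-1, 1), (x, y, z) order
    grid = torch.flip(pts, dims=[-1]).view(1, 1, 1, 1, 3)                  # reorder to (z, y, x) for grid_sample
    feat = F.grid_sample(vol, grid, padding_mode='zeros', align_corners=True)
    # feat.view(-1) == tensor([4.]): the voxel at index (x=1, y=0, z=0)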
-def sample_ptsFeatures_from_featureMaps(pts, featureMaps, w2cs, intrinsics, WH, proj_matrix=None, return_mask=False):
- """
- sample features of pts from 2d feature maps
- :param pts: [N_rays, N_samples, 3]
- :param featureMaps: [N_views, C, H, W]
- :param w2cs: [N_views, 4, 4]
- :param intrinsics: [N_views, 3, 3]
- :param proj_matrix: [N_views, 4, 4]
-    :param WH: (image width, image height)
- :return:
- """
- # normalized to (-1, 1)
- N_rays, n_samples, _ = pts.shape
- N_views = featureMaps.shape[0]
-
- if proj_matrix is None:
- proj_matrix = torch.matmul(intrinsics, w2cs[:, :3, :])
-
- pts = pts.permute(2, 0, 1).contiguous().view(1, 3, N_rays, n_samples).repeat(N_views, 1, 1, 1)
- pixel_grids = cam2pixel(pts, proj_matrix[:, :3, :3], proj_matrix[:, :3, 3:],
- 'zeros', sizeH=WH[1], sizeW=WH[0]) # (nviews, N_rays, n_samples, 2)
-
- valid_mask = (torch.abs(pixel_grids[:, :, :, 0]) < 1.0) & (
- torch.abs(pixel_grids[:, :, :, 1]) < 1.00) # (nviews, N_rays, n_samples)
-
- pts_feature = F.grid_sample(featureMaps, pixel_grids,
- padding_mode='zeros',
- align_corners=True) # [N_views, C, N_rays, n_samples]
-
- if return_mask:
- return pts_feature, valid_mask
- else:
- return pts_feature
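A minimal sketch of the projection that cam2pixel is assumed to perform here, with hypothetical intrinsics for a 100x100 image: world points are mapped through K·[R|t] to pixel coordinates and then normalized to (-1, 1) so F.grid_sample can fetch per-view features.

    import torch
    K = torch.tensor([[100., 0., 50.], [0., 100., 50.], [0., 0., 1.]])     # hypothetical intrinsics
    w2c = torch.eye(4)                                                     # hypothetical world-to-camera pose
    P = K @ w2c[:3, :]                                                     # 3x4 projection matrix
    pt = torch.tensor([0.1, -0.2, 2.0, 1.0])                               # world point, homogeneous
    u, v, w = P @ pt
    u, v = u / w, v / w                                                    # pixel coordinates
    u_ndc, v_ndc = 2 * u / (100 - 1) - 1, 2 * v / (100 - 1) - 1            # normalized to (-1, 1) for grid_sample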
diff --git a/One-2-3-45-master 2/reconstruction/models/rendering_network.py b/One-2-3-45-master 2/reconstruction/models/rendering_network.py
deleted file mode 100644
index b2c919703e0eea0e0e86f5781d2216b03879d3e2..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/models/rendering_network.py
+++ /dev/null
@@ -1,129 +0,0 @@
-# the code is partly borrowed from IBRNet
-
-import torch
-import torch.nn as nn
-import torch.nn.functional as F
-
-torch._C._jit_set_profiling_executor(False)
-torch._C._jit_set_profiling_mode(False)
-
-
-# default tensorflow initialization of linear layers
-def weights_init(m):
- if isinstance(m, nn.Linear):
- nn.init.kaiming_normal_(m.weight.data)
- if m.bias is not None:
- nn.init.zeros_(m.bias.data)
-
-
-@torch.jit.script
-def fused_mean_variance(x, weight):
- mean = torch.sum(x * weight, dim=2, keepdim=True)
- var = torch.sum(weight * (x - mean) ** 2, dim=2, keepdim=True)
- return mean, var
-
-
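A minimal sketch showing that, with weights summing to one over the view dimension, fused_mean_variance reduces to the ordinary per-point mean and (biased) variance across views.

    import torch
    x = torch.randn(1, 2, 4, 8)                                            # [n_rays, n_samples, n_views, n_feat]
    w = torch.full((1, 2, 4, 1), 0.25)                                     # equal weights over the 4 views
    mean = torch.sum(x * w, dim=2, keepdim=True)
    var = torch.sum(w * (x - mean) ** 2, dim=2, keepdim=True)
    assert torch.allclose(mean, x.mean(dim=2, keepdim=True), atol=1e-6)
    assert torch.allclose(var, x.var(dim=2, unbiased=False, keepdim=True), atol=1e-6)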
-class GeneralRenderingNetwork(nn.Module):
- """
- This model is not sensitive to finetuning
- """
-
- def __init__(self, in_geometry_feat_ch=8, in_rendering_feat_ch=56, anti_alias_pooling=True):
- super(GeneralRenderingNetwork, self).__init__()
-
- self.in_geometry_feat_ch = in_geometry_feat_ch
- self.in_rendering_feat_ch = in_rendering_feat_ch
- self.anti_alias_pooling = anti_alias_pooling
-
- if self.anti_alias_pooling:
- self.s = nn.Parameter(torch.tensor(0.2), requires_grad=True)
- activation_func = nn.ELU(inplace=True)
-
- self.ray_dir_fc = nn.Sequential(nn.Linear(4, 16),
- activation_func,
- nn.Linear(16, in_rendering_feat_ch + 3),
- activation_func)
-
- self.base_fc = nn.Sequential(nn.Linear((in_rendering_feat_ch + 3) * 3 + in_geometry_feat_ch, 64),
- activation_func,
- nn.Linear(64, 32),
- activation_func)
-
- self.vis_fc = nn.Sequential(nn.Linear(32, 32),
- activation_func,
- nn.Linear(32, 33),
- activation_func,
- )
-
- self.vis_fc2 = nn.Sequential(nn.Linear(32, 32),
- activation_func,
- nn.Linear(32, 1),
- nn.Sigmoid()
- )
-
- self.rgb_fc = nn.Sequential(nn.Linear(32 + 1 + 4, 16),
- activation_func,
- nn.Linear(16, 8),
- activation_func,
- nn.Linear(8, 1))
-
- self.base_fc.apply(weights_init)
- self.vis_fc2.apply(weights_init)
- self.vis_fc.apply(weights_init)
- self.rgb_fc.apply(weights_init)
-
- def forward(self, geometry_feat, rgb_feat, ray_diff, mask):
- '''
-        :param geometry_feat: geometry features indicating the sdf [n_rays, n_samples, n_feat]
- :param rgb_feat: rgbs and image features [n_views, n_rays, n_samples, n_feat]
- :param ray_diff: ray direction difference [n_views, n_rays, n_samples, 4], first 3 channels are directions,
- last channel is inner product
- :param mask: mask for whether each projection is valid or not. [n_views, n_rays, n_samples]
-        :return: blended rgb [n_rays, n_samples, 3] and a per-ray validity mask
- '''
-
- rgb_feat = rgb_feat.permute(1, 2, 0, 3).contiguous()
- ray_diff = ray_diff.permute(1, 2, 0, 3).contiguous()
- mask = mask[:, :, :, None].permute(1, 2, 0, 3).contiguous()
- num_views = rgb_feat.shape[2]
- geometry_feat = geometry_feat[:, :, None, :].repeat(1, 1, num_views, 1)
-
- direction_feat = self.ray_dir_fc(ray_diff)
- rgb_in = rgb_feat[..., :3]
- rgb_feat = rgb_feat + direction_feat
-
- if self.anti_alias_pooling:
- _, dot_prod = torch.split(ray_diff, [3, 1], dim=-1)
- exp_dot_prod = torch.exp(torch.abs(self.s) * (dot_prod - 1))
- weight = (exp_dot_prod - torch.min(exp_dot_prod, dim=2, keepdim=True)[0]) * mask
- weight = weight / (torch.sum(weight, dim=2, keepdim=True) + 1e-8)
- else:
- weight = mask / (torch.sum(mask, dim=2, keepdim=True) + 1e-8)
-
- # compute mean and variance across different views for each point
- mean, var = fused_mean_variance(rgb_feat, weight) # [n_rays, n_samples, 1, n_feat]
- globalfeat = torch.cat([mean, var], dim=-1) # [n_rays, n_samples, 1, 2*n_feat]
-
- x = torch.cat([geometry_feat, globalfeat.expand(-1, -1, num_views, -1), rgb_feat],
- dim=-1) # [n_rays, n_samples, n_views, 3*n_feat+n_geo_feat]
- x = self.base_fc(x)
-
- x_vis = self.vis_fc(x * weight)
- x_res, vis = torch.split(x_vis, [x_vis.shape[-1] - 1, 1], dim=-1)
- vis = torch.sigmoid(vis) * mask
- x = x + x_res
- vis = self.vis_fc2(x * vis) * mask
-
- # rgb computation
- x = torch.cat([x, vis, ray_diff], dim=-1)
- x = self.rgb_fc(x)
- x = x.masked_fill(mask == 0, -1e9)
- blending_weights_valid = F.softmax(x, dim=2) # color blending
- rgb_out = torch.sum(rgb_in * blending_weights_valid, dim=2)
-
- mask = mask.detach().to(rgb_out.dtype) # [n_rays, n_samples, n_views, 1]
- mask = torch.sum(mask, dim=2, keepdim=False)
- mask = mask >= 2 # more than 2 views see the point
- mask = torch.sum(mask.to(rgb_out.dtype), dim=1, keepdim=False)
- valid_mask = mask > 8 # valid rays, more than 8 valid samples
- return rgb_out, valid_mask # (N_rays, n_samples, 3), (N_rays, 1)
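A minimal sketch of the final color blending step above, with hypothetical shapes: per-view scores of invalid projections are pushed to -1e9 so their softmax weight is effectively zero, and the output color is the weighted sum of the per-view colors.

    import torch
    import torch.nn.functional as F
    scores = torch.randn(2, 3, 4, 1)                                       # [n_rays, n_samples, n_views, 1]
    mask = torch.tensor([1., 1., 0., 1.]).view(1, 1, 4, 1)                 # the third view is invalid
    rgb_in = torch.rand(2, 3, 4, 3)                                        # per-view colors
    w = F.softmax(scores.masked_fill(mask == 0, -1e9), dim=2)              # invalid views get ~zero weight
    rgb_out = torch.sum(rgb_in * w, dim=2)                                 # blended color, [n_rays, n_samples, 3]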
diff --git a/One-2-3-45-master 2/reconstruction/models/sparse_neus_renderer.py b/One-2-3-45-master 2/reconstruction/models/sparse_neus_renderer.py
deleted file mode 100644
index 96ffc7b547e0f83a177a81f36be38375d9cd26fb..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/models/sparse_neus_renderer.py
+++ /dev/null
@@ -1,985 +0,0 @@
-"""
-The code is heavily borrowed from NeuS
-"""
-
-import os
-import cv2 as cv
-import torch
-import torch.nn as nn
-import torch.nn.functional as F
-import numpy as np
-import logging
-import mcubes
-from icecream import ic
-from models.render_utils import sample_pdf
-
-from models.projector import Projector
-from tsparse.torchsparse_utils import sparse_to_dense_channel
-
-from models.fast_renderer import FastRenderer
-
-from models.patch_projector import PatchProjector
-
-
-class SparseNeuSRenderer(nn.Module):
- """
-    conditional NeuS renderer;
-    operates in normalized world space;
-    wrapped in nn.Module to support DataParallel training
- """
-
- def __init__(self,
- rendering_network_outside,
- sdf_network,
- variance_network,
- rendering_network,
- n_samples,
- n_importance,
- n_outside,
- perturb,
- alpha_type='div',
- conf=None
- ):
- super(SparseNeuSRenderer, self).__init__()
-
- self.conf = conf
- self.base_exp_dir = conf['general.base_exp_dir']
-
- # network setups
- self.rendering_network_outside = rendering_network_outside
- self.sdf_network = sdf_network
- self.variance_network = variance_network
- self.rendering_network = rendering_network
-
- self.n_samples = n_samples
- self.n_importance = n_importance
- self.n_outside = n_outside
- self.perturb = perturb
- self.alpha_type = alpha_type
-
- self.rendering_projector = Projector() # used to obtain features for generalized rendering
-
- self.h_patch_size = self.conf.get_int('model.h_patch_size', default=3)
- self.patch_projector = PatchProjector(self.h_patch_size)
-
- self.ray_tracer = FastRenderer() # ray_tracer to extract depth maps from sdf_volume
-
- # - fitted rendering or general rendering
- try:
- self.if_fitted_rendering = self.sdf_network.if_fitted_rendering
-        except AttributeError:
- self.if_fitted_rendering = False
-
- def up_sample(self, rays_o, rays_d, z_vals, sdf, n_importance, inv_variance,
- conditional_valid_mask_volume=None):
- device = rays_o.device
- batch_size, n_samples = z_vals.shape
- pts = rays_o[:, None, :] + rays_d[:, None, :] * z_vals[..., :, None] # n_rays, n_samples, 3
-
- if conditional_valid_mask_volume is not None:
- pts_mask = self.get_pts_mask_for_conditional_volume(pts.view(-1, 3), conditional_valid_mask_volume)
- pts_mask = pts_mask.reshape(batch_size, n_samples)
- pts_mask = pts_mask[:, :-1] * pts_mask[:, 1:] # [batch_size, n_samples-1]
- else:
- pts_mask = torch.ones([batch_size, n_samples]).to(pts.device)
-
- sdf = sdf.reshape(batch_size, n_samples)
- prev_sdf, next_sdf = sdf[:, :-1], sdf[:, 1:]
- prev_z_vals, next_z_vals = z_vals[:, :-1], z_vals[:, 1:]
- mid_sdf = (prev_sdf + next_sdf) * 0.5
- dot_val = None
- if self.alpha_type == 'uniform':
-            dot_val = torch.ones([batch_size, n_samples - 1]).to(device) * -1.0
- else:
- dot_val = (next_sdf - prev_sdf) / (next_z_vals - prev_z_vals + 1e-5)
- prev_dot_val = torch.cat([torch.zeros([batch_size, 1]).to(device), dot_val[:, :-1]], dim=-1)
- dot_val = torch.stack([prev_dot_val, dot_val], dim=-1)
- dot_val, _ = torch.min(dot_val, dim=-1, keepdim=False)
- dot_val = dot_val.clip(-10.0, 0.0) * pts_mask
- dist = (next_z_vals - prev_z_vals)
- prev_esti_sdf = mid_sdf - dot_val * dist * 0.5
- next_esti_sdf = mid_sdf + dot_val * dist * 0.5
- prev_cdf = torch.sigmoid(prev_esti_sdf * inv_variance)
- next_cdf = torch.sigmoid(next_esti_sdf * inv_variance)
- alpha_sdf = (prev_cdf - next_cdf + 1e-5) / (prev_cdf + 1e-5)
-
- alpha = alpha_sdf
-
- # - apply pts_mask
- alpha = pts_mask * alpha
-
- weights = alpha * torch.cumprod(
- torch.cat([torch.ones([batch_size, 1]).to(device), 1. - alpha + 1e-7], -1), -1)[:, :-1]
-
- z_samples = sample_pdf(z_vals, weights, n_importance, det=True).detach()
- return z_samples
-
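A minimal numeric sketch of the alpha computation above: for a section that crosses the SDF zero level set, the sigmoid CDF drops sharply between the two endpoints and alpha approaches one (the inverse variance here is a hypothetical value).

    import torch
    inv_variance = 64.0
    sdf_prev, sdf_next = torch.tensor(0.05), torch.tensor(-0.05)           # interval straddles the surface
    prev_cdf = torch.sigmoid(sdf_prev * inv_variance)
    next_cdf = torch.sigmoid(sdf_next * inv_variance)
    alpha = ((prev_cdf - next_cdf + 1e-5) / (prev_cdf + 1e-5)).clip(0.0, 1.0)
    # alpha is about 0.96: nearly all remaining transmittance is consumed at this crossing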
- def cat_z_vals(self, rays_o, rays_d, z_vals, new_z_vals, sdf, lod,
- sdf_network, gru_fusion,
- # * related to conditional feature
- conditional_volume=None,
- conditional_valid_mask_volume=None
- ):
- device = rays_o.device
- batch_size, n_samples = z_vals.shape
- _, n_importance = new_z_vals.shape
- pts = rays_o[:, None, :] + rays_d[:, None, :] * new_z_vals[..., :, None]
-
- if conditional_valid_mask_volume is not None:
- pts_mask = self.get_pts_mask_for_conditional_volume(pts.view(-1, 3), conditional_valid_mask_volume)
- pts_mask = pts_mask.reshape(batch_size, n_importance)
- pts_mask_bool = (pts_mask > 0).view(-1)
-        else:
-            pts_mask = torch.ones([batch_size, n_importance]).to(pts.device)
-            pts_mask_bool = (pts_mask > 0).view(-1)
-
- new_sdf = torch.ones([batch_size * n_importance, 1]).to(pts.dtype).to(device) * 100
-
- if torch.sum(pts_mask) > 1:
- new_outputs = sdf_network.sdf(pts.reshape(-1, 3)[pts_mask_bool], conditional_volume, lod=lod)
- new_sdf[pts_mask_bool] = new_outputs['sdf_pts_scale%d' % lod] # .reshape(batch_size, n_importance)
-
- new_sdf = new_sdf.view(batch_size, n_importance)
-
- z_vals = torch.cat([z_vals, new_z_vals], dim=-1)
- sdf = torch.cat([sdf, new_sdf], dim=-1)
-
- z_vals, index = torch.sort(z_vals, dim=-1)
- xx = torch.arange(batch_size)[:, None].expand(batch_size, n_samples + n_importance).reshape(-1)
- index = index.reshape(-1)
- sdf = sdf[(xx, index)].reshape(batch_size, n_samples + n_importance)
-
- return z_vals, sdf
-
- @torch.no_grad()
- def get_pts_mask_for_conditional_volume(self, pts, mask_volume):
- """
-
- :param pts: [N, 3]
- :param mask_volume: [1, 1, X, Y, Z]
- :return:
- """
- num_pts = pts.shape[0]
- pts = pts.view(1, 1, 1, num_pts, 3) # - should be in range (-1, 1)
-
- pts = torch.flip(pts, dims=[-1])
-
- pts_mask = F.grid_sample(mask_volume, pts, mode='nearest') # [1, c, 1, 1, num_pts]
- pts_mask = pts_mask.view(-1, num_pts).permute(1, 0).contiguous() # [num_pts, 1]
-
- return pts_mask
-
- def render_core(self,
- rays_o,
- rays_d,
- z_vals,
- sample_dist,
- lod,
- sdf_network,
- rendering_network,
- background_alpha=None, # - no use here
- background_sampled_color=None, # - no use here
- background_rgb=None, # - no use here
- alpha_inter_ratio=0.0,
- # * related to conditional feature
- conditional_volume=None,
- conditional_valid_mask_volume=None,
- # * 2d feature maps
- feature_maps=None,
- color_maps=None,
- w2cs=None,
- intrinsics=None,
- img_wh=None,
- query_c2w=None, # - used for testing
- if_general_rendering=True,
- if_render_with_grad=True,
- # * used for blending mlp rendering network
- img_index=None,
- rays_uv=None,
- # * used for clear bg and fg
- bg_num=0
- ):
- device = rays_o.device
- N_rays = rays_o.shape[0]
- _, n_samples = z_vals.shape
- dists = z_vals[..., 1:] - z_vals[..., :-1]
- dists = torch.cat([dists, torch.Tensor([sample_dist]).expand(dists[..., :1].shape).to(device)], -1)
-
- mid_z_vals = z_vals + dists * 0.5
- mid_dists = mid_z_vals[..., 1:] - mid_z_vals[..., :-1]
-
- pts = rays_o[:, None, :] + rays_d[:, None, :] * mid_z_vals[..., :, None] # n_rays, n_samples, 3
- dirs = rays_d[:, None, :].expand(pts.shape)
-
- pts = pts.reshape(-1, 3)
- dirs = dirs.reshape(-1, 3)
-
- # * if conditional_volume is restored from sparse volume, need mask for pts
- if conditional_valid_mask_volume is not None:
- pts_mask = self.get_pts_mask_for_conditional_volume(pts, conditional_valid_mask_volume)
- pts_mask = pts_mask.reshape(N_rays, n_samples).float().detach()
- pts_mask_bool = (pts_mask > 0).view(-1)
-
-            if torch.sum(pts_mask_bool.float()) < 1:  # ! when rendering a full image, this case may occur
- pts_mask_bool[:100] = True
-
-        else:
-            pts_mask = torch.ones([N_rays, n_samples]).to(pts.device)
-            pts_mask_bool = (pts_mask > 0).view(-1)
- # import ipdb; ipdb.set_trace()
- # pts_valid = pts[pts_mask_bool]
- sdf_nn_output = sdf_network.sdf(pts[pts_mask_bool], conditional_volume, lod=lod)
-
- sdf = torch.ones([N_rays * n_samples, 1]).to(pts.dtype).to(device) * 100
- sdf[pts_mask_bool] = sdf_nn_output['sdf_pts_scale%d' % lod] # [N_rays*n_samples, 1]
- feature_vector_valid = sdf_nn_output['sdf_features_pts_scale%d' % lod]
- feature_vector = torch.zeros([N_rays * n_samples, feature_vector_valid.shape[1]]).to(pts.dtype).to(device)
- feature_vector[pts_mask_bool] = feature_vector_valid
-
- # * estimate alpha from sdf
- gradients = torch.zeros([N_rays * n_samples, 3]).to(pts.dtype).to(device)
- # import ipdb; ipdb.set_trace()
- gradients[pts_mask_bool] = sdf_network.gradient(
- pts[pts_mask_bool], conditional_volume, lod=lod).squeeze()
-
- sampled_color_mlp = None
- rendering_valid_mask_mlp = None
- sampled_color_patch = None
- rendering_patch_mask = None
-
- if self.if_fitted_rendering: # used for fine-tuning
- position_latent = sdf_nn_output['sampled_latent_scale%d' % lod]
- sampled_color_mlp = torch.zeros([N_rays * n_samples, 3]).to(pts.dtype).to(device)
- sampled_color_mlp_mask = torch.zeros([N_rays * n_samples, 1]).to(pts.dtype).to(device)
-
- # - extract pixel
- pts_pixel_color, pts_pixel_mask = self.patch_projector.pixel_warp(
- pts[pts_mask_bool][:, None, :], color_maps, intrinsics,
- w2cs, img_wh=None) # [N_rays * n_samples,1, N_views, 3] , [N_rays*n_samples, 1, N_views]
- pts_pixel_color = pts_pixel_color[:, 0, :, :] # [N_rays * n_samples, N_views, 3]
- pts_pixel_mask = pts_pixel_mask[:, 0, :] # [N_rays*n_samples, N_views]
-
- # - extract patch
- if_patch_blending = False if rays_uv is None else True
- pts_patch_color, pts_patch_mask = None, None
- if if_patch_blending:
- pts_patch_color, pts_patch_mask = self.patch_projector.patch_warp(
- pts.reshape([N_rays, n_samples, 3]),
- rays_uv, gradients.reshape([N_rays, n_samples, 3]),
- color_maps,
- intrinsics[0], intrinsics,
- query_c2w[0], torch.inverse(w2cs), img_wh=None
- ) # (N_rays, n_samples, N_src, Npx, 3), (N_rays, n_samples, N_src, Npx)
- N_src, Npx = pts_patch_mask.shape[2:]
- pts_patch_color = pts_patch_color.view(N_rays * n_samples, N_src, Npx, 3)[pts_mask_bool]
- pts_patch_mask = pts_patch_mask.view(N_rays * n_samples, N_src, Npx)[pts_mask_bool]
-
- sampled_color_patch = torch.zeros([N_rays * n_samples, Npx, 3]).to(device)
- sampled_color_patch_mask = torch.zeros([N_rays * n_samples, 1]).to(device)
-
- sampled_color_mlp_, sampled_color_mlp_mask_, \
- sampled_color_patch_, sampled_color_patch_mask_ = sdf_network.color_blend(
- pts[pts_mask_bool],
- position_latent,
- gradients[pts_mask_bool],
- dirs[pts_mask_bool],
- feature_vector[pts_mask_bool],
- img_index=img_index,
- pts_pixel_color=pts_pixel_color,
- pts_pixel_mask=pts_pixel_mask,
- pts_patch_color=pts_patch_color,
- pts_patch_mask=pts_patch_mask
-
- ) # [n, 3], [n, 1]
- sampled_color_mlp[pts_mask_bool] = sampled_color_mlp_
- sampled_color_mlp_mask[pts_mask_bool] = sampled_color_mlp_mask_.float()
- sampled_color_mlp = sampled_color_mlp.view(N_rays, n_samples, 3)
- sampled_color_mlp_mask = sampled_color_mlp_mask.view(N_rays, n_samples)
- rendering_valid_mask_mlp = torch.mean(pts_mask * sampled_color_mlp_mask, dim=-1, keepdim=True) > 0.5
-
- # patch blending
- if if_patch_blending:
- sampled_color_patch[pts_mask_bool] = sampled_color_patch_
- sampled_color_patch_mask[pts_mask_bool] = sampled_color_patch_mask_.float()
- sampled_color_patch = sampled_color_patch.view(N_rays, n_samples, Npx, 3)
- sampled_color_patch_mask = sampled_color_patch_mask.view(N_rays, n_samples)
- rendering_patch_mask = torch.mean(pts_mask * sampled_color_patch_mask, dim=-1,
- keepdim=True) > 0.5 # [N_rays, 1]
- else:
- sampled_color_patch, rendering_patch_mask = None, None
-
- if if_general_rendering: # used for general training
- # [512, 128, 16]; [4, 512, 128, 59]; [4, 512, 128, 4]
- ren_geo_feats, ren_rgb_feats, ren_ray_diff, ren_mask, _, _ = self.rendering_projector.compute(
- pts.view(N_rays, n_samples, 3),
- # * 3d geometry feature volumes
- geometryVolume=conditional_volume[0],
- geometryVolumeMask=conditional_valid_mask_volume[0],
- # * 2d rendering feature maps
- rendering_feature_maps=feature_maps, # [n_views, 56, 256, 256]
- color_maps=color_maps,
- w2cs=w2cs,
- intrinsics=intrinsics,
- img_wh=img_wh,
- query_img_idx=0, # the index of the N_views dim for rendering
- query_c2w=query_c2w,
- )
-
- # (N_rays, n_samples, 3)
- if if_render_with_grad:
- # import ipdb; ipdb.set_trace()
- # [nrays, 3] [nrays, 1]
- sampled_color, rendering_valid_mask = rendering_network(
- ren_geo_feats, ren_rgb_feats, ren_ray_diff, ren_mask)
- # import ipdb; ipdb.set_trace()
- else:
- with torch.no_grad():
- sampled_color, rendering_valid_mask = rendering_network(
- ren_geo_feats, ren_rgb_feats, ren_ray_diff, ren_mask)
- else:
- sampled_color, rendering_valid_mask = None, None
-
- inv_variance = self.variance_network(feature_vector)[:, :1].clip(1e-6, 1e6)
-
- true_dot_val = (dirs * gradients).sum(-1, keepdim=True) # * calculate
-
- iter_cos = -(F.relu(-true_dot_val * 0.5 + 0.5) * (1.0 - alpha_inter_ratio) + F.relu(
- -true_dot_val) * alpha_inter_ratio) # always non-positive
-
- iter_cos = iter_cos * pts_mask.view(-1, 1)
-
- true_estimate_sdf_half_next = sdf + iter_cos.clip(-10.0, 10.0) * dists.reshape(-1, 1) * 0.5
- true_estimate_sdf_half_prev = sdf - iter_cos.clip(-10.0, 10.0) * dists.reshape(-1, 1) * 0.5
-
- prev_cdf = torch.sigmoid(true_estimate_sdf_half_prev * inv_variance)
- next_cdf = torch.sigmoid(true_estimate_sdf_half_next * inv_variance)
-
- p = prev_cdf - next_cdf
- c = prev_cdf
-
- if self.alpha_type == 'div':
- alpha_sdf = ((p + 1e-5) / (c + 1e-5)).reshape(N_rays, n_samples).clip(0.0, 1.0)
- elif self.alpha_type == 'uniform':
- uniform_estimate_sdf_half_next = sdf - dists.reshape(-1, 1) * 0.5
- uniform_estimate_sdf_half_prev = sdf + dists.reshape(-1, 1) * 0.5
- uniform_prev_cdf = torch.sigmoid(uniform_estimate_sdf_half_prev * inv_variance)
- uniform_next_cdf = torch.sigmoid(uniform_estimate_sdf_half_next * inv_variance)
- uniform_alpha = F.relu(
- (uniform_prev_cdf - uniform_next_cdf + 1e-5) / (uniform_prev_cdf + 1e-5)).reshape(
- N_rays, n_samples).clip(0.0, 1.0)
- alpha_sdf = uniform_alpha
- else:
- assert False
-
- alpha = alpha_sdf
-
- # - apply pts_mask
- alpha = alpha * pts_mask
-
- # pts_radius = torch.linalg.norm(pts, ord=2, dim=-1, keepdim=True).reshape(N_rays, n_samples)
- # inside_sphere = (pts_radius < 1.0).float().detach()
- # relax_inside_sphere = (pts_radius < 1.2).float().detach()
- inside_sphere = pts_mask
- relax_inside_sphere = pts_mask
-
- weights = alpha * torch.cumprod(torch.cat([torch.ones([N_rays, 1]).to(device), 1. - alpha + 1e-7], -1), -1)[:,
- :-1] # n_rays, n_samples
- weights_sum = weights.sum(dim=-1, keepdim=True)
- alpha_sum = alpha.sum(dim=-1, keepdim=True)
-
- if bg_num > 0:
- weights_sum_fg = weights[:, :-bg_num].sum(dim=-1, keepdim=True)
- else:
- weights_sum_fg = weights_sum
-
- if sampled_color is not None:
- color = (sampled_color * weights[:, :, None]).sum(dim=1)
- else:
- color = None
- # import ipdb; ipdb.set_trace()
-
- if background_rgb is not None and color is not None:
- color = color + background_rgb * (1.0 - weights_sum)
- # print("color device:" + str(color.device))
- # if color is not None:
- # # import ipdb; ipdb.set_trace()
- # color = color + (1.0 - weights_sum)
-
-
- ###################* mlp color rendering #####################
- color_mlp = None
- # import ipdb; ipdb.set_trace()
- if sampled_color_mlp is not None:
- color_mlp = (sampled_color_mlp * weights[:, :, None]).sum(dim=1)
-
- if background_rgb is not None and color_mlp is not None:
- color_mlp = color_mlp + background_rgb * (1.0 - weights_sum)
-
- ############################ * patch blending ################
- blended_color_patch = None
- if sampled_color_patch is not None:
- blended_color_patch = (sampled_color_patch * weights[:, :, None, None]).sum(dim=1) # [N_rays, Npx, 3]
-
- ######################################################
-
- gradient_error = (torch.linalg.norm(gradients.reshape(N_rays, n_samples, 3), ord=2,
- dim=-1) - 1.0) ** 2
- # ! the gradient normal should be masked out, the pts out of the bounding box should also be penalized
- gradient_error = (pts_mask * gradient_error).sum() / (
- (pts_mask).sum() + 1e-5)
-
- depth = (mid_z_vals * weights[:, :n_samples]).sum(dim=1, keepdim=True)
- # print("[TEST]: weights_sum in render_core", weights_sum.mean())
- # print("[TEST]: weights_sum in render_core NAN number", weights_sum.isnan().sum())
- # if weights_sum.isnan().sum() > 0:
- # import ipdb; ipdb.set_trace()
- return {
- 'color': color,
- 'color_mask': rendering_valid_mask, # (N_rays, 1)
- 'color_mlp': color_mlp,
- 'color_mlp_mask': rendering_valid_mask_mlp,
- 'sdf': sdf, # (N_rays, n_samples)
- 'depth': depth, # (N_rays, 1)
- 'dists': dists,
- 'gradients': gradients.reshape(N_rays, n_samples, 3),
- 'variance': 1.0 / inv_variance,
- 'mid_z_vals': mid_z_vals,
- 'weights': weights,
- 'weights_sum': weights_sum,
- 'alpha_sum': alpha_sum,
- 'alpha_mean': alpha.mean(),
- 'cdf': c.reshape(N_rays, n_samples),
- 'gradient_error': gradient_error,
- 'inside_sphere': inside_sphere,
- 'blended_color_patch': blended_color_patch,
- 'blended_color_patch_mask': rendering_patch_mask,
- 'weights_sum_fg': weights_sum_fg
- }
-
- def render(self, rays_o, rays_d, near, far, sdf_network, rendering_network,
- perturb_overwrite=-1,
- background_rgb=None,
- alpha_inter_ratio=0.0,
- # * related to conditional feature
- lod=None,
- conditional_volume=None,
- conditional_valid_mask_volume=None,
- # * 2d feature maps
- feature_maps=None,
- color_maps=None,
- w2cs=None,
- intrinsics=None,
- img_wh=None,
- query_c2w=None, # -used for testing
- if_general_rendering=True,
- if_render_with_grad=True,
- # * used for blending mlp rendering network
- img_index=None,
- rays_uv=None,
- # * importance sample for second lod network
- pre_sample=False, # no use here
- # * for clear foreground
- bg_ratio=0.0
- ):
- device = rays_o.device
- N_rays = len(rays_o)
- # sample_dist = 2.0 / self.n_samples
- sample_dist = ((far - near) / self.n_samples).mean().item()
- z_vals = torch.linspace(0.0, 1.0, self.n_samples).to(device)
- z_vals = near + (far - near) * z_vals[None, :]
-
- bg_num = int(self.n_samples * bg_ratio)
-
- if z_vals.shape[0] == 1:
- z_vals = z_vals.repeat(N_rays, 1)
-
- if bg_num > 0:
- z_vals_bg = z_vals[:, self.n_samples - bg_num:]
- z_vals = z_vals[:, :self.n_samples - bg_num]
-
- n_samples = self.n_samples - bg_num
- perturb = self.perturb
-
- # - significantly speed up training, for the second lod network
- if pre_sample:
- z_vals = self.sample_z_vals_from_maskVolume(rays_o, rays_d, near, far,
- conditional_valid_mask_volume)
-
- if perturb_overwrite >= 0:
- perturb = perturb_overwrite
- if perturb > 0:
- # get intervals between samples
- mids = .5 * (z_vals[..., 1:] + z_vals[..., :-1])
- upper = torch.cat([mids, z_vals[..., -1:]], -1)
- lower = torch.cat([z_vals[..., :1], mids], -1)
- # stratified samples in those intervals
- t_rand = torch.rand(z_vals.shape).to(device)
- z_vals = lower + (upper - lower) * t_rand
-
- background_alpha = None
- background_sampled_color = None
- z_val_before = z_vals.clone()
- # Up sample
- if self.n_importance > 0:
- with torch.no_grad():
- pts = rays_o[:, None, :] + rays_d[:, None, :] * z_vals[..., :, None]
-
- sdf_outputs = sdf_network.sdf(
- pts.reshape(-1, 3), conditional_volume, lod=lod)
- # pdb.set_trace()
- sdf = sdf_outputs['sdf_pts_scale%d' % lod].reshape(N_rays, self.n_samples - bg_num)
-
- n_steps = 4
- for i in range(n_steps):
- new_z_vals = self.up_sample(rays_o, rays_d, z_vals, sdf, self.n_importance // n_steps,
- 64 * 2 ** i,
- conditional_valid_mask_volume=conditional_valid_mask_volume,
- )
-
- # if new_z_vals.isnan().sum() > 0:
- # import ipdb; ipdb.set_trace()
-
- z_vals, sdf = self.cat_z_vals(
- rays_o, rays_d, z_vals, new_z_vals, sdf, lod,
- sdf_network, gru_fusion=False,
- conditional_volume=conditional_volume,
- conditional_valid_mask_volume=conditional_valid_mask_volume,
- )
-
- del sdf
-
- n_samples = self.n_samples + self.n_importance
-
- # Background
- ret_outside = None
-
- # Render
- if bg_num > 0:
- z_vals = torch.cat([z_vals, z_vals_bg], dim=1)
- # if z_vals.isnan().sum() > 0:
- # import ipdb; ipdb.set_trace()
- ret_fine = self.render_core(rays_o,
- rays_d,
- z_vals,
- sample_dist,
- lod,
- sdf_network,
- rendering_network,
- background_rgb=background_rgb,
- background_alpha=background_alpha,
- background_sampled_color=background_sampled_color,
- alpha_inter_ratio=alpha_inter_ratio,
- # * related to conditional feature
- conditional_volume=conditional_volume,
- conditional_valid_mask_volume=conditional_valid_mask_volume,
- # * 2d feature maps
- feature_maps=feature_maps,
- color_maps=color_maps,
- w2cs=w2cs,
- intrinsics=intrinsics,
- img_wh=img_wh,
- query_c2w=query_c2w,
- if_general_rendering=if_general_rendering,
- if_render_with_grad=if_render_with_grad,
- # * used for blending mlp rendering network
- img_index=img_index,
- rays_uv=rays_uv
- )
-
- color_fine = ret_fine['color']
-
- if self.n_outside > 0:
- color_fine_mask = torch.logical_or(ret_fine['color_mask'], ret_outside['color_mask'])
- else:
- color_fine_mask = ret_fine['color_mask']
-
- weights = ret_fine['weights']
- weights_sum = ret_fine['weights_sum']
-
- gradients = ret_fine['gradients']
- mid_z_vals = ret_fine['mid_z_vals']
-
- # depth = (mid_z_vals * weights[:, :n_samples]).sum(dim=1, keepdim=True)
- depth = ret_fine['depth']
-        depth_variance = ((mid_z_vals - depth) ** 2 * weights[:, :n_samples]).sum(dim=-1, keepdim=True)
- variance = ret_fine['variance'].reshape(N_rays, n_samples).mean(dim=-1, keepdim=True)
-
- # - randomly sample points from the volume, and maximize the sdf
- pts_random = torch.rand([1024, 3]).float().to(device) * 2 - 1 # normalized to (-1, 1)
- sdf_random = sdf_network.sdf(pts_random, conditional_volume, lod=lod)['sdf_pts_scale%d' % lod]
-
- result = {
- 'depth': depth,
- 'color_fine': color_fine,
- 'color_fine_mask': color_fine_mask,
- 'color_outside': ret_outside['color'] if ret_outside is not None else None,
- 'color_outside_mask': ret_outside['color_mask'] if ret_outside is not None else None,
- 'color_mlp': ret_fine['color_mlp'],
- 'color_mlp_mask': ret_fine['color_mlp_mask'],
- 'variance': variance.mean(),
- 'cdf_fine': ret_fine['cdf'],
-            'depth_variance': depth_variance,
- 'weights_sum': weights_sum,
- 'weights_max': torch.max(weights, dim=-1, keepdim=True)[0],
- 'alpha_sum': ret_fine['alpha_sum'].mean(),
- 'alpha_mean': ret_fine['alpha_mean'],
- 'gradients': gradients,
- 'weights': weights,
- 'gradient_error_fine': ret_fine['gradient_error'],
- 'inside_sphere': ret_fine['inside_sphere'],
- 'sdf': ret_fine['sdf'],
- 'sdf_random': sdf_random,
- 'blended_color_patch': ret_fine['blended_color_patch'],
- 'blended_color_patch_mask': ret_fine['blended_color_patch_mask'],
- 'weights_sum_fg': ret_fine['weights_sum_fg']
- }
-
- return result
-
- @torch.no_grad()
- def sample_z_vals_from_sdfVolume(self, rays_o, rays_d, near, far, sdf_volume, mask_volume):
-        # ? importance sampling based on the sdf; seems too biased towards the pre-estimation
- device = rays_o.device
- N_rays = len(rays_o)
- n_samples = self.n_samples * 2
-
- z_vals = torch.linspace(0.0, 1.0, n_samples).to(device)
- z_vals = near + (far - near) * z_vals[None, :]
-
- if z_vals.shape[0] == 1:
- z_vals = z_vals.repeat(N_rays, 1)
-
- pts = rays_o[:, None, :] + rays_d[:, None, :] * z_vals[..., :, None]
-
- sdf = self.get_pts_mask_for_conditional_volume(pts.view(-1, 3), sdf_volume).reshape([N_rays, n_samples])
-
- new_z_vals = self.up_sample(rays_o, rays_d, z_vals, sdf, self.n_samples,
- 200,
- conditional_valid_mask_volume=mask_volume,
- )
- return new_z_vals
-
- @torch.no_grad()
- def sample_z_vals_from_maskVolume(self, rays_o, rays_d, near, far, mask_volume): # don't use
- device = rays_o.device
- N_rays = len(rays_o)
- n_samples = self.n_samples * 2
-
- z_vals = torch.linspace(0.0, 1.0, n_samples).to(device)
- z_vals = near + (far - near) * z_vals[None, :]
-
- if z_vals.shape[0] == 1:
- z_vals = z_vals.repeat(N_rays, 1)
-
- mid_z_vals = (z_vals[:, 1:] + z_vals[:, :-1]) * 0.5
-
- pts = rays_o[:, None, :] + rays_d[:, None, :] * mid_z_vals[..., :, None]
-
- pts_mask = self.get_pts_mask_for_conditional_volume(pts.view(-1, 3), mask_volume).reshape(
- [N_rays, n_samples - 1])
-
- # empty voxel set to 0.1, non-empty voxel set to 1
- weights = torch.where(pts_mask > 0, torch.ones_like(pts_mask).to(device),
- 0.1 * torch.ones_like(pts_mask).to(device))
-
- # sample more pts in non-empty voxels
- z_samples = sample_pdf(z_vals, weights, self.n_samples, det=True).detach()
- return z_samples
-
- @torch.no_grad()
- def filter_pts_by_depthmaps(self, coords, pred_depth_maps, proj_matrices,
- partial_vol_origin, voxel_size,
- near, far, depth_interval, d_plane_nums):
- """
-        Use the pred_depth_maps to remove redundant pts (pruned by sdf; an sdf always has two sides, and the back side is useless)
- :param coords: [n, 3] int coords
- :param pred_depth_maps: [N_views, 1, h, w]
- :param proj_matrices: [N_views, 4, 4]
- :param partial_vol_origin: [3]
- :param voxel_size: 1
- :param near: 1
- :param far: 1
- :param depth_interval: 1
- :param d_plane_nums: 1
- :return:
- """
- device = pred_depth_maps.device
- n_views, _, sizeH, sizeW = pred_depth_maps.shape
-
- if len(partial_vol_origin.shape) == 1:
- partial_vol_origin = partial_vol_origin[None, :]
- pts = coords * voxel_size + partial_vol_origin
-
- rs_grid = pts.unsqueeze(0).expand(n_views, -1, -1)
- rs_grid = rs_grid.permute(0, 2, 1).contiguous() # [n_views, 3, n_pts]
- nV = rs_grid.shape[-1]
- rs_grid = torch.cat([rs_grid, torch.ones([n_views, 1, nV]).to(device)], dim=1) # [n_views, 4, n_pts]
-
- # Project grid
- im_p = proj_matrices @ rs_grid # - transform world pts to image UV space # [n_views, 4, n_pts]
- im_x, im_y, im_z = im_p[:, 0], im_p[:, 1], im_p[:, 2]
- im_x = im_x / im_z
- im_y = im_y / im_z
-
- im_grid = torch.stack([2 * im_x / (sizeW - 1) - 1, 2 * im_y / (sizeH - 1) - 1], dim=-1)
-
- im_grid = im_grid.view(n_views, 1, -1, 2)
- sampled_depths = torch.nn.functional.grid_sample(pred_depth_maps, im_grid, mode='bilinear',
- padding_mode='zeros',
- align_corners=True)[:, 0, 0, :] # [n_views, n_pts]
- sampled_depths_valid = (sampled_depths > 0.5 * near).float()
- valid_d_min = (sampled_depths - d_plane_nums * depth_interval).clamp(near.item(),
- far.item()) * sampled_depths_valid
- valid_d_max = (sampled_depths + d_plane_nums * depth_interval).clamp(near.item(),
- far.item()) * sampled_depths_valid
-
- mask = im_grid.abs() <= 1
- mask = mask[:, 0] # [n_views, n_pts, 2]
- mask = (mask.sum(dim=-1) == 2) & (im_z > valid_d_min) & (im_z < valid_d_max)
-
- mask = mask.view(n_views, -1)
- mask = mask.permute(1, 0).contiguous() # [num_pts, nviews]
-
- mask_final = torch.sum(mask.float(), dim=1, keepdim=False) > 0
-
- return mask_final
-
- @torch.no_grad()
- def get_valid_sparse_coords_by_sdf_depthfilter(self, sdf_volume, coords_volume, mask_volume, feature_volume,
- pred_depth_maps, proj_matrices,
- partial_vol_origin, voxel_size,
- near, far, depth_interval, d_plane_nums,
- threshold=0.02, maximum_pts=110000):
- """
- assume batch size == 1, from the first lod to get sparse voxels
- :param sdf_volume: [1, X, Y, Z]
- :param coords_volume: [3, X, Y, Z]
- :param mask_volume: [1, X, Y, Z]
- :param feature_volume: [C, X, Y, Z]
- :param threshold:
- :return:
- """
- device = coords_volume.device
- _, dX, dY, dZ = coords_volume.shape
-
- def prune(sdf_pts, coords_pts, mask_volume, threshold):
- occupancy_mask = (torch.abs(sdf_pts) < threshold).squeeze(1) # [num_pts]
- valid_coords = coords_pts[occupancy_mask]
-
- # - filter backside surface by depth maps
- mask_filtered = self.filter_pts_by_depthmaps(valid_coords, pred_depth_maps, proj_matrices,
- partial_vol_origin, voxel_size,
- near, far, depth_interval, d_plane_nums)
- valid_coords = valid_coords[mask_filtered]
-
- # - dilate
- occupancy_mask = sparse_to_dense_channel(valid_coords, 1, [dX, dY, dZ], 1, 0, device) # [dX, dY, dZ, 1]
-
- # - dilate
- occupancy_mask = occupancy_mask.float()
- occupancy_mask = occupancy_mask.view(1, 1, dX, dY, dZ)
- occupancy_mask = F.avg_pool3d(occupancy_mask, kernel_size=7, stride=1, padding=3)
- occupancy_mask = occupancy_mask.view(-1, 1) > 0
-
- final_mask = torch.logical_and(mask_volume, occupancy_mask)[:, 0] # [num_pts]
-
- return final_mask, torch.sum(final_mask.float())
-
- C, dX, dY, dZ = feature_volume.shape
- sdf_volume = sdf_volume.permute(1, 2, 3, 0).contiguous().view(-1, 1)
- coords_volume = coords_volume.permute(1, 2, 3, 0).contiguous().view(-1, 3)
- mask_volume = mask_volume.permute(1, 2, 3, 0).contiguous().view(-1, 1)
- feature_volume = feature_volume.permute(1, 2, 3, 0).contiguous().view(-1, C)
-
- # - for check
- # sdf_volume = torch.rand_like(sdf_volume).float().to(sdf_volume.device) * 0.02
-
- final_mask, valid_num = prune(sdf_volume, coords_volume, mask_volume, threshold)
-
- while (valid_num > maximum_pts) and (threshold > 0.003):
- threshold = threshold - 0.002
- final_mask, valid_num = prune(sdf_volume, coords_volume, mask_volume, threshold)
-
- valid_coords = coords_volume[final_mask] # [N, 3]
- valid_feature = feature_volume[final_mask] # [N, C]
-
- valid_coords = torch.cat([torch.ones([valid_coords.shape[0], 1]).to(valid_coords.device) * 0,
- valid_coords], dim=1) # [N, 4], append batch idx
-
- # ! if the valid_num is still larger than maximum_pts, sample part of pts
- if valid_num > maximum_pts:
- valid_num = valid_num.long()
- occupancy = torch.ones([valid_num]).to(device) > 0
- choice = np.random.choice(valid_num.cpu().numpy(), valid_num.cpu().numpy() - maximum_pts,
- replace=False)
- ind = torch.nonzero(occupancy).to(device)
- occupancy[ind[choice]] = False
- valid_coords = valid_coords[occupancy]
- valid_feature = valid_feature[occupancy]
-
- print(threshold, "randomly sample to save memory")
-
- return valid_coords, valid_feature
-
- @torch.no_grad()
- def get_valid_sparse_coords_by_sdf(self, sdf_volume, coords_volume, mask_volume, feature_volume, threshold=0.02,
- maximum_pts=110000):
- """
- assume batch size == 1, from the first lod to get sparse voxels
- :param sdf_volume: [num_pts, 1]
- :param coords_volume: [3, X, Y, Z]
- :param mask_volume: [1, X, Y, Z]
- :param feature_volume: [C, X, Y, Z]
- :param threshold:
- :return:
- """
-
- def prune(sdf_volume, mask_volume, threshold):
- occupancy_mask = torch.abs(sdf_volume) < threshold # [num_pts, 1]
-
- # - dilate
- occupancy_mask = occupancy_mask.float()
- occupancy_mask = occupancy_mask.view(1, 1, dX, dY, dZ)
- occupancy_mask = F.avg_pool3d(occupancy_mask, kernel_size=7, stride=1, padding=3)
- occupancy_mask = occupancy_mask.view(-1, 1) > 0
-
- final_mask = torch.logical_and(mask_volume, occupancy_mask)[:, 0] # [num_pts]
-
- return final_mask, torch.sum(final_mask.float())
-
- C, dX, dY, dZ = feature_volume.shape
- coords_volume = coords_volume.permute(1, 2, 3, 0).contiguous().view(-1, 3)
- mask_volume = mask_volume.permute(1, 2, 3, 0).contiguous().view(-1, 1)
- feature_volume = feature_volume.permute(1, 2, 3, 0).contiguous().view(-1, C)
-
- final_mask, valid_num = prune(sdf_volume, mask_volume, threshold)
-
- while (valid_num > maximum_pts) and (threshold > 0.003):
- threshold = threshold - 0.002
- final_mask, valid_num = prune(sdf_volume, mask_volume, threshold)
-
- valid_coords = coords_volume[final_mask] # [N, 3]
- valid_feature = feature_volume[final_mask] # [N, C]
-
- valid_coords = torch.cat([torch.ones([valid_coords.shape[0], 1]).to(valid_coords.device) * 0,
- valid_coords], dim=1) # [N, 4], append batch idx
-
- # ! if the valid_num is still larger than maximum_pts, sample part of pts
- if valid_num > maximum_pts:
- device = sdf_volume.device
- valid_num = valid_num.long()
- occupancy = torch.ones([valid_num]).to(device) > 0
- choice = np.random.choice(valid_num.cpu().numpy(), valid_num.cpu().numpy() - maximum_pts,
- replace=False)
- ind = torch.nonzero(occupancy).to(device)
- occupancy[ind[choice]] = False
- valid_coords = valid_coords[occupancy]
- valid_feature = valid_feature[occupancy]
-
- print(threshold, "randomly sample to save memory")
-
- return valid_coords, valid_feature
-
- @torch.no_grad()
- def extract_fields(self, bound_min, bound_max, resolution, query_func, device,
- # * related to conditional feature
- **kwargs
- ):
- N = 64
- X = torch.linspace(bound_min[0], bound_max[0], resolution).to(device).split(N)
- Y = torch.linspace(bound_min[1], bound_max[1], resolution).to(device).split(N)
- Z = torch.linspace(bound_min[2], bound_max[2], resolution).to(device).split(N)
-
- u = np.zeros([resolution, resolution, resolution], dtype=np.float32)
- with torch.no_grad():
- for xi, xs in enumerate(X):
- for yi, ys in enumerate(Y):
- for zi, zs in enumerate(Z):
- xx, yy, zz = torch.meshgrid(xs, ys, zs, indexing="ij")
- pts = torch.cat([xx.reshape(-1, 1), yy.reshape(-1, 1), zz.reshape(-1, 1)], dim=-1)
-
- # ! attention, the query function is different for extract geometry and fields
- output = query_func(pts, **kwargs)
- sdf = output['sdf_pts_scale%d' % kwargs['lod']].reshape(len(xs), len(ys),
- len(zs)).detach().cpu().numpy()
-
- u[xi * N: xi * N + len(xs), yi * N: yi * N + len(ys), zi * N: zi * N + len(zs)] = -1 * sdf
- return u
-
- @torch.no_grad()
- def extract_geometry(self, sdf_network, bound_min, bound_max, resolution, threshold, device, occupancy_mask=None,
- # * 3d feature volume
- **kwargs
- ):
- # logging.info('threshold: {}'.format(threshold))
-
- u = self.extract_fields(bound_min, bound_max, resolution,
- lambda pts, **kwargs: sdf_network.sdf(pts, **kwargs),
- # - sdf need to be multiplied by -1
- device,
- # * 3d feature volume
- **kwargs
- )
- if occupancy_mask is not None:
- dX, dY, dZ = occupancy_mask.shape
- empty_mask = 1 - occupancy_mask
- empty_mask = empty_mask.view(1, 1, dX, dY, dZ)
- # - dilation
- # empty_mask = F.avg_pool3d(empty_mask, kernel_size=7, stride=1, padding=3)
- empty_mask = F.interpolate(empty_mask, [resolution, resolution, resolution], mode='nearest')
- empty_mask = empty_mask.view(resolution, resolution, resolution).cpu().numpy() > 0
- u[empty_mask] = -100
- del empty_mask
-
- vertices, triangles = mcubes.marching_cubes(u, threshold)
- b_max_np = bound_max.detach().cpu().numpy()
- b_min_np = bound_min.detach().cpu().numpy()
-
- vertices = vertices / (resolution - 1.0) * (b_max_np - b_min_np)[None, :] + b_min_np[None, :]
- return vertices, triangles, u
-
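A minimal sketch of the marching-cubes step used by extract_geometry, on a hypothetical sphere SDF: the field is negated as above, meshed at the zero isovalue, and the vertices are mapped from voxel indices back to the bounding box.

    import numpy as np
    import mcubes
    xs = np.linspace(-1., 1., 64)
    grid = np.stack(np.meshgrid(xs, xs, xs, indexing="ij"), axis=-1)
    u = -(np.linalg.norm(grid, axis=-1) - 0.5)                             # negated SDF of a radius-0.5 sphere
    vertices, triangles = mcubes.marching_cubes(u, 0.0)                    # mesh of the zero level set
    vertices = vertices / (64 - 1.0) * 2.0 - 1.0                           # voxel indices -> (-1, 1) coordinates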
- @torch.no_grad()
- def extract_depth_maps(self, sdf_network, con_volume, intrinsics, c2ws, H, W, near, far):
- """
- extract depth maps from the density volume
- :param con_volume: [1, 1+C, dX, dY, dZ] can by con_volume or sdf_volume
- :param c2ws: [B, 4, 4]
- :param H:
- :param W:
- :param near:
- :param far:
- :return:
- """
- device = con_volume.device
- batch_size = intrinsics.shape[0]
-
- with torch.no_grad():
- ys, xs = torch.meshgrid(torch.linspace(0, H - 1, H),
- torch.linspace(0, W - 1, W), indexing="ij") # pytorch's meshgrid has indexing='ij'
- p = torch.stack([xs, ys, torch.ones_like(ys)], dim=-1) # H, W, 3
-
- intrinsics_inv = torch.inverse(intrinsics)
-
- p = p.view(-1, 3).float().to(device) # N_rays, 3
- p = torch.matmul(intrinsics_inv[:, None, :3, :3], p[:, :, None]).squeeze() # Batch, N_rays, 3
- rays_v = p / torch.linalg.norm(p, ord=2, dim=-1, keepdim=True) # Batch, N_rays, 3
- rays_v = torch.matmul(c2ws[:, None, :3, :3], rays_v[:, :, :, None]).squeeze() # Batch, N_rays, 3
- rays_o = c2ws[:, None, :3, 3].expand(rays_v.shape) # Batch, N_rays, 3
- rays_d = rays_v
-
- rays_o = rays_o.contiguous().view(-1, 3)
- rays_d = rays_d.contiguous().view(-1, 3)
-
- ################## - sphere tracer to extract depth maps ######################
- depth_masks_sphere, depth_maps_sphere = self.ray_tracer.extract_depth_maps(
- rays_o, rays_d,
- near[None, :].repeat(rays_o.shape[0], 1),
- far[None, :].repeat(rays_o.shape[0], 1),
- sdf_network, con_volume
- )
-
- depth_maps = depth_maps_sphere.view(batch_size, 1, H, W)
- depth_masks = depth_masks_sphere.view(batch_size, 1, H, W)
-
- depth_maps = torch.where(depth_masks, depth_maps,
- torch.zeros_like(depth_masks.float()).to(device)) # fill invalid pixels by 0
-
- return depth_maps, depth_masks
diff --git a/One-2-3-45-master 2/reconstruction/models/sparse_sdf_network.py b/One-2-3-45-master 2/reconstruction/models/sparse_sdf_network.py
deleted file mode 100644
index 817f40ed08b7cb65fb284a4666d6f6a4a3c52683..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/models/sparse_sdf_network.py
+++ /dev/null
@@ -1,907 +0,0 @@
-import numpy as np
-import torch
-import torch.nn as nn
-import torch.nn.functional as F
-from torchsparse.tensor import PointTensor, SparseTensor
-import torchsparse.nn as spnn
-
-from tsparse.modules import SparseCostRegNet
-from tsparse.torchsparse_utils import sparse_to_dense_channel
-from ops.grid_sampler import grid_sample_3d, tricubic_sample_3d
-
-# from .gru_fusion import GRUFusion
-from ops.back_project import back_project_sparse_type
-from ops.generate_grids import generate_grid
-
-from inplace_abn import InPlaceABN
-
-from models.embedder import Embedding
-from models.featurenet import ConvBnReLU
-
-import pdb
-import random
-
-torch._C._jit_set_profiling_executor(False)
-torch._C._jit_set_profiling_mode(False)
-
-
-@torch.jit.script
-def fused_mean_variance(x, weight):
- mean = torch.sum(x * weight, dim=1, keepdim=True)
- var = torch.sum(weight * (x - mean) ** 2, dim=1, keepdim=True)
- return mean, var
-
-
-class LatentSDFLayer(nn.Module):
- def __init__(self,
- d_in=3,
- d_out=129,
- d_hidden=128,
- n_layers=4,
- skip_in=(4,),
- multires=0,
- bias=0.5,
- geometric_init=True,
- weight_norm=True,
- activation='softplus',
- d_conditional_feature=16):
- super(LatentSDFLayer, self).__init__()
-
- self.d_conditional_feature = d_conditional_feature
-
-        # concat the latent code with each layer's input, except for the first and the last layer
- dims_in = [d_in] + [d_hidden + d_conditional_feature for _ in range(n_layers - 2)] + [d_hidden]
- dims_out = [d_hidden for _ in range(n_layers - 1)] + [d_out]
-
- self.embed_fn_fine = None
-
- if multires > 0:
- embed_fn = Embedding(in_channels=d_in, N_freqs=multires) # * include the input
- self.embed_fn_fine = embed_fn
- dims_in[0] = embed_fn.out_channels
-
- self.num_layers = n_layers
- self.skip_in = skip_in
-
- for l in range(0, self.num_layers - 1):
- if l in self.skip_in:
- in_dim = dims_in[l] + dims_in[0]
- else:
- in_dim = dims_in[l]
-
- out_dim = dims_out[l]
- lin = nn.Linear(in_dim, out_dim)
-
- if geometric_init: # - from IDR code,
- if l == self.num_layers - 2:
- torch.nn.init.normal_(lin.weight, mean=np.sqrt(np.pi) / np.sqrt(in_dim), std=0.0001)
- torch.nn.init.constant_(lin.bias, -bias)
- # the channels for latent codes are set to 0
- torch.nn.init.constant_(lin.weight[:, -d_conditional_feature:], 0.0)
- torch.nn.init.constant_(lin.bias[-d_conditional_feature:], 0.0)
-
- elif multires > 0 and l == 0: # the first layer
- torch.nn.init.constant_(lin.bias, 0.0)
- # * the channels for position embeddings are set to 0
- torch.nn.init.constant_(lin.weight[:, 3:], 0.0)
-                    # * the channels for the xyz coordinates (3 channels) are initialized from a normal distribution
- torch.nn.init.normal_(lin.weight[:, :3], 0.0, np.sqrt(2) / np.sqrt(out_dim))
- elif multires > 0 and l in self.skip_in:
- torch.nn.init.constant_(lin.bias, 0.0)
- torch.nn.init.normal_(lin.weight, 0.0, np.sqrt(2) / np.sqrt(out_dim))
- # * the channels for position embeddings (and conditional_feature) are initialized to 0
- torch.nn.init.constant_(lin.weight[:, -(dims_in[0] - 3 + d_conditional_feature):], 0.0)
- else:
- torch.nn.init.constant_(lin.bias, 0.0)
- torch.nn.init.normal_(lin.weight, 0.0, np.sqrt(2) / np.sqrt(out_dim))
- # the channels for latent code are initialized to 0
- torch.nn.init.constant_(lin.weight[:, -d_conditional_feature:], 0.0)
-
- if weight_norm:
- lin = nn.utils.weight_norm(lin)
-
- setattr(self, "lin" + str(l), lin)
-
- if activation == 'softplus':
- self.activation = nn.Softplus(beta=100)
- else:
- assert activation == 'relu'
- self.activation = nn.ReLU()
-
- def forward(self, inputs, latent):
- inputs = inputs
- if self.embed_fn_fine is not None:
- inputs = self.embed_fn_fine(inputs)
-
-        # - only so that the lod1 network can reuse the pretrained params of the lod0 network
- if latent.shape[1] != self.d_conditional_feature:
- latent = torch.cat([latent, latent], dim=1)
-
- x = inputs
- for l in range(0, self.num_layers - 1):
- lin = getattr(self, "lin" + str(l))
-
- # * due to the conditional bias, different from original neus version
- if l in self.skip_in:
- x = torch.cat([x, inputs], 1) / np.sqrt(2)
-
- if 0 < l < self.num_layers - 1:
- x = torch.cat([x, latent], 1)
-
- x = lin(x)
-
- if l < self.num_layers - 2:
- x = self.activation(x)
-
- return x
-
-
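[Editor's note] A small usage sketch of LatentSDFLayer, not part of the deleted file: the layer maps query points in (-1, 1) plus a per-point 16-d conditional feature to an SDF value (first output channel) and a feature vector (remaining channels), which is how the sdf() method below consumes it. The random inputs are illustrative only.

    import torch

    layer = LatentSDFLayer(d_in=3, d_hidden=128, n_layers=4, multires=6,
                           d_conditional_feature=16)
    pts = torch.rand(1024, 3) * 2 - 1      # query points in (-1, 1)
    latent = torch.randn(1024, 16)         # per-point conditional features
    out = layer(pts, latent)
    sdf, feat = out[:, :1], out[:, 1:]     # SDF value and per-point feature
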
-class SparseSdfNetwork(nn.Module):
- '''
- Coarse-to-fine sparse cost regularization network
-    returns sparse volume features for extracting the SDF
- '''
-
- def __init__(self, lod, ch_in, voxel_size, vol_dims,
- hidden_dim=128, activation='softplus',
- cost_type='variance_mean',
- d_pyramid_feature_compress=16,
- regnet_d_out=8, num_sdf_layers=4,
- multires=6,
- ):
- super(SparseSdfNetwork, self).__init__()
-
- self.lod = lod # - gradually training, the current regularization lod
- self.ch_in = ch_in
- self.voxel_size = voxel_size # - the voxel size of the current volume
- self.vol_dims = torch.tensor(vol_dims) # - the dims of the current volume
-
- self.selected_views_num = 2 # the number of selected views for feature aggregation
- self.hidden_dim = hidden_dim
- self.activation = activation
- self.cost_type = cost_type
- self.d_pyramid_feature_compress = d_pyramid_feature_compress
- self.gru_fusion = None
-
- self.regnet_d_out = regnet_d_out
- self.multires = multires
-
- self.pos_embedder = Embedding(3, self.multires)
-
- self.compress_layer = ConvBnReLU(
- self.ch_in, self.d_pyramid_feature_compress, 3, 1, 1,
- norm_act=InPlaceABN)
- sparse_ch_in = self.d_pyramid_feature_compress * 2
-
- sparse_ch_in = sparse_ch_in + 16 if self.lod > 0 else sparse_ch_in
- self.sparse_costreg_net = SparseCostRegNet(
- d_in=sparse_ch_in, d_out=self.regnet_d_out)
- # self.regnet_d_out = self.sparse_costreg_net.d_out
-
- if activation == 'softplus':
- self.activation = nn.Softplus(beta=100)
- else:
- assert activation == 'relu'
- self.activation = nn.ReLU()
-
- self.sdf_layer = LatentSDFLayer(d_in=3,
- d_out=self.hidden_dim + 1,
- d_hidden=self.hidden_dim,
- n_layers=num_sdf_layers,
- multires=multires,
- geometric_init=True,
- weight_norm=True,
- activation=activation,
- d_conditional_feature=16 # self.regnet_d_out
- )
-
- def upsample(self, pre_feat, pre_coords, interval, num=8):
- '''
-
- :param pre_feat: (Tensor), features from last level, (N, C)
- :param pre_coords: (Tensor), coordinates from last level, (N, 4) (4 : Batch ind, x, y, z)
- :param interval: interval of voxels, interval = scale ** 2
- :param num: 1 -> 8
- :return: up_feat : (Tensor), upsampled features, (N*8, C)
- :return: up_coords: (N*8, 4), upsampled coordinates, (4 : Batch ind, x, y, z)
- '''
- with torch.no_grad():
- pos_list = [1, 2, 3, [1, 2], [1, 3], [2, 3], [1, 2, 3]]
- n, c = pre_feat.shape
- up_feat = pre_feat.unsqueeze(1).expand(-1, num, -1).contiguous()
- up_coords = pre_coords.unsqueeze(1).repeat(1, num, 1).contiguous()
- for i in range(num - 1):
- up_coords[:, i + 1, pos_list[i]] += interval
-
- up_feat = up_feat.view(-1, c)
- up_coords = up_coords.view(-1, 4)
-
- return up_feat, up_coords
-
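[Editor's note] Standalone illustration (not in the deleted file) of the offset pattern used by upsample() above: each parent voxel coordinate spawns the 8 children of a 2x2x2 block by adding `interval` to every non-empty subset of its x/y/z components.

    import torch

    interval = 1
    pos_list = [1, 2, 3, [1, 2], [1, 3], [2, 3], [1, 2, 3]]  # indices 1..3 are x, y, z
    parent = torch.tensor([[0, 4, 4, 4]])                    # (batch_idx, x, y, z)
    children = parent.repeat(8, 1)
    for i, pos in enumerate(pos_list):
        children[i + 1, pos] += interval
    # children now holds the parent voxel plus its 7 offset copies
    print(children)
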
- def aggregate_multiview_features(self, multiview_features, multiview_masks):
- """
-        aggregate multi-view features by computing their cost variance
- :param multiview_features: (num of voxels, num_of_views, c)
- :param multiview_masks: (num of voxels, num_of_views)
- :return:
- """
- num_pts, n_views, C = multiview_features.shape
-
- counts = torch.sum(multiview_masks, dim=1, keepdim=False) # [num_pts]
-
- assert torch.all(counts > 0) # the point is visible for at least 1 view
-
- volume_sum = torch.sum(multiview_features, dim=1, keepdim=False) # [num_pts, C]
- volume_sq_sum = torch.sum(multiview_features ** 2, dim=1, keepdim=False)
-
- if volume_sum.isnan().sum() > 0:
- import ipdb; ipdb.set_trace()
-
- del multiview_features
-
- counts = 1. / (counts + 1e-5)
- costvar = volume_sq_sum * counts[:, None] - (volume_sum * counts[:, None]) ** 2
-
- costvar_mean = torch.cat([costvar, volume_sum * counts[:, None]], dim=1)
- del volume_sum, volume_sq_sum, counts
-
-
-
- return costvar_mean
-
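[Editor's note] aggregate_multiview_features() above uses the identity Var[x] = E[x^2] - E[x]^2 per voxel and returns the variance concatenated with the mean. A short sketch, not in the deleted file, checking that identity on dummy, fully-visible features:

    import torch

    feats = torch.randn(5, 3, 8)                  # (num_voxels, n_views, C)
    counts = torch.full((5,), 3.0)                # every view visible in this toy case
    mean = feats.sum(dim=1) / counts[:, None]
    var = (feats ** 2).sum(dim=1) / counts[:, None] - mean ** 2
    # torch.var with unbiased=False is the same population-variance definition
    assert torch.allclose(var, feats.var(dim=1, unbiased=False), atol=1e-5)
    costvar_mean = torch.cat([var, mean], dim=1)  # (num_voxels, 2*C), as returned above
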
- def sparse_to_dense_volume(self, coords, feature, vol_dims, interval, device=None):
- """
-        convert the sparse volume into a dense volume to enable trilinear sampling;
-        the volume is stored sparsely elsewhere to save GPU memory
- :param coords: [num_pts, 3]
- :param feature: [num_pts, C]
- :param vol_dims: [3] dX, dY, dZ
- :param interval:
- :return:
- """
-
- # * assume batch size is 1
- if device is None:
- device = feature.device
-
- coords_int = (coords / interval).to(torch.int64)
- vol_dims = (vol_dims / interval).to(torch.int64)
-
- # - if stored in CPU, too slow
- dense_volume = sparse_to_dense_channel(
- coords_int.to(device), feature.to(device), vol_dims.to(device),
- feature.shape[1], 0, device) # [X, Y, Z, C]
-
- valid_mask_volume = sparse_to_dense_channel(
- coords_int.to(device),
- torch.ones([feature.shape[0], 1]).to(feature.device),
- vol_dims.to(device),
- 1, 0, device) # [X, Y, Z, 1]
-
- dense_volume = dense_volume.permute(3, 0, 1, 2).contiguous().unsqueeze(0) # [1, C, X, Y, Z]
- valid_mask_volume = valid_mask_volume.permute(3, 0, 1, 2).contiguous().unsqueeze(0) # [1, 1, X, Y, Z]
-
- return dense_volume, valid_mask_volume
-
- def get_conditional_volume(self, feature_maps, partial_vol_origin, proj_mats, sizeH=None, sizeW=None, lod=0,
- pre_coords=None, pre_feats=None,
- ):
- """
-
- :param feature_maps: pyramid features (B,V,C0+C1+C2,H,W) fused pyramid features
- :param partial_vol_origin: [B, 3] the world coordinates of the volume origin (0,0,0)
- :param proj_mats: projection matrix transform world pts into image space [B,V,4,4] suitable for original image size
- :param sizeH: the H of original image size
- :param sizeW: the W of original image size
- :param pre_coords: the coordinates of sparse volume from the prior lod
- :param pre_feats: the features of sparse volume from the prior lod
- :return:
- """
- device = proj_mats.device
- bs = feature_maps.shape[0]
- N_views = feature_maps.shape[1]
- minimum_visible_views = np.min([1, N_views - 1])
- # import ipdb; ipdb.set_trace()
- outputs = {}
- pts_samples = []
-
- # ----coarse to fine----
-
-        # * using fused pyramid feature maps is very important
- if self.compress_layer is not None:
- feats = self.compress_layer(feature_maps[0])
- else:
- feats = feature_maps[0]
- feats = feats[:, None, :, :, :] # [V, B, C, H, W]
- KRcam = proj_mats.permute(1, 0, 2, 3).contiguous() # [V, B, 4, 4]
- interval = 1
-
- if self.lod == 0:
- # ----generate new coords----
- coords = generate_grid(self.vol_dims, 1)[0]
- coords = coords.view(3, -1).to(device) # [3, num_pts]
- up_coords = []
- for b in range(bs):
- up_coords.append(torch.cat([torch.ones(1, coords.shape[-1]).to(coords.device) * b, coords]))
- up_coords = torch.cat(up_coords, dim=1).permute(1, 0).contiguous()
-            # * since we only estimate the geometry of one input reference image at a time;
- # * mask the outside of the camera frustum
- # import ipdb; ipdb.set_trace()
- frustum_mask = back_project_sparse_type(
- up_coords, partial_vol_origin, self.voxel_size,
- feats, KRcam, sizeH=sizeH, sizeW=sizeW, only_mask=True) # [num_pts, n_views]
- frustum_mask = torch.sum(frustum_mask, dim=-1) > minimum_visible_views # ! here should be large
- up_coords = up_coords[frustum_mask] # [num_pts_valid, 4]
-
- else:
- # ----upsample coords----
- assert pre_feats is not None
- assert pre_coords is not None
- up_feat, up_coords = self.upsample(pre_feats, pre_coords, 1)
-
- # ----back project----
- # give each valid 3d grid point all valid 2D features and masks
- multiview_features, multiview_masks = back_project_sparse_type(
- up_coords, partial_vol_origin, self.voxel_size, feats,
- KRcam, sizeH=sizeH, sizeW=sizeW) # (num of voxels, num_of_views, c), (num of voxels, num_of_views)
- # num_of_views = all views
-
- # if multiview_features.isnan().sum() > 0:
- # import ipdb; ipdb.set_trace()
-
- # import ipdb; ipdb.set_trace()
- if self.lod > 0:
-            # ! need another round of invalid-voxel filtering
- frustum_mask = torch.sum(multiview_masks, dim=-1) > 1
- up_feat = up_feat[frustum_mask]
- up_coords = up_coords[frustum_mask]
- multiview_features = multiview_features[frustum_mask]
- multiview_masks = multiview_masks[frustum_mask]
- # if multiview_features.isnan().sum() > 0:
- # import ipdb; ipdb.set_trace()
-        volume = self.aggregate_multiview_features(multiview_features, multiview_masks) # compute variance over all image features
- # import ipdb; ipdb.set_trace()
-
- # if volume.isnan().sum() > 0:
- # import ipdb; ipdb.set_trace()
-
- del multiview_features, multiview_masks
-
- # ----concat feature from last stage----
- if self.lod != 0:
- feat = torch.cat([volume, up_feat], dim=1)
- else:
- feat = volume
-
- # batch index is in the last position
- r_coords = up_coords[:, [1, 2, 3, 0]]
-
- # if feat.isnan().sum() > 0:
- # print('feat has nan:', feat.isnan().sum())
- # import ipdb; ipdb.set_trace()
-
- sparse_feat = SparseTensor(feat, r_coords.to(
- torch.int32)) # - directly use sparse tensor to avoid point2voxel operations
- # import ipdb; ipdb.set_trace()
- feat = self.sparse_costreg_net(sparse_feat)
-
- dense_volume, valid_mask_volume = self.sparse_to_dense_volume(up_coords[:, 1:], feat, self.vol_dims, interval,
- device=None) # [1, C/1, X, Y, Z]
-
- # if dense_volume.isnan().sum() > 0:
- # import ipdb; ipdb.set_trace()
-
-
- outputs['dense_volume_scale%d' % self.lod] = dense_volume # [1, 16, 96, 96, 96]
- outputs['valid_mask_volume_scale%d' % self.lod] = valid_mask_volume # [1, 1, 96, 96, 96]
- outputs['visible_mask_scale%d' % self.lod] = valid_mask_volume # [1, 1, 96, 96, 96]
- outputs['coords_scale%d' % self.lod] = generate_grid(self.vol_dims, interval).to(device)
- # import ipdb; ipdb.set_trace()
- return outputs
-
- def sdf(self, pts, conditional_volume, lod):
- num_pts = pts.shape[0]
- device = pts.device
- pts_ = pts.clone()
- pts = pts.view(1, 1, 1, num_pts, 3) # - should be in range (-1, 1)
-
- pts = torch.flip(pts, dims=[-1])
- # import ipdb; ipdb.set_trace()
- sampled_feature = grid_sample_3d(conditional_volume, pts) # [1, c, 1, 1, num_pts]
- sampled_feature = sampled_feature.view(-1, num_pts).permute(1, 0).contiguous().to(device)
-
- sdf_pts = self.sdf_layer(pts_, sampled_feature)
-
- outputs = {}
- outputs['sdf_pts_scale%d' % lod] = sdf_pts[:, :1]
- outputs['sdf_features_pts_scale%d' % lod] = sdf_pts[:, 1:]
- outputs['sampled_latent_scale%d' % lod] = sampled_feature
-
- return outputs
-
- @torch.no_grad()
- def sdf_from_sdfvolume(self, pts, sdf_volume, lod=0):
- num_pts = pts.shape[0]
- device = pts.device
- pts_ = pts.clone()
- pts = pts.view(1, 1, 1, num_pts, 3) # - should be in range (-1, 1)
-
- pts = torch.flip(pts, dims=[-1])
-
- sdf = torch.nn.functional.grid_sample(sdf_volume, pts, mode='bilinear', align_corners=True,
- padding_mode='border')
- sdf = sdf.view(-1, num_pts).permute(1, 0).contiguous().to(device)
-
- outputs = {}
- outputs['sdf_pts_scale%d' % lod] = sdf
-
- return outputs
-
- @torch.no_grad()
- def get_sdf_volume(self, conditional_volume, mask_volume, coords_volume, partial_origin):
- """
-
- :param conditional_volume: [1,C, dX,dY,dZ]
- :param mask_volume: [1,1, dX,dY,dZ]
- :param coords_volume: [1,3, dX,dY,dZ]
- :return:
- """
- device = conditional_volume.device
- chunk_size = 10240
-
- _, C, dX, dY, dZ = conditional_volume.shape
- conditional_volume = conditional_volume.view(C, dX * dY * dZ).permute(1, 0).contiguous()
- mask_volume = mask_volume.view(-1)
- coords_volume = coords_volume.view(3, dX * dY * dZ).permute(1, 0).contiguous()
-
- pts = coords_volume * self.voxel_size + partial_origin # [dX*dY*dZ, 3]
-
- sdf_volume = torch.ones([dX * dY * dZ, 1]).float().to(device)
-
- conditional_volume = conditional_volume[mask_volume > 0]
- pts = pts[mask_volume > 0]
- conditional_volume = conditional_volume.split(chunk_size)
- pts = pts.split(chunk_size)
-
- sdf_all = []
- for pts_part, feature_part in zip(pts, conditional_volume):
- sdf_part = self.sdf_layer(pts_part, feature_part)[:, :1]
- sdf_all.append(sdf_part)
-
- sdf_all = torch.cat(sdf_all, dim=0)
- sdf_volume[mask_volume > 0] = sdf_all
- sdf_volume = sdf_volume.view(1, 1, dX, dY, dZ)
- return sdf_volume
-
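[Editor's note] get_sdf_volume() above evaluates the SDF MLP over the whole grid in chunks of 10240 points to bound peak GPU memory. The same split/concat pattern in isolation (not part of the deleted file; the per-point function is a stand-in):

    import torch

    pts = torch.rand(50000, 3)
    chunk_size = 10240
    out = torch.cat([p.norm(dim=-1, keepdim=True) - 0.5   # stand-in for the SDF MLP
                     for p in pts.split(chunk_size)], dim=0)
    assert out.shape == (50000, 1)
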
- def gradient(self, x, conditional_volume, lod):
- """
- return the gradient of specific lod
- :param x:
- :param lod:
- :return:
- """
- x.requires_grad_(True)
- # import ipdb; ipdb.set_trace()
- with torch.enable_grad():
- output = self.sdf(x, conditional_volume, lod)
- y = output['sdf_pts_scale%d' % lod]
-
- d_output = torch.ones_like(y, requires_grad=False, device=y.device)
- # ! Distributed Data Parallel doesn’t work with torch.autograd.grad()
- # ! (i.e. it will only work if gradients are to be accumulated in .grad attributes of parameters).
- gradients = torch.autograd.grad(
- outputs=y,
- inputs=x,
- grad_outputs=d_output,
- create_graph=True,
- retain_graph=True,
- only_inputs=True)[0]
- return gradients.unsqueeze(1)
-
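[Editor's note] gradient() above obtains the analytic SDF gradient (used as the surface normal) with torch.autograd.grad. A minimal sketch of the same pattern, not in the deleted file, on a toy sphere SDF whose gradient must be the unit radial direction:

    import torch

    def toy_sdf(x):                        # signed distance to the unit sphere
        return x.norm(dim=-1, keepdim=True) - 1.0

    x = torch.randn(128, 3, requires_grad=True)
    y = toy_sdf(x)
    grad = torch.autograd.grad(outputs=y, inputs=x,
                               grad_outputs=torch.ones_like(y),
                               create_graph=True, retain_graph=True,
                               only_inputs=True)[0]
    assert torch.allclose(grad, x / x.norm(dim=-1, keepdim=True), atol=1e-5)
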
-
-def sparse_to_dense_volume(coords, feature, vol_dims, interval, device=None):
- """
-    convert the sparse volume into a dense volume to enable trilinear sampling;
-    the volume is stored sparsely elsewhere to save GPU memory
- :param coords: [num_pts, 3]
- :param feature: [num_pts, C]
- :param vol_dims: [3] dX, dY, dZ
- :param interval:
- :return:
- """
-
- # * assume batch size is 1
- if device is None:
- device = feature.device
-
- coords_int = (coords / interval).to(torch.int64)
- vol_dims = (vol_dims / interval).to(torch.int64)
-
- # - if stored in CPU, too slow
- dense_volume = sparse_to_dense_channel(
- coords_int.to(device), feature.to(device), vol_dims.to(device),
- feature.shape[1], 0, device) # [X, Y, Z, C]
-
- valid_mask_volume = sparse_to_dense_channel(
- coords_int.to(device),
- torch.ones([feature.shape[0], 1]).to(feature.device),
- vol_dims.to(device),
- 1, 0, device) # [X, Y, Z, 1]
-
- dense_volume = dense_volume.permute(3, 0, 1, 2).contiguous().unsqueeze(0) # [1, C, X, Y, Z]
- valid_mask_volume = valid_mask_volume.permute(3, 0, 1, 2).contiguous().unsqueeze(0) # [1, 1, X, Y, Z]
-
- return dense_volume, valid_mask_volume
-
-
-class SdfVolume(nn.Module):
- def __init__(self, volume, coords=None, type='dense'):
- super(SdfVolume, self).__init__()
- self.volume = torch.nn.Parameter(volume, requires_grad=True)
- self.coords = coords
- self.type = type
-
- def forward(self):
- return self.volume
-
-
-class FinetuneOctreeSdfNetwork(nn.Module):
- '''
-    After obtaining the conditional volume from the generalized network,
-    directly optimize the conditional volume.
-    The conditional volume is still sparse.
- '''
-
- def __init__(self, voxel_size, vol_dims,
- origin=[-1., -1., -1.],
- hidden_dim=128, activation='softplus',
- regnet_d_out=8,
- multires=6,
- if_fitted_rendering=True,
- num_sdf_layers=4,
- ):
- super(FinetuneOctreeSdfNetwork, self).__init__()
-
- self.voxel_size = voxel_size # - the voxel size of the current volume
- self.vol_dims = torch.tensor(vol_dims) # - the dims of the current volume
-
- self.origin = torch.tensor(origin).to(torch.float32)
-
- self.hidden_dim = hidden_dim
- self.activation = activation
-
- self.regnet_d_out = regnet_d_out
-
- self.if_fitted_rendering = if_fitted_rendering
- self.multires = multires
- # d_in_embedding = self.regnet_d_out if self.pos_add_type == 'latent' else 3
- # self.pos_embedder = Embedding(d_in_embedding, self.multires)
-
- # - the optimized parameters
- self.sparse_volume_lod0 = None
- self.sparse_coords_lod0 = None
-
- if activation == 'softplus':
- self.activation = nn.Softplus(beta=100)
- else:
- assert activation == 'relu'
- self.activation = nn.ReLU()
-
- self.sdf_layer = LatentSDFLayer(d_in=3,
- d_out=self.hidden_dim + 1,
- d_hidden=self.hidden_dim,
- n_layers=num_sdf_layers,
- multires=multires,
- geometric_init=True,
- weight_norm=True,
- activation=activation,
- d_conditional_feature=16 # self.regnet_d_out
- )
-
- # - add mlp rendering when finetuning
- self.renderer = None
-
- d_in_renderer = 3 + self.regnet_d_out + 3 + 3
- self.renderer = BlendingRenderingNetwork(
- d_feature=self.hidden_dim - 1,
-            mode='idr', # ! the view direction influences a lot
- d_in=d_in_renderer,
- d_out=50, # maximum 50 images
- d_hidden=self.hidden_dim,
- n_layers=3,
- weight_norm=True,
- multires_view=4,
- squeeze_out=True,
- )
-
- def initialize_conditional_volumes(self, dense_volume_lod0, dense_volume_mask_lod0,
- sparse_volume_lod0=None, sparse_coords_lod0=None):
- """
-
- :param dense_volume_lod0: [1,C,dX,dY,dZ]
- :param dense_volume_mask_lod0: [1,1,dX,dY,dZ]
- :param dense_volume_lod1:
- :param dense_volume_mask_lod1:
- :return:
- """
-
- if sparse_volume_lod0 is None:
- device = dense_volume_lod0.device
- _, C, dX, dY, dZ = dense_volume_lod0.shape
-
- dense_volume_lod0 = dense_volume_lod0.view(C, dX * dY * dZ).permute(1, 0).contiguous()
- mask_lod0 = dense_volume_mask_lod0.view(dX * dY * dZ) > 0
-
- self.sparse_volume_lod0 = SdfVolume(dense_volume_lod0[mask_lod0], type='sparse')
-
- coords = generate_grid(self.vol_dims, 1)[0] # [3, dX, dY, dZ]
- coords = coords.view(3, dX * dY * dZ).permute(1, 0).to(device)
- self.sparse_coords_lod0 = torch.nn.Parameter(coords[mask_lod0], requires_grad=False)
- else:
- self.sparse_volume_lod0 = SdfVolume(sparse_volume_lod0, type='sparse')
- self.sparse_coords_lod0 = torch.nn.Parameter(sparse_coords_lod0, requires_grad=False)
-
- def get_conditional_volume(self):
- dense_volume, valid_mask_volume = sparse_to_dense_volume(
- self.sparse_coords_lod0,
- self.sparse_volume_lod0(), self.vol_dims, interval=1,
- device=None) # [1, C/1, X, Y, Z]
-
- # valid_mask_volume = self.dense_volume_mask_lod0
-
- outputs = {}
- outputs['dense_volume_scale%d' % 0] = dense_volume
- outputs['valid_mask_volume_scale%d' % 0] = valid_mask_volume
-
- return outputs
-
- def tv_regularizer(self):
- dense_volume, valid_mask_volume = sparse_to_dense_volume(
- self.sparse_coords_lod0,
- self.sparse_volume_lod0(), self.vol_dims, interval=1,
- device=None) # [1, C/1, X, Y, Z]
-
- dx = (dense_volume[:, :, 1:, :, :] - dense_volume[:, :, :-1, :, :]) ** 2 # [1, C/1, X-1, Y, Z]
- dy = (dense_volume[:, :, :, 1:, :] - dense_volume[:, :, :, :-1, :]) ** 2 # [1, C/1, X, Y-1, Z]
- dz = (dense_volume[:, :, :, :, 1:] - dense_volume[:, :, :, :, :-1]) ** 2 # [1, C/1, X, Y, Z-1]
-
- tv = dx[:, :, :, :-1, :-1] + dy[:, :, :-1, :, :-1] + dz[:, :, :-1, :-1, :] # [1, C/1, X-1, Y-1, Z-1]
-
- mask = valid_mask_volume[:, :, :-1, :-1, :-1] * valid_mask_volume[:, :, 1:, :-1, :-1] * \
- valid_mask_volume[:, :, :-1, 1:, :-1] * valid_mask_volume[:, :, :-1, :-1, 1:]
-
- tv = torch.sqrt(tv + 1e-6).mean(dim=1, keepdim=True) * mask
- # tv = tv.mean(dim=1, keepdim=True) * mask
-
- assert torch.all(~torch.isnan(tv))
-
- return torch.mean(tv)
-
- def sdf(self, pts, conditional_volume, lod):
-
- outputs = {}
-
- num_pts = pts.shape[0]
- device = pts.device
- pts_ = pts.clone()
- pts = pts.view(1, 1, 1, num_pts, 3) # - should be in range (-1, 1)
-
- pts = torch.flip(pts, dims=[-1])
-
- sampled_feature = grid_sample_3d(conditional_volume, pts) # [1, c, 1, 1, num_pts]
- sampled_feature = sampled_feature.view(-1, num_pts).permute(1, 0).contiguous()
- outputs['sampled_latent_scale%d' % lod] = sampled_feature
-
- sdf_pts = self.sdf_layer(pts_, sampled_feature)
-
- lod = 0
- outputs['sdf_pts_scale%d' % lod] = sdf_pts[:, :1]
- outputs['sdf_features_pts_scale%d' % lod] = sdf_pts[:, 1:]
-
- return outputs
-
- def color_blend(self, pts, position, normals, view_dirs, feature_vectors, img_index,
- pts_pixel_color, pts_pixel_mask, pts_patch_color=None, pts_patch_mask=None):
-
- return self.renderer(torch.cat([pts, position], dim=-1), normals, view_dirs, feature_vectors,
- img_index, pts_pixel_color, pts_pixel_mask,
- pts_patch_color=pts_patch_color, pts_patch_mask=pts_patch_mask)
-
- def gradient(self, x, conditional_volume, lod):
- """
- return the gradient of specific lod
- :param x:
- :param lod:
- :return:
- """
- x.requires_grad_(True)
- output = self.sdf(x, conditional_volume, lod)
- y = output['sdf_pts_scale%d' % 0]
-
- d_output = torch.ones_like(y, requires_grad=False, device=y.device)
-
- gradients = torch.autograd.grad(
- outputs=y,
- inputs=x,
- grad_outputs=d_output,
- create_graph=True,
- retain_graph=True,
- only_inputs=True)[0]
- return gradients.unsqueeze(1)
-
- @torch.no_grad()
- def prune_dense_mask(self, threshold=0.02):
- """
-        Gradually prune the mask of the dense volume to decrease the number of SDF network inferences
- :return:
- """
- chunk_size = 10240
- coords = generate_grid(self.vol_dims_lod0, 1)[0] # [3, dX, dY, dZ]
-
- _, dX, dY, dZ = coords.shape
-
- pts = coords.view(3, -1).permute(1,
- 0).contiguous() * self.voxel_size_lod0 + self.origin[None, :] # [dX*dY*dZ, 3]
-
- # dense_volume = self.dense_volume_lod0() # [1,C,dX,dY,dZ]
- dense_volume, _ = sparse_to_dense_volume(
- self.sparse_coords_lod0,
- self.sparse_volume_lod0(), self.vol_dims_lod0, interval=1,
- device=None) # [1, C/1, X, Y, Z]
-
- sdf_volume = torch.ones([dX * dY * dZ, 1]).float().to(dense_volume.device) * 100
-
- mask = self.dense_volume_mask_lod0.view(-1) > 0
-
- pts_valid = pts[mask].to(dense_volume.device)
- feature_valid = dense_volume.view(self.regnet_d_out, -1).permute(1, 0).contiguous()[mask]
-
- pts_valid = pts_valid.split(chunk_size)
- feature_valid = feature_valid.split(chunk_size)
-
- sdf_list = []
-
- for pts_part, feature_part in zip(pts_valid, feature_valid):
- sdf_part = self.sdf_layer(pts_part, feature_part)[:, :1]
- sdf_list.append(sdf_part)
-
- sdf_list = torch.cat(sdf_list, dim=0)
-
- sdf_volume[mask] = sdf_list
-
- occupancy_mask = torch.abs(sdf_volume) < threshold # [num_pts, 1]
-
- # - dilate
- occupancy_mask = occupancy_mask.float()
- occupancy_mask = occupancy_mask.view(1, 1, dX, dY, dZ)
- occupancy_mask = F.avg_pool3d(occupancy_mask, kernel_size=7, stride=1, padding=3)
- occupancy_mask = occupancy_mask > 0
-
- self.dense_volume_mask_lod0 = torch.logical_and(self.dense_volume_mask_lod0,
- occupancy_mask).float() # (1, 1, dX, dY, dZ)
-
-
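[Editor's note] prune_dense_mask() above dilates the thresholded occupancy mask with an average pool, so every voxel within the 7x7x7 neighborhood of an occupied voxel survives. The dilation trick in isolation (not part of the deleted file):

    import torch
    import torch.nn.functional as F

    occ = torch.zeros(1, 1, 16, 16, 16)
    occ[0, 0, 8, 8, 8] = 1.0                        # a single occupied voxel
    dilated = F.avg_pool3d(occ, kernel_size=7, stride=1, padding=3) > 0
    print(dilated.sum().item())                     # 343 == 7**3 voxels after dilation
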
-class BlendingRenderingNetwork(nn.Module):
- def __init__(
- self,
- d_feature,
- mode,
- d_in,
- d_out,
- d_hidden,
- n_layers,
- weight_norm=True,
- multires_view=0,
- squeeze_out=True,
- ):
- super(BlendingRenderingNetwork, self).__init__()
-
- self.mode = mode
- self.squeeze_out = squeeze_out
- dims = [d_in + d_feature] + [d_hidden for _ in range(n_layers)] + [d_out]
-
- self.embedder = None
- if multires_view > 0:
- self.embedder = Embedding(3, multires_view)
- dims[0] += (self.embedder.out_channels - 3)
-
- self.num_layers = len(dims)
-
- for l in range(0, self.num_layers - 1):
- out_dim = dims[l + 1]
- lin = nn.Linear(dims[l], out_dim)
-
- if weight_norm:
- lin = nn.utils.weight_norm(lin)
-
- setattr(self, "lin" + str(l), lin)
-
- self.relu = nn.ReLU()
-
- self.color_volume = None
-
- self.softmax = nn.Softmax(dim=1)
-
- self.type = 'blending'
-
- def sample_pts_from_colorVolume(self, pts):
- device = pts.device
- num_pts = pts.shape[0]
- pts_ = pts.clone()
- pts = pts.view(1, 1, 1, num_pts, 3) # - should be in range (-1, 1)
-
- pts = torch.flip(pts, dims=[-1])
-
- sampled_color = grid_sample_3d(self.color_volume, pts) # [1, c, 1, 1, num_pts]
- sampled_color = sampled_color.view(-1, num_pts).permute(1, 0).contiguous().to(device)
-
- return sampled_color
-
- def forward(self, position, normals, view_dirs, feature_vectors, img_index,
- pts_pixel_color, pts_pixel_mask, pts_patch_color=None, pts_patch_mask=None):
- """
-
- :param position: can be 3d coord or interpolated volume latent
- :param normals:
- :param view_dirs:
- :param feature_vectors:
- :param img_index: [N_views], used to extract corresponding weights
- :param pts_pixel_color: [N_pts, N_views, 3]
- :param pts_pixel_mask: [N_pts, N_views]
- :param pts_patch_color: [N_pts, N_views, Npx, 3]
- :return:
- """
- if self.embedder is not None:
- view_dirs = self.embedder(view_dirs)
-
- rendering_input = None
-
- if self.mode == 'idr':
- rendering_input = torch.cat([position, view_dirs, normals, feature_vectors], dim=-1)
- elif self.mode == 'no_view_dir':
- rendering_input = torch.cat([position, normals, feature_vectors], dim=-1)
- elif self.mode == 'no_normal':
- rendering_input = torch.cat([position, view_dirs, feature_vectors], dim=-1)
- elif self.mode == 'no_points':
- rendering_input = torch.cat([view_dirs, normals, feature_vectors], dim=-1)
- elif self.mode == 'no_points_no_view_dir':
- rendering_input = torch.cat([normals, feature_vectors], dim=-1)
-
- x = rendering_input
-
- for l in range(0, self.num_layers - 1):
- lin = getattr(self, "lin" + str(l))
-
- x = lin(x)
-
- if l < self.num_layers - 2:
- x = self.relu(x) # [n_pts, d_out]
-
- ## extract value based on img_index
- x_extracted = torch.index_select(x, 1, img_index.long())
-
- weights_pixel = self.softmax(x_extracted) # [n_pts, N_views]
- weights_pixel = weights_pixel * pts_pixel_mask
- weights_pixel = weights_pixel / (
- torch.sum(weights_pixel.float(), dim=1, keepdim=True) + 1e-8) # [n_pts, N_views]
- final_pixel_color = torch.sum(pts_pixel_color * weights_pixel[:, :, None], dim=1,
- keepdim=False) # [N_pts, 3]
-
- final_pixel_mask = torch.sum(pts_pixel_mask.float(), dim=1, keepdim=True) > 0 # [N_pts, 1]
-
- final_patch_color, final_patch_mask = None, None
- # pts_patch_color [N_pts, N_views, Npx, 3]; pts_patch_mask [N_pts, N_views, Npx]
- if pts_patch_color is not None:
- N_pts, N_views, Npx, _ = pts_patch_color.shape
- patch_mask = torch.sum(pts_patch_mask, dim=-1, keepdim=False) > Npx - 1 # [N_pts, N_views]
-
- weights_patch = self.softmax(x_extracted) # [N_pts, N_views]
- weights_patch = weights_patch * patch_mask
- weights_patch = weights_patch / (
- torch.sum(weights_patch.float(), dim=1, keepdim=True) + 1e-8) # [n_pts, N_views]
-
- final_patch_color = torch.sum(pts_patch_color * weights_patch[:, :, None, None], dim=1,
- keepdim=False) # [N_pts, Npx, 3]
- final_patch_mask = torch.sum(patch_mask, dim=1, keepdim=True) > 0 # [N_pts, 1] at least one image sees
-
- return final_pixel_color, final_pixel_mask, final_patch_color, final_patch_mask
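
[Editor's note] The forward() above blends per-view pixel colors with masked, renormalized softmax weights. A compact sketch of that blending step; it is not part of the deleted file and all tensors below are made up:

    import torch

    n_pts, n_views = 4, 3
    logits = torch.randn(n_pts, n_views)            # stand-in for x_extracted
    pts_pixel_color = torch.rand(n_pts, n_views, 3)
    pts_pixel_mask = torch.tensor([[1., 1., 0.]]).repeat(n_pts, 1)

    w = torch.softmax(logits, dim=1) * pts_pixel_mask
    w = w / (w.sum(dim=1, keepdim=True) + 1e-8)     # renormalize over visible views only
    blended = (pts_pixel_color * w[:, :, None]).sum(dim=1)   # [n_pts, 3]
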
diff --git a/One-2-3-45-master 2/reconstruction/models/trainer_generic.py b/One-2-3-45-master 2/reconstruction/models/trainer_generic.py
deleted file mode 100644
index 18fe3ee1f9cb4c36550f4e8a3b7d2033995a0175..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/models/trainer_generic.py
+++ /dev/null
@@ -1,1380 +0,0 @@
-"""
-decouple the trainer from the renderer
-"""
-import os
-import cv2 as cv
-import torch
-import torch.nn as nn
-import torch.nn.functional as F
-
-import numpy as np
-
-import trimesh
-
-from utils.misc_utils import visualize_depth_numpy
-
-from utils.training_utils import numpy2tensor
-
-from loss.depth_loss import DepthLoss, DepthSmoothLoss
-
-from models.sparse_neus_renderer import SparseNeuSRenderer
-
-
-class GenericTrainer(nn.Module):
- def __init__(self,
- rendering_network_outside,
- pyramid_feature_network_lod0,
- pyramid_feature_network_lod1,
- sdf_network_lod0,
- sdf_network_lod1,
- variance_network_lod0,
- variance_network_lod1,
- rendering_network_lod0,
- rendering_network_lod1,
- n_samples_lod0,
- n_importance_lod0,
- n_samples_lod1,
- n_importance_lod1,
- n_outside,
- perturb,
- alpha_type='div',
- conf=None,
- timestamp="",
- mode='train',
- base_exp_dir=None,
- ):
- super(GenericTrainer, self).__init__()
-
- self.conf = conf
- self.timestamp = timestamp
-
-
- self.base_exp_dir = base_exp_dir
-
-
- self.anneal_start = self.conf.get_float('train.anneal_start', default=0.0)
- self.anneal_end = self.conf.get_float('train.anneal_end', default=0.0)
- self.anneal_start_lod1 = self.conf.get_float('train.anneal_start_lod1', default=0.0)
- self.anneal_end_lod1 = self.conf.get_float('train.anneal_end_lod1', default=0.0)
-
- # network setups
- self.rendering_network_outside = rendering_network_outside
- self.pyramid_feature_network_geometry_lod0 = pyramid_feature_network_lod0 # 2D pyramid feature network for geometry
-        self.pyramid_feature_network_geometry_lod1 = pyramid_feature_network_lod1 # use different networks for the two lods
-
-        # when num_lods==2, may consume too much memory
- self.sdf_network_lod0 = sdf_network_lod0
- self.sdf_network_lod1 = sdf_network_lod1
-
-        # - wrapped by ModuleList to support DataParallel
- self.variance_network_lod0 = variance_network_lod0
- self.variance_network_lod1 = variance_network_lod1
-
- self.rendering_network_lod0 = rendering_network_lod0
- self.rendering_network_lod1 = rendering_network_lod1
-
- self.n_samples_lod0 = n_samples_lod0
- self.n_importance_lod0 = n_importance_lod0
- self.n_samples_lod1 = n_samples_lod1
- self.n_importance_lod1 = n_importance_lod1
- self.n_outside = n_outside
- self.num_lods = conf.get_int('model.num_lods') # the number of octree lods
- self.perturb = perturb
- self.alpha_type = alpha_type
-
- # - the two renderers
- self.sdf_renderer_lod0 = SparseNeuSRenderer(
- self.rendering_network_outside,
- self.sdf_network_lod0,
- self.variance_network_lod0,
- self.rendering_network_lod0,
- self.n_samples_lod0,
- self.n_importance_lod0,
- self.n_outside,
- self.perturb,
- alpha_type='div',
- conf=self.conf)
-
- self.sdf_renderer_lod1 = SparseNeuSRenderer(
- self.rendering_network_outside,
- self.sdf_network_lod1,
- self.variance_network_lod1,
- self.rendering_network_lod1,
- self.n_samples_lod1,
- self.n_importance_lod1,
- self.n_outside,
- self.perturb,
- alpha_type='div',
- conf=self.conf)
-
- self.if_fix_lod0_networks = self.conf.get_bool('train.if_fix_lod0_networks')
-
- # sdf network weights
- self.sdf_igr_weight = self.conf.get_float('train.sdf_igr_weight')
- self.sdf_sparse_weight = self.conf.get_float('train.sdf_sparse_weight', default=0)
- self.sdf_decay_param = self.conf.get_float('train.sdf_decay_param', default=100)
- self.fg_bg_weight = self.conf.get_float('train.fg_bg_weight', default=0.00)
- self.bg_ratio = self.conf.get_float('train.bg_ratio', default=0.0)
-
- self.depth_loss_weight = self.conf.get_float('train.depth_loss_weight', default=1.00)
-
- print("depth_loss_weight: ", self.depth_loss_weight)
- self.depth_criterion = DepthLoss()
-
- # - DataParallel mode, cannot modify attributes in forward()
- # self.iter_step = 0
- self.val_mesh_freq = self.conf.get_int('train.val_mesh_freq')
-
- # - True for finetuning; False for general training
- self.if_fitted_rendering = self.conf.get_bool('train.if_fitted_rendering', default=False)
-
- self.prune_depth_filter = self.conf.get_bool('model.prune_depth_filter', default=False)
-
- def get_trainable_params(self):
- # set trainable params
-
- self.params_to_train = []
-
- if not self.if_fix_lod0_networks:
- # load pretrained featurenet
- self.params_to_train += list(self.pyramid_feature_network_geometry_lod0.parameters())
- self.params_to_train += list(self.sdf_network_lod0.parameters())
- self.params_to_train += list(self.variance_network_lod0.parameters())
-
- if self.rendering_network_lod0 is not None:
- self.params_to_train += list(self.rendering_network_lod0.parameters())
-
- if self.sdf_network_lod1 is not None:
- # load pretrained featurenet
- self.params_to_train += list(self.pyramid_feature_network_geometry_lod1.parameters())
-
- self.params_to_train += list(self.sdf_network_lod1.parameters())
- self.params_to_train += list(self.variance_network_lod1.parameters())
- if self.rendering_network_lod1 is not None:
- self.params_to_train += list(self.rendering_network_lod1.parameters())
-
- return self.params_to_train
-
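[Editor's note] get_trainable_params() only collects parameter lists; hooking them to an optimizer is left to the driver script. A hedged sketch, not in the deleted file; the optimizer choice and learning rate are illustrative and not taken from the repository config:

    import torch

    params = trainer.get_trainable_params()   # `trainer` is a GenericTrainer instance
    optimizer = torch.optim.Adam(params, lr=1e-4)
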
- def train_step(self, sample,
- perturb_overwrite=-1,
- background_rgb=None,
- alpha_inter_ratio_lod0=0.0,
- alpha_inter_ratio_lod1=0.0,
- iter_step=0,
- ):
-        # * only supports batch_size==1
-        # ! attention: a list of strings cannot be split in DataParallel
- batch_idx = sample['batch_idx'][0]
- meta = sample['meta'][batch_idx] # the scan lighting ref_view info
-
- sizeW = sample['img_wh'][0][0]
- sizeH = sample['img_wh'][0][1]
- partial_vol_origin = sample['partial_vol_origin'] # [B, 3]
- near, far = sample['near_fars'][0, 0, :1], sample['near_fars'][0, 0, 1:]
-
- # the full-size ray variables
- sample_rays = sample['rays']
- rays_o = sample_rays['rays_o'][0]
- rays_d = sample_rays['rays_v'][0]
-
- imgs = sample['images'][0]
- intrinsics = sample['intrinsics'][0]
- intrinsics_l_4x = intrinsics.clone()
- intrinsics_l_4x[:, :2] *= 0.25
- w2cs = sample['w2cs'][0]
- c2ws = sample['c2ws'][0]
- proj_matrices = sample['affine_mats']
- scale_mat = sample['scale_mat']
- trans_mat = sample['trans_mat']
-
- # *********************** Lod==0 ***********************
- if not self.if_fix_lod0_networks:
- geometry_feature_maps = self.obtain_pyramid_feature_maps(imgs)
-
- conditional_features_lod0 = self.sdf_network_lod0.get_conditional_volume(
- feature_maps=geometry_feature_maps[None, 1:, :, :, :],
- partial_vol_origin=partial_vol_origin,
- proj_mats=proj_matrices[:,1:],
- # proj_mats=proj_matrices,
- sizeH=sizeH,
- sizeW=sizeW,
- lod=0,
- )
-
- else:
- with torch.no_grad():
- geometry_feature_maps = self.obtain_pyramid_feature_maps(imgs, lod=0)
- conditional_features_lod0 = self.sdf_network_lod0.get_conditional_volume(
- feature_maps=geometry_feature_maps[None, 1:, :, :, :],
- partial_vol_origin=partial_vol_origin,
- proj_mats=proj_matrices[:,1:],
- # proj_mats=proj_matrices,
- sizeH=sizeH,
- sizeW=sizeW,
- lod=0,
- )
-
- con_volume_lod0 = conditional_features_lod0['dense_volume_scale0']
-
- con_valid_mask_volume_lod0 = conditional_features_lod0['valid_mask_volume_scale0']
-
- coords_lod0 = conditional_features_lod0['coords_scale0'] # [1,3,wX,wY,wZ]
-
- # * extract depth maps for all the images
- depth_maps_lod0, depth_masks_lod0 = None, None
- if self.num_lods > 1:
- sdf_volume_lod0 = self.sdf_network_lod0.get_sdf_volume(
- con_volume_lod0, con_valid_mask_volume_lod0,
- coords_lod0, partial_vol_origin) # [1, 1, dX, dY, dZ]
-
- if self.prune_depth_filter:
- depth_maps_lod0_l4x, depth_masks_lod0_l4x = self.sdf_renderer_lod0.extract_depth_maps(
- self.sdf_network_lod0, sdf_volume_lod0, intrinsics_l_4x, c2ws,
- sizeH // 4, sizeW // 4, near * 1.5, far)
- depth_maps_lod0 = F.interpolate(depth_maps_lod0_l4x, size=(sizeH, sizeW), mode='bilinear',
- align_corners=True)
-
- # *************** losses
- loss_lod0, losses_lod0, depth_statis_lod0 = None, None, None
-
- if not self.if_fix_lod0_networks:
-
- render_out = self.sdf_renderer_lod0.render(
- rays_o, rays_d, near, far,
- self.sdf_network_lod0,
- self.rendering_network_lod0,
- background_rgb=background_rgb,
- alpha_inter_ratio=alpha_inter_ratio_lod0,
- # * related to conditional feature
- lod=0,
- conditional_volume=con_volume_lod0,
- conditional_valid_mask_volume=con_valid_mask_volume_lod0,
- # * 2d feature maps
- feature_maps=geometry_feature_maps,
- color_maps=imgs,
- w2cs=w2cs,
- intrinsics=intrinsics,
- img_wh=[sizeW, sizeH],
- if_general_rendering=True,
- if_render_with_grad=True,
- )
-
- loss_lod0, losses_lod0, depth_statis_lod0 = self.cal_losses_sdf(render_out, sample_rays,
- iter_step, lod=0)
-
- # *********************** Lod==1 ***********************
-
- loss_lod1, losses_lod1, depth_statis_lod1 = None, None, None
-
- if self.num_lods > 1:
- geometry_feature_maps_lod1 = self.obtain_pyramid_feature_maps(imgs, lod=1)
- # geometry_feature_maps_lod1 = self.obtain_pyramid_feature_maps(imgs, lod=1)
- if self.prune_depth_filter:
- pre_coords, pre_feats = self.sdf_renderer_lod0.get_valid_sparse_coords_by_sdf_depthfilter(
- sdf_volume_lod0[0], coords_lod0[0], con_valid_mask_volume_lod0[0], con_volume_lod0[0],
- depth_maps_lod0, proj_matrices[0],
- partial_vol_origin, self.sdf_network_lod0.voxel_size,
- near, far, self.sdf_network_lod0.voxel_size, 12)
- else:
- pre_coords, pre_feats = self.sdf_renderer_lod0.get_valid_sparse_coords_by_sdf(
- sdf_volume_lod0[0], coords_lod0[0], con_valid_mask_volume_lod0[0], con_volume_lod0[0])
-
- pre_coords[:, 1:] = pre_coords[:, 1:] * 2
-
-            # ? It seems that when training gru_fusion, this part should be trainable too
- conditional_features_lod1 = self.sdf_network_lod1.get_conditional_volume(
- feature_maps=geometry_feature_maps_lod1[None, 1:, :, :, :],
- partial_vol_origin=partial_vol_origin,
- proj_mats=proj_matrices[:,1:],
- # proj_mats=proj_matrices,
- sizeH=sizeH,
- sizeW=sizeW,
- pre_coords=pre_coords,
- pre_feats=pre_feats,
- )
-
- con_volume_lod1 = conditional_features_lod1['dense_volume_scale1']
- con_valid_mask_volume_lod1 = conditional_features_lod1['valid_mask_volume_scale1']
-
- # if not self.if_gru_fusion_lod1:
- render_out_lod1 = self.sdf_renderer_lod1.render(
- rays_o, rays_d, near, far,
- self.sdf_network_lod1,
- self.rendering_network_lod1,
- background_rgb=background_rgb,
- alpha_inter_ratio=alpha_inter_ratio_lod1,
- # * related to conditional feature
- lod=1,
- conditional_volume=con_volume_lod1,
- conditional_valid_mask_volume=con_valid_mask_volume_lod1,
- # * 2d feature maps
- feature_maps=geometry_feature_maps_lod1,
- color_maps=imgs,
- w2cs=w2cs,
- intrinsics=intrinsics,
- img_wh=[sizeW, sizeH],
- bg_ratio=self.bg_ratio,
- )
- loss_lod1, losses_lod1, depth_statis_lod1 = self.cal_losses_sdf(render_out_lod1, sample_rays,
- iter_step, lod=1)
-
-
- # # - extract mesh
- if iter_step % self.val_mesh_freq == 0:
- torch.cuda.empty_cache()
- self.validate_mesh(self.sdf_network_lod0,
- self.sdf_renderer_lod0.extract_geometry,
- conditional_volume=con_volume_lod0, lod=0,
- threshold=0,
- # occupancy_mask=con_valid_mask_volume_lod0[0, 0],
- mode='train_bg', meta=meta,
- iter_step=iter_step, scale_mat=scale_mat,
- trans_mat=trans_mat)
- torch.cuda.empty_cache()
-
- if self.num_lods > 1:
- self.validate_mesh(self.sdf_network_lod1,
- self.sdf_renderer_lod1.extract_geometry,
- conditional_volume=con_volume_lod1, lod=1,
- # occupancy_mask=con_valid_mask_volume_lod1[0, 0].detach(),
- mode='train_bg', meta=meta,
- iter_step=iter_step, scale_mat=scale_mat,
- trans_mat=trans_mat)
-
- losses = {
- # - lod 0
- 'loss_lod0': loss_lod0,
- 'losses_lod0': losses_lod0,
- 'depth_statis_lod0': depth_statis_lod0,
-
- # - lod 1
- 'loss_lod1': loss_lod1,
- 'losses_lod1': losses_lod1,
- 'depth_statis_lod1': depth_statis_lod1,
-
- }
-
- return losses
-
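[Editor's note] train_step() returns the per-lod losses instead of calling backward() itself. A minimal sketch of how a training loop might consume that dict; it is not in the deleted file, and the annealing ratios and the assumption that at least one lod produced a loss are illustrative:

    losses = trainer.train_step(sample, iter_step=iter_step,
                                alpha_inter_ratio_lod0=1.0,
                                alpha_inter_ratio_lod1=0.0)
    total = 0.0
    for key in ('loss_lod0', 'loss_lod1'):
        if losses[key] is not None:
            total = total + losses[key]
    optimizer.zero_grad()
    total.backward()          # assumes at least one lod produced a loss tensor
    optimizer.step()
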
- def val_step(self, sample,
- perturb_overwrite=-1,
- background_rgb=None,
- alpha_inter_ratio_lod0=0.0,
- alpha_inter_ratio_lod1=0.0,
- iter_step=0,
- chunk_size=512,
- save_vis=False,
- ):
-        # * only supports batch_size==1
-        # ! attention: a list of strings cannot be split in DataParallel
- batch_idx = sample['batch_idx'][0]
- meta = sample['meta'][batch_idx] # the scan lighting ref_view info
-
- sizeW = sample['img_wh'][0][0]
- sizeH = sample['img_wh'][0][1]
- H, W = sizeH, sizeW
-
- partial_vol_origin = sample['partial_vol_origin'] # [B, 3]
- near, far = sample['query_near_far'][0, :1], sample['query_near_far'][0, 1:]
-
- # the ray variables
- sample_rays = sample['rays']
- rays_o = sample_rays['rays_o'][0]
- rays_d = sample_rays['rays_v'][0]
- rays_ndc_uv = sample_rays['rays_ndc_uv'][0]
-
- imgs = sample['images'][0]
- intrinsics = sample['intrinsics'][0]
- intrinsics_l_4x = intrinsics.clone()
- intrinsics_l_4x[:, :2] *= 0.25
- w2cs = sample['w2cs'][0]
- c2ws = sample['c2ws'][0]
- proj_matrices = sample['affine_mats']
-
- # render_img_idx = sample['render_img_idx'][0]
- # true_img = sample['images'][0][render_img_idx]
-
- # - the image to render
- scale_mat = sample['scale_mat'] # [1,4,4] used to convert mesh into true scale
- trans_mat = sample['trans_mat']
- query_c2w = sample['query_c2w'] # [1,4,4]
- query_w2c = sample['query_w2c'] # [1,4,4]
- true_img = sample['query_image'][0]
- true_img = np.uint8(true_img.permute(1, 2, 0).cpu().numpy() * 255)
-
- depth_min, depth_max = near.cpu().numpy(), far.cpu().numpy()
-
- scale_factor = sample['scale_factor'][0].cpu().numpy()
- true_depth = sample['query_depth'] if 'query_depth' in sample.keys() else None
- if true_depth is not None:
- true_depth = true_depth[0].cpu().numpy()
- true_depth_colored = visualize_depth_numpy(true_depth, [depth_min, depth_max])[0]
- else:
- true_depth_colored = None
-
- rays_o = rays_o.reshape(-1, 3).split(chunk_size)
- rays_d = rays_d.reshape(-1, 3).split(chunk_size)
-
- # - obtain conditional features
- with torch.no_grad():
- # - obtain conditional features
- geometry_feature_maps = self.obtain_pyramid_feature_maps(imgs, lod=0)
-
- # - lod 0
- conditional_features_lod0 = self.sdf_network_lod0.get_conditional_volume(
- feature_maps=geometry_feature_maps[None, :, :, :, :],
- partial_vol_origin=partial_vol_origin,
- proj_mats=proj_matrices,
- sizeH=sizeH,
- sizeW=sizeW,
- lod=0,
- )
-
- con_volume_lod0 = conditional_features_lod0['dense_volume_scale0']
- con_valid_mask_volume_lod0 = conditional_features_lod0['valid_mask_volume_scale0']
- coords_lod0 = conditional_features_lod0['coords_scale0'] # [1,3,wX,wY,wZ]
-
- if self.num_lods > 1:
- sdf_volume_lod0 = self.sdf_network_lod0.get_sdf_volume(
- con_volume_lod0, con_valid_mask_volume_lod0,
- coords_lod0, partial_vol_origin) # [1, 1, dX, dY, dZ]
-
- depth_maps_lod0, depth_masks_lod0 = None, None
- if self.prune_depth_filter:
- depth_maps_lod0_l4x, depth_masks_lod0_l4x = self.sdf_renderer_lod0.extract_depth_maps(
- self.sdf_network_lod0, sdf_volume_lod0,
- intrinsics_l_4x, c2ws,
-                    sizeH // 4, sizeW // 4, near * 1.5, far) # - near*1.5 is an empirically chosen value
- depth_maps_lod0 = F.interpolate(depth_maps_lod0_l4x, size=(sizeH, sizeW), mode='bilinear',
- align_corners=True)
- depth_masks_lod0 = F.interpolate(depth_masks_lod0_l4x.float(), size=(sizeH, sizeW), mode='nearest')
-
- #### visualize the depth_maps_lod0 for checking
- colored_depth_maps_lod0 = []
- for i in range(depth_maps_lod0.shape[0]):
- colored_depth_maps_lod0.append(
- visualize_depth_numpy(depth_maps_lod0[i, 0].cpu().numpy(), [depth_min, depth_max])[0])
-
- colored_depth_maps_lod0 = np.concatenate(colored_depth_maps_lod0, axis=0).astype(np.uint8)
- os.makedirs(os.path.join(self.base_exp_dir, 'depth_maps_lod0'), exist_ok=True)
- cv.imwrite(os.path.join(self.base_exp_dir, 'depth_maps_lod0',
- '{:0>8d}_{}.png'.format(iter_step, meta)),
- colored_depth_maps_lod0[:, :, ::-1])
-
- if self.num_lods > 1:
- geometry_feature_maps_lod1 = self.obtain_pyramid_feature_maps(imgs, lod=1)
-
- if self.prune_depth_filter:
- pre_coords, pre_feats = self.sdf_renderer_lod0.get_valid_sparse_coords_by_sdf_depthfilter(
- sdf_volume_lod0[0], coords_lod0[0], con_valid_mask_volume_lod0[0], con_volume_lod0[0],
- depth_maps_lod0, proj_matrices[0],
- partial_vol_origin, self.sdf_network_lod0.voxel_size,
- near, far, self.sdf_network_lod0.voxel_size, 12)
- else:
- pre_coords, pre_feats = self.sdf_renderer_lod0.get_valid_sparse_coords_by_sdf(
- sdf_volume_lod0[0], coords_lod0[0], con_valid_mask_volume_lod0[0], con_volume_lod0[0])
-
- pre_coords[:, 1:] = pre_coords[:, 1:] * 2
-
- with torch.no_grad():
- conditional_features_lod1 = self.sdf_network_lod1.get_conditional_volume(
- feature_maps=geometry_feature_maps_lod1[None, :, :, :, :],
- partial_vol_origin=partial_vol_origin,
- proj_mats=proj_matrices,
- sizeH=sizeH,
- sizeW=sizeW,
- pre_coords=pre_coords,
- pre_feats=pre_feats,
- )
-
- con_volume_lod1 = conditional_features_lod1['dense_volume_scale1']
- con_valid_mask_volume_lod1 = conditional_features_lod1['valid_mask_volume_scale1']
-
- out_rgb_fine = []
- out_normal_fine = []
- out_depth_fine = []
-
- out_rgb_fine_lod1 = []
- out_normal_fine_lod1 = []
- out_depth_fine_lod1 = []
-
- # out_depth_fine_explicit = []
- if save_vis:
- for rays_o_batch, rays_d_batch in zip(rays_o, rays_d):
-
- # ****** lod 0 ****
- render_out = self.sdf_renderer_lod0.render(
- rays_o_batch, rays_d_batch, near, far,
- self.sdf_network_lod0,
- self.rendering_network_lod0,
- background_rgb=background_rgb,
- alpha_inter_ratio=alpha_inter_ratio_lod0,
- # * related to conditional feature
- lod=0,
- conditional_volume=con_volume_lod0,
- conditional_valid_mask_volume=con_valid_mask_volume_lod0,
- # * 2d feature maps
- feature_maps=geometry_feature_maps,
- color_maps=imgs,
- w2cs=w2cs,
- intrinsics=intrinsics,
- img_wh=[sizeW, sizeH],
- query_c2w=query_c2w,
- if_render_with_grad=False,
- )
-
- feasible = lambda key: ((key in render_out) and (render_out[key] is not None))
-
- if feasible('depth'):
- out_depth_fine.append(render_out['depth'].detach().cpu().numpy())
-
- # if render_out['color_coarse'] is not None:
- if feasible('color_fine'):
- out_rgb_fine.append(render_out['color_fine'].detach().cpu().numpy())
- if feasible('gradients') and feasible('weights'):
- if render_out['inside_sphere'] is not None:
- out_normal_fine.append((render_out['gradients'] * render_out['weights'][:,
- :self.n_samples_lod0 + self.n_importance_lod0,
- None] * render_out['inside_sphere'][
- ..., None]).sum(dim=1).detach().cpu().numpy())
- else:
- out_normal_fine.append((render_out['gradients'] * render_out['weights'][:,
- :self.n_samples_lod0 + self.n_importance_lod0,
- None]).sum(dim=1).detach().cpu().numpy())
- del render_out
-
- # ****************** lod 1 **************************
- if self.num_lods > 1:
- for rays_o_batch, rays_d_batch in zip(rays_o, rays_d):
- render_out_lod1 = self.sdf_renderer_lod1.render(
- rays_o_batch, rays_d_batch, near, far,
- self.sdf_network_lod1,
- self.rendering_network_lod1,
- background_rgb=background_rgb,
- alpha_inter_ratio=alpha_inter_ratio_lod1,
- # * related to conditional feature
- lod=1,
- conditional_volume=con_volume_lod1,
- conditional_valid_mask_volume=con_valid_mask_volume_lod1,
- # * 2d feature maps
- feature_maps=geometry_feature_maps_lod1,
- color_maps=imgs,
- w2cs=w2cs,
- intrinsics=intrinsics,
- img_wh=[sizeW, sizeH],
- query_c2w=query_c2w,
- if_render_with_grad=False,
- )
-
- feasible = lambda key: ((key in render_out_lod1) and (render_out_lod1[key] is not None))
-
- if feasible('depth'):
- out_depth_fine_lod1.append(render_out_lod1['depth'].detach().cpu().numpy())
-
- # if render_out['color_coarse'] is not None:
- if feasible('color_fine'):
- out_rgb_fine_lod1.append(render_out_lod1['color_fine'].detach().cpu().numpy())
- if feasible('gradients') and feasible('weights'):
- if render_out_lod1['inside_sphere'] is not None:
- out_normal_fine_lod1.append((render_out_lod1['gradients'] * render_out_lod1['weights'][:,
- :self.n_samples_lod1 + self.n_importance_lod1,
- None] *
- render_out_lod1['inside_sphere'][
- ..., None]).sum(dim=1).detach().cpu().numpy())
- else:
- out_normal_fine_lod1.append((render_out_lod1['gradients'] * render_out_lod1['weights'][:,
- :self.n_samples_lod1 + self.n_importance_lod1,
- None]).sum(
- dim=1).detach().cpu().numpy())
- del render_out_lod1
-
- # - save visualization of lod 0
-
- self.save_visualization(true_img, true_depth_colored, out_depth_fine, out_normal_fine,
- query_w2c[0], out_rgb_fine, H, W,
- depth_min, depth_max, iter_step, meta, "val_lod0", true_depth=true_depth, scale_factor=scale_factor)
-
- if self.num_lods > 1:
- self.save_visualization(true_img, true_depth_colored, out_depth_fine_lod1, out_normal_fine_lod1,
- query_w2c[0], out_rgb_fine_lod1, H, W,
- depth_min, depth_max, iter_step, meta, "val_lod1", true_depth=true_depth, scale_factor=scale_factor)
-
- # - extract mesh
- if (iter_step % self.val_mesh_freq == 0):
- torch.cuda.empty_cache()
- self.validate_mesh(self.sdf_network_lod0,
- self.sdf_renderer_lod0.extract_geometry,
- conditional_volume=con_volume_lod0, lod=0,
- threshold=0,
- # occupancy_mask=con_valid_mask_volume_lod0[0, 0],
- mode='val_bg', meta=meta,
- iter_step=iter_step, scale_mat=scale_mat, trans_mat=trans_mat)
- torch.cuda.empty_cache()
-
- if self.num_lods > 1:
- self.validate_mesh(self.sdf_network_lod1,
- self.sdf_renderer_lod1.extract_geometry,
- conditional_volume=con_volume_lod1, lod=1,
- # occupancy_mask=con_valid_mask_volume_lod1[0, 0].detach(),
- mode='val_bg', meta=meta,
- iter_step=iter_step, scale_mat=scale_mat, trans_mat=trans_mat)
-
- torch.cuda.empty_cache()
-
- @torch.no_grad()
- def get_metrics_step(self, sample,
- perturb_overwrite=-1,
- background_rgb=None,
- alpha_inter_ratio_lod0=0.0,
- alpha_inter_ratio_lod1=0.0,
- iter_step=0,
- ):
-        # * only supports batch_size==1
-        # ! attention: a list of strings cannot be split in DataParallel
- batch_idx = sample['batch_idx'][0]
- meta = sample['meta'][batch_idx] # the scan lighting ref_view info
-
- sizeW = sample['img_wh'][0][0]
- sizeH = sample['img_wh'][0][1]
- partial_vol_origin = sample['partial_vol_origin'] # [B, 3]
- near, far = sample['near_fars'][0, 0, :1], sample['near_fars'][0, 0, 1:]
-
- # the full-size ray variables
- sample_rays = sample['rays']
- rays_o = sample_rays['rays_o'][0]
- rays_d = sample_rays['rays_v'][0]
-
- imgs = sample['images'][0]
- intrinsics = sample['intrinsics'][0]
- intrinsics_l_4x = intrinsics.clone()
- intrinsics_l_4x[:, :2] *= 0.25
- w2cs = sample['w2cs'][0]
- c2ws = sample['c2ws'][0]
- proj_matrices = sample['affine_mats']
- scale_mat = sample['scale_mat']
- trans_mat = sample['trans_mat']
-
- # *********************** Lod==0 ***********************
- if not self.if_fix_lod0_networks:
- geometry_feature_maps = self.obtain_pyramid_feature_maps(imgs)
-
- conditional_features_lod0 = self.sdf_network_lod0.get_conditional_volume(
- feature_maps=geometry_feature_maps[None, 1:, :, :, :],
- partial_vol_origin=partial_vol_origin,
- proj_mats=proj_matrices[:,1:],
- # proj_mats=proj_matrices,
- sizeH=sizeH,
- sizeW=sizeW,
- lod=0,
- )
-
- else:
- with torch.no_grad():
- geometry_feature_maps = self.obtain_pyramid_feature_maps(imgs, lod=0)
- # geometry_feature_maps = self.obtain_pyramid_feature_maps(imgs, lod=0)
- conditional_features_lod0 = self.sdf_network_lod0.get_conditional_volume(
- feature_maps=geometry_feature_maps[None, 1:, :, :, :],
- partial_vol_origin=partial_vol_origin,
- proj_mats=proj_matrices[:,1:],
- # proj_mats=proj_matrices,
- sizeH=sizeH,
- sizeW=sizeW,
- lod=0,
- )
- con_volume_lod0 = conditional_features_lod0['dense_volume_scale0']
-
- con_valid_mask_volume_lod0 = conditional_features_lod0['valid_mask_volume_scale0']
- coords_lod0 = conditional_features_lod0['coords_scale0'] # [1,3,wX,wY,wZ]
-
- # * extract depth maps for all the images
- depth_maps_lod0, depth_masks_lod0 = None, None
- if self.num_lods > 1:
- sdf_volume_lod0 = self.sdf_network_lod0.get_sdf_volume(
- con_volume_lod0, con_valid_mask_volume_lod0,
- coords_lod0, partial_vol_origin) # [1, 1, dX, dY, dZ]
-
- if self.prune_depth_filter:
- depth_maps_lod0_l4x, depth_masks_lod0_l4x = self.sdf_renderer_lod0.extract_depth_maps(
- self.sdf_network_lod0, sdf_volume_lod0, intrinsics_l_4x, c2ws,
- sizeH // 4, sizeW // 4, near * 1.5, far)
- depth_maps_lod0 = F.interpolate(depth_maps_lod0_l4x, size=(sizeH, sizeW), mode='bilinear',
- align_corners=True)
- depth_masks_lod0 = F.interpolate(depth_masks_lod0_l4x.float(), size=(sizeH, sizeW), mode='nearest')
-
- # *************** losses
- loss_lod0, losses_lod0, depth_statis_lod0 = None, None, None
-
- if not self.if_fix_lod0_networks:
-
- render_out = self.sdf_renderer_lod0.render(
- rays_o, rays_d, near, far,
- self.sdf_network_lod0,
- self.rendering_network_lod0,
- background_rgb=background_rgb,
- alpha_inter_ratio=alpha_inter_ratio_lod0,
- # * related to conditional feature
- lod=0,
- conditional_volume=con_volume_lod0,
- conditional_valid_mask_volume=con_valid_mask_volume_lod0,
- # * 2d feature maps
- feature_maps=geometry_feature_maps,
- color_maps=imgs,
- w2cs=w2cs,
- intrinsics=intrinsics,
- img_wh=[sizeW, sizeH],
- if_general_rendering=True,
- if_render_with_grad=True,
- )
-
- loss_lod0, losses_lod0, depth_statis_lod0 = self.cal_losses_sdf(render_out, sample_rays,
- iter_step, lod=0)
-
- # *********************** Lod==1 ***********************
-
- loss_lod1, losses_lod1, depth_statis_lod1 = None, None, None
-
- if self.num_lods > 1:
- geometry_feature_maps_lod1 = self.obtain_pyramid_feature_maps(imgs, lod=1)
- # geometry_feature_maps_lod1 = self.obtain_pyramid_feature_maps(imgs, lod=1)
- if self.prune_depth_filter:
- pre_coords, pre_feats = self.sdf_renderer_lod0.get_valid_sparse_coords_by_sdf_depthfilter(
- sdf_volume_lod0[0], coords_lod0[0], con_valid_mask_volume_lod0[0], con_volume_lod0[0],
- depth_maps_lod0, proj_matrices[0],
- partial_vol_origin, self.sdf_network_lod0.voxel_size,
- near, far, self.sdf_network_lod0.voxel_size, 12)
- else:
- pre_coords, pre_feats = self.sdf_renderer_lod0.get_valid_sparse_coords_by_sdf(
- sdf_volume_lod0[0], coords_lod0[0], con_valid_mask_volume_lod0[0], con_volume_lod0[0])
-
- pre_coords[:, 1:] = pre_coords[:, 1:] * 2
-
-            # ? It seems that when training gru_fusion, this part should be trainable too
- conditional_features_lod1 = self.sdf_network_lod1.get_conditional_volume(
- feature_maps=geometry_feature_maps_lod1[None, 1:, :, :, :],
- partial_vol_origin=partial_vol_origin,
- proj_mats=proj_matrices[:,1:],
- # proj_mats=proj_matrices,
- sizeH=sizeH,
- sizeW=sizeW,
- pre_coords=pre_coords,
- pre_feats=pre_feats,
- )
-
- con_volume_lod1 = conditional_features_lod1['dense_volume_scale1']
- con_valid_mask_volume_lod1 = conditional_features_lod1['valid_mask_volume_scale1']
-
- # if not self.if_gru_fusion_lod1:
- render_out_lod1 = self.sdf_renderer_lod1.render(
- rays_o, rays_d, near, far,
- self.sdf_network_lod1,
- self.rendering_network_lod1,
- background_rgb=background_rgb,
- alpha_inter_ratio=alpha_inter_ratio_lod1,
- # * related to conditional feature
- lod=1,
- conditional_volume=con_volume_lod1,
- conditional_valid_mask_volume=con_valid_mask_volume_lod1,
- # * 2d feature maps
- feature_maps=geometry_feature_maps_lod1,
- color_maps=imgs,
- w2cs=w2cs,
- intrinsics=intrinsics,
- img_wh=[sizeW, sizeH],
- bg_ratio=self.bg_ratio,
- )
- loss_lod1, losses_lod1, depth_statis_lod1 = self.cal_losses_sdf(render_out_lod1, sample_rays,
- iter_step, lod=1)
-
-
- # # - extract mesh
- if iter_step % self.val_mesh_freq == 0:
- torch.cuda.empty_cache()
- self.validate_mesh(self.sdf_network_lod0,
- self.sdf_renderer_lod0.extract_geometry,
- conditional_volume=con_volume_lod0, lod=0,
- threshold=0,
- # occupancy_mask=con_valid_mask_volume_lod0[0, 0],
- mode='train_bg', meta=meta,
- iter_step=iter_step, scale_mat=scale_mat,
- trans_mat=trans_mat)
- torch.cuda.empty_cache()
-
- if self.num_lods > 1:
- self.validate_mesh(self.sdf_network_lod1,
- self.sdf_renderer_lod1.extract_geometry,
- conditional_volume=con_volume_lod1, lod=1,
- # occupancy_mask=con_valid_mask_volume_lod1[0, 0].detach(),
- mode='train_bg', meta=meta,
- iter_step=iter_step, scale_mat=scale_mat,
- trans_mat=trans_mat)
-
- losses = {
- # - lod 0
- 'loss_lod0': loss_lod0,
- 'losses_lod0': losses_lod0,
- 'depth_statis_lod0': depth_statis_lod0,
-
- # - lod 1
- 'loss_lod1': loss_lod1,
- 'losses_lod1': losses_lod1,
- 'depth_statis_lod1': depth_statis_lod1,
-
- }
-
- return losses
-
-
- def export_mesh_step(self, sample,
- iter_step=0,
- chunk_size=512,
- resolution=360,
- save_vis=False,
- ):
- # * only support batch_size==1
-        # ! attention: a list of strings cannot be split by DataParallel
- batch_idx = sample['batch_idx'][0]
- meta = sample['meta'][batch_idx] # the scan lighting ref_view info
-
- sizeW = sample['img_wh'][0][0]
- sizeH = sample['img_wh'][0][1]
- H, W = sizeH, sizeW
-
- partial_vol_origin = sample['partial_vol_origin'] # [B, 3]
- near, far = sample['query_near_far'][0, :1], sample['query_near_far'][0, 1:]
-
- # the ray variables
- sample_rays = sample['rays']
- rays_o = sample_rays['rays_o'][0]
- rays_d = sample_rays['rays_v'][0]
-
- imgs = sample['images'][0]
- intrinsics = sample['intrinsics'][0]
- intrinsics_l_4x = intrinsics.clone()
- intrinsics_l_4x[:, :2] *= 0.25
- w2cs = sample['w2cs'][0]
- # target_candidate_w2cs = sample['target_candidate_w2cs'][0]
- proj_matrices = sample['affine_mats']
-
-
- # - the image to render
- scale_mat = sample['scale_mat'] # [1,4,4] used to convert mesh into true scale
- trans_mat = sample['trans_mat']
- query_c2w = sample['query_c2w'] # [1,4,4]
- true_img = sample['query_image'][0]
- true_img = np.uint8(true_img.permute(1, 2, 0).cpu().numpy() * 255)
-
- # depth_min, depth_max = near.cpu().numpy(), far.cpu().numpy()
-
- # scale_factor = sample['scale_factor'][0].cpu().numpy()
- # true_depth = sample['query_depth'] if 'query_depth' in sample.keys() else None
- # # if true_depth is not None:
- # # true_depth = true_depth[0].cpu().numpy()
- # # true_depth_colored = visualize_depth_numpy(true_depth, [depth_min, depth_max])[0]
- # # else:
- # # true_depth_colored = None
-
- rays_o = rays_o.reshape(-1, 3).split(chunk_size)
- rays_d = rays_d.reshape(-1, 3).split(chunk_size)
-
- # - obtain conditional features
- with torch.no_grad():
- # - obtain conditional features
- geometry_feature_maps = self.obtain_pyramid_feature_maps(imgs, lod=0)
- # - lod 0
- conditional_features_lod0 = self.sdf_network_lod0.get_conditional_volume(
- feature_maps=geometry_feature_maps[None, :, :, :, :],
- partial_vol_origin=partial_vol_origin,
- proj_mats=proj_matrices,
- sizeH=sizeH,
- sizeW=sizeW,
- lod=0,
- )
-
- con_volume_lod0 = conditional_features_lod0['dense_volume_scale0']
- con_valid_mask_volume_lod0 = conditional_features_lod0['valid_mask_volume_scale0']
- coords_lod0 = conditional_features_lod0['coords_scale0'] # [1,3,wX,wY,wZ]
-
- if self.num_lods > 1:
- sdf_volume_lod0 = self.sdf_network_lod0.get_sdf_volume(
- con_volume_lod0, con_valid_mask_volume_lod0,
- coords_lod0, partial_vol_origin) # [1, 1, dX, dY, dZ]
-
- depth_maps_lod0, depth_masks_lod0 = None, None
-
-
- if self.num_lods > 1:
- geometry_feature_maps_lod1 = self.obtain_pyramid_feature_maps(imgs, lod=1)
-
- if self.prune_depth_filter:
- pre_coords, pre_feats = self.sdf_renderer_lod0.get_valid_sparse_coords_by_sdf_depthfilter(
- sdf_volume_lod0[0], coords_lod0[0], con_valid_mask_volume_lod0[0], con_volume_lod0[0],
- depth_maps_lod0, proj_matrices[0],
- partial_vol_origin, self.sdf_network_lod0.voxel_size,
- near, far, self.sdf_network_lod0.voxel_size, 12)
- else:
- pre_coords, pre_feats = self.sdf_renderer_lod0.get_valid_sparse_coords_by_sdf(
- sdf_volume_lod0[0], coords_lod0[0], con_valid_mask_volume_lod0[0], con_volume_lod0[0])
-
- pre_coords[:, 1:] = pre_coords[:, 1:] * 2
-
- with torch.no_grad():
- conditional_features_lod1 = self.sdf_network_lod1.get_conditional_volume(
- feature_maps=geometry_feature_maps_lod1[None, :, :, :, :],
- partial_vol_origin=partial_vol_origin,
- proj_mats=proj_matrices,
- sizeH=sizeH,
- sizeW=sizeW,
- pre_coords=pre_coords,
- pre_feats=pre_feats,
- )
-
- con_volume_lod1 = conditional_features_lod1['dense_volume_scale1']
- con_valid_mask_volume_lod1 = conditional_features_lod1['valid_mask_volume_scale1']
-
-
- # - extract mesh
- if (iter_step % self.val_mesh_freq == 0):
- torch.cuda.empty_cache()
- self.validate_colored_mesh(
- density_or_sdf_network=self.sdf_network_lod0,
- func_extract_geometry=self.sdf_renderer_lod0.extract_geometry,
- resolution=resolution,
- conditional_volume=con_volume_lod0,
- conditional_valid_mask_volume = con_valid_mask_volume_lod0,
- feature_maps=geometry_feature_maps,
- color_maps=imgs,
- w2cs=w2cs,
- target_candidate_w2cs=None,
- intrinsics=intrinsics,
- rendering_network=self.rendering_network_lod0,
- rendering_projector=self.sdf_renderer_lod0.rendering_projector,
- lod=0,
- threshold=0,
- query_c2w=query_c2w,
- mode='val_bg', meta=meta,
- iter_step=iter_step, scale_mat=scale_mat, trans_mat=trans_mat
- )
- torch.cuda.empty_cache()
-
- if self.num_lods > 1:
- self.validate_colored_mesh(
- density_or_sdf_network=self.sdf_network_lod1,
- func_extract_geometry=self.sdf_renderer_lod1.extract_geometry,
- resolution=resolution,
- conditional_volume=con_volume_lod1,
- conditional_valid_mask_volume = con_valid_mask_volume_lod1,
- feature_maps=geometry_feature_maps,
- color_maps=imgs,
- w2cs=w2cs,
- target_candidate_w2cs=None,
- intrinsics=intrinsics,
- rendering_network=self.rendering_network_lod1,
- rendering_projector=self.sdf_renderer_lod1.rendering_projector,
- lod=1,
- threshold=0,
- query_c2w=query_c2w,
- mode='val_bg', meta=meta,
- iter_step=iter_step, scale_mat=scale_mat, trans_mat=trans_mat
- )
- torch.cuda.empty_cache()
-
-
-
-
- def save_visualization(self, true_img, true_colored_depth, out_depth, out_normal, w2cs, out_color, H, W,
- depth_min, depth_max, iter_step, meta, comment, out_color_mlp=[], true_depth=None, scale_factor=1.0):
- if len(out_color) > 0:
- img_fine = (np.concatenate(out_color, axis=0).reshape([H, W, 3]) * 256).clip(0, 255)
-
- if len(out_color_mlp) > 0:
- img_mlp = (np.concatenate(out_color_mlp, axis=0).reshape([H, W, 3]) * 256).clip(0, 255)
-
- if len(out_normal) > 0:
- normal_img = np.concatenate(out_normal, axis=0)
- rot = w2cs[:3, :3].detach().cpu().numpy()
- # - convert normal from world space to camera space
- normal_img = (np.matmul(rot[None, :, :],
- normal_img[:, :, None]).reshape([H, W, 3]) * 128 + 128).clip(0, 255)
- if len(out_depth) > 0:
- pred_depth = np.concatenate(out_depth, axis=0).reshape([H, W])
- pred_depth_colored = visualize_depth_numpy(pred_depth, [depth_min, depth_max])[0]
-
- if len(out_depth) > 0:
- os.makedirs(os.path.join(self.base_exp_dir, 'depths_' + comment), exist_ok=True)
- if true_colored_depth is not None:
-
- if true_depth is not None:
- depth_error_map = np.abs(true_depth - pred_depth) * 2.0 / scale_factor
- # [256, 256, 1] -> [256, 256, 3]
- depth_error_map = np.tile(depth_error_map[:, :, None], [1, 1, 3])
-
- depth_visualized = np.concatenate(
- [(depth_error_map * 255).astype(np.uint8), true_colored_depth, pred_depth_colored, true_img], axis=1)[:, :, ::-1]
- # print("depth_visualized.shape: ", depth_visualized.shape)
- # write depth error result text on img, the input is a numpy array of [256, 1024, 3]
- # cv.putText(depth_visualized.copy(), "depth_error_mean: {:.4f}".format(depth_error_map.mean()), (10, 30), cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2)
- else:
- depth_visualized = np.concatenate(
- [true_colored_depth, pred_depth_colored, true_img])[:, :, ::-1]
- cv.imwrite(
- os.path.join(self.base_exp_dir, 'depths_' + comment,
- '{:0>8d}_{}.png'.format(iter_step, meta)), depth_visualized
- )
- else:
- cv.imwrite(
- os.path.join(self.base_exp_dir, 'depths_' + comment,
- '{:0>8d}_{}.png'.format(iter_step, meta)),
- np.concatenate(
- [pred_depth_colored, true_img])[:, :, ::-1])
- if len(out_color) > 0:
- os.makedirs(os.path.join(self.base_exp_dir, 'synthesized_color_' + comment), exist_ok=True)
- cv.imwrite(os.path.join(self.base_exp_dir, 'synthesized_color_' + comment,
- '{:0>8d}_{}.png'.format(iter_step, meta)),
- np.concatenate(
- [img_fine, true_img])[:, :, ::-1]) # bgr2rgb
- # compute psnr (image pixel lie in [0, 255])
- # mse_loss = np.mean((img_fine - true_img) ** 2)
- # psnr = 10 * np.log10(255 ** 2 / mse_loss)
-
- if len(out_color_mlp) > 0:
- os.makedirs(os.path.join(self.base_exp_dir, 'synthesized_color_mlp_' + comment), exist_ok=True)
- cv.imwrite(os.path.join(self.base_exp_dir, 'synthesized_color_mlp_' + comment,
- '{:0>8d}_{}.png'.format(iter_step, meta)),
- np.concatenate(
- [img_mlp, true_img])[:, :, ::-1]) # bgr2rgb
-
- if len(out_normal) > 0:
- os.makedirs(os.path.join(self.base_exp_dir, 'normals_' + comment), exist_ok=True)
- cv.imwrite(os.path.join(self.base_exp_dir, 'normals_' + comment,
- '{:0>8d}_{}.png'.format(iter_step, meta)),
- normal_img[:, :, ::-1])
-
- def forward(self, sample,
- perturb_overwrite=-1,
- background_rgb=None,
- alpha_inter_ratio_lod0=0.0,
- alpha_inter_ratio_lod1=0.0,
- iter_step=0,
- mode='train',
- save_vis=False,
- resolution=360,
- ):
-
- if mode == 'train':
- return self.train_step(sample,
- perturb_overwrite=perturb_overwrite,
- background_rgb=background_rgb,
- alpha_inter_ratio_lod0=alpha_inter_ratio_lod0,
- alpha_inter_ratio_lod1=alpha_inter_ratio_lod1,
- iter_step=iter_step
- )
- elif mode == 'val':
- import time
- begin = time.time()
- result = self.val_step(sample,
- perturb_overwrite=perturb_overwrite,
- background_rgb=background_rgb,
- alpha_inter_ratio_lod0=alpha_inter_ratio_lod0,
- alpha_inter_ratio_lod1=alpha_inter_ratio_lod1,
- iter_step=iter_step,
- save_vis=save_vis,
- )
- end = time.time()
- print("val_step time: ", end - begin)
- return result
- elif mode == 'export_mesh':
- import time
- begin = time.time()
- result = self.export_mesh_step(sample,
- iter_step=iter_step,
- save_vis=save_vis,
- resolution=resolution,
- )
- end = time.time()
- print("export mesh time: ", end - begin)
- return result
- elif mode == 'get_metrics':
- return self.get_metrics_step(sample,
- perturb_overwrite=perturb_overwrite,
- background_rgb=background_rgb,
- alpha_inter_ratio_lod0=alpha_inter_ratio_lod0,
- alpha_inter_ratio_lod1=alpha_inter_ratio_lod1,
- iter_step=iter_step
- )
- def obtain_pyramid_feature_maps(self, imgs, lod=0):
- """
- get feature maps of all conditional images
- :param imgs:
- :return:
- """
-
- if lod == 0:
- extractor = self.pyramid_feature_network_geometry_lod0
- elif lod >= 1:
- extractor = self.pyramid_feature_network_geometry_lod1
-
- pyramid_feature_maps = extractor(imgs)
-
-        # * the pyramid features are very important; using only the coarsest features makes optimization hard
- fused_feature_maps = torch.cat([
- F.interpolate(pyramid_feature_maps[0], scale_factor=4, mode='bilinear', align_corners=True),
- F.interpolate(pyramid_feature_maps[1], scale_factor=2, mode='bilinear', align_corners=True),
- pyramid_feature_maps[2]
- ], dim=1)
-
- return fused_feature_maps
-
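-    # Shape sketch of the fusion above (illustrative only, assuming the three
-    # pyramid levels come out at strides 4 / 2 / 1 with 32 / 16 / 8 channels,
-    # as produced by the FeatureNet in tsparse/modules.py):
-    #
-    #   lvl2 = torch.randn(n_views, 32, H // 4, W // 4)   # coarsest level
-    #   lvl1 = torch.randn(n_views, 16, H // 2, W // 2)
-    #   lvl0 = torch.randn(n_views, 8, H, W)              # finest level
-    #   fused = torch.cat([
-    #       F.interpolate(lvl2, scale_factor=4, mode='bilinear', align_corners=True),
-    #       F.interpolate(lvl1, scale_factor=2, mode='bilinear', align_corners=True),
-    #       lvl0,
-    #   ], dim=1)                                          # -> [n_views, 56, H, W]
-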
- def cal_losses_sdf(self, render_out, sample_rays, iter_step=-1, lod=0):
-
-        # loss weight schedule; the regularization terms should be added in a later training stage
- def get_weight(iter_step, weight):
-            if lod == 1:
-                anneal_start = self.anneal_end_lod1
-                anneal_end = self.anneal_end_lod1 * 2
-            else:
-                anneal_start = self.anneal_start
-                anneal_end = self.anneal_end * 2
-
- if iter_step < 0:
- return weight
-
- if anneal_end == 0.0:
- return weight
- elif iter_step < anneal_start:
- return 0.0
- else:
- return np.min(
- [1.0,
- (iter_step - anneal_start) / (anneal_end - anneal_start)]) * weight
-
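-        # Worked example of the schedule above (illustrative numbers, not the
-        # actual config): with anneal_start = 10k and anneal_end = 20k (so the
-        # doubled end is 40k), get_weight(iter_step, w) returns
-        #   0.0 * w   for iter_step < 10k,
-        #   0.5 * w   at iter_step = 25k (halfway through the ramp), and
-        #   1.0 * w   for iter_step >= 40k.
-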
- rays_o = sample_rays['rays_o'][0]
- rays_d = sample_rays['rays_v'][0]
- true_rgb = sample_rays['rays_color'][0]
-
- if 'rays_depth' in sample_rays.keys():
- true_depth = sample_rays['rays_depth'][0]
- else:
- true_depth = None
- mask = sample_rays['rays_mask'][0]
-
- color_fine = render_out['color_fine']
- color_fine_mask = render_out['color_fine_mask']
- depth_pred = render_out['depth']
-
- variance = render_out['variance']
- cdf_fine = render_out['cdf_fine']
- weight_sum = render_out['weights_sum']
-
- gradient_error_fine = render_out['gradient_error_fine']
-
- sdf = render_out['sdf']
-
- # * color generated by mlp
- color_mlp = render_out['color_mlp']
- color_mlp_mask = render_out['color_mlp_mask']
-
- if color_fine is not None:
- # Color loss
- color_mask = color_fine_mask if color_fine_mask is not None else mask
- color_mask = color_mask[..., 0]
- color_error = (color_fine[color_mask] - true_rgb[color_mask])
- color_fine_loss = F.l1_loss(color_error, torch.zeros_like(color_error).to(color_error.device),
- reduction='mean')
- psnr = 20.0 * torch.log10(
- 1.0 / (((color_fine[color_mask] - true_rgb[color_mask]) ** 2).mean() / (3.0)).sqrt())
- else:
- color_fine_loss = 0.
- psnr = 0.
-
- if color_mlp is not None:
- # Color loss
- color_mlp_mask = color_mlp_mask[..., 0]
- color_error_mlp = (color_mlp[color_mlp_mask] - true_rgb[color_mlp_mask])
- color_mlp_loss = F.l1_loss(color_error_mlp,
- torch.zeros_like(color_error_mlp).to(color_error_mlp.device),
- reduction='mean')
-
- psnr_mlp = 20.0 * torch.log10(
- 1.0 / (((color_mlp[color_mlp_mask] - true_rgb[color_mlp_mask]) ** 2).mean() / (3.0)).sqrt())
- else:
- color_mlp_loss = 0.
- psnr_mlp = 0.
-
- # depth loss is only used for inference, not included in total loss
- if true_depth is not None:
- # depth_loss = self.depth_criterion(depth_pred, true_depth, mask)
- depth_loss = self.depth_criterion(depth_pred, true_depth)
-
- depth_statis = None
- else:
- depth_loss = 0.
- depth_statis = None
-
- sparse_loss_1 = torch.exp(
-            -1 * torch.abs(render_out['sdf_random']) * self.sdf_decay_param).mean()  # - pushes |sdf| of random points away from zero
- sparse_loss_2 = torch.exp(-1 * torch.abs(sdf) * self.sdf_decay_param).mean()
- sparse_loss = (sparse_loss_1 + sparse_loss_2) / 2
-
- sdf_mean = torch.abs(sdf).mean()
- sparseness_1 = (torch.abs(sdf) < 0.01).to(torch.float32).mean()
- sparseness_2 = (torch.abs(sdf) < 0.02).to(torch.float32).mean()
-
- # Eikonal loss
- gradient_error_loss = gradient_error_fine
- # ! the first 50k, don't use bg constraint
- fg_bg_weight = 0.0 if iter_step < 50000 else get_weight(iter_step, self.fg_bg_weight)
-
- # Mask loss, optional
-        # The images of the DTU dataset contain large black regions (zero RGB values);
-        # this data prior can be used to make the foreground cleaner
- background_loss = 0.0
- fg_bg_loss = 0.0
- if self.fg_bg_weight > 0 and torch.mean((mask < 0.5).to(torch.float32)) > 0.02:
- weights_sum_fg = render_out['weights_sum_fg']
- fg_bg_error = (weights_sum_fg - mask)
- fg_bg_loss = F.l1_loss(fg_bg_error,
- torch.zeros_like(fg_bg_error).to(fg_bg_error.device),
- reduction='mean')
-
-
- loss = self.depth_loss_weight * depth_loss + color_fine_loss + color_mlp_loss + \
- sparse_loss * get_weight(iter_step, self.sdf_sparse_weight) + \
- fg_bg_loss * fg_bg_weight + \
- gradient_error_loss * self.sdf_igr_weight # ! gradient_error_loss need a mask
-
- losses = {
- "loss": loss,
- "depth_loss": depth_loss,
- "color_fine_loss": color_fine_loss,
- "color_mlp_loss": color_mlp_loss,
- "gradient_error_loss": gradient_error_loss,
- "background_loss": background_loss,
- "sparse_loss": sparse_loss,
- "sparseness_1": sparseness_1,
- "sparseness_2": sparseness_2,
- "sdf_mean": sdf_mean,
- "psnr": psnr,
- "psnr_mlp": psnr_mlp,
- "weights_sum": render_out['weights_sum'],
- "weights_sum_fg": render_out['weights_sum_fg'],
- "alpha_sum": render_out['alpha_sum'],
- "variance": render_out['variance'],
- "sparse_weight": get_weight(iter_step, self.sdf_sparse_weight),
- "fg_bg_weight": fg_bg_weight,
- "fg_bg_loss": fg_bg_loss,
- }
- losses = numpy2tensor(losses, device=rays_o.device)
- return loss, losses, depth_statis
-
- @torch.no_grad()
- def validate_mesh(self, density_or_sdf_network, func_extract_geometry, world_space=True, resolution=360,
- threshold=0.0, mode='val',
- # * 3d feature volume
- conditional_volume=None, lod=None, occupancy_mask=None,
- bound_min=[-1, -1, -1], bound_max=[1, 1, 1], meta='', iter_step=0, scale_mat=None,
- trans_mat=None
- ):
-
- bound_min = torch.tensor(bound_min, dtype=torch.float32)
- bound_max = torch.tensor(bound_max, dtype=torch.float32)
-
- vertices, triangles, fields = func_extract_geometry(
- density_or_sdf_network,
- bound_min, bound_max, resolution=resolution,
- threshold=threshold, device=conditional_volume.device,
- # * 3d feature volume
- conditional_volume=conditional_volume, lod=lod,
- occupancy_mask=occupancy_mask
- )
-
-
- if scale_mat is not None:
- scale_mat_np = scale_mat.cpu().numpy()
- vertices = vertices * scale_mat_np[0][0, 0] + scale_mat_np[0][:3, 3][None]
-
- if trans_mat is not None: # w2c_ref_inv
- trans_mat_np = trans_mat.cpu().numpy()
- vertices_homo = np.concatenate([vertices, np.ones_like(vertices[:, :1])], axis=1)
- vertices = np.matmul(trans_mat_np, vertices_homo[:, :, None])[:, :3, 0]
-
- mesh = trimesh.Trimesh(vertices, triangles)
- os.makedirs(os.path.join(self.base_exp_dir, 'meshes_' + mode), exist_ok=True)
- mesh.export(os.path.join(self.base_exp_dir, 'meshes_' + mode,
- 'mesh_{:0>8d}_{}_lod{:0>1d}.ply'.format(iter_step, meta, lod)))
-
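-    # Sketch of the vertex de-normalization used above (illustrative shapes and
-    # values): vertices are first rescaled by scale_mat (uniform scale plus
-    # translation) and then mapped back to the reference world frame by
-    # trans_mat (w2c_ref_inv).
-    #
-    #   verts = np.random.rand(100, 3)                             # [N, 3], normalized space
-    #   scale = np.diag([2.0, 2.0, 2.0, 1.0]); scale[:3, 3] = 0.1  # hypothetical scale_mat[0]
-    #   verts = verts * scale[0, 0] + scale[:3, 3][None]           # uniform scale + shift
-    #   trans = np.eye(4)                                          # hypothetical trans_mat
-    #   verts_h = np.concatenate([verts, np.ones_like(verts[:, :1])], axis=1)  # [N, 4]
-    #   verts = np.matmul(trans, verts_h[:, :, None])[:, :3, 0]    # back to world frame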
-
-
- def validate_colored_mesh(self, density_or_sdf_network, func_extract_geometry, world_space=True, resolution=360,
- threshold=0.0, mode='val',
- # * 3d feature volume
- conditional_volume=None,
- conditional_valid_mask_volume=None,
- feature_maps=None,
- color_maps = None,
- w2cs=None,
- target_candidate_w2cs=None,
- intrinsics=None,
- rendering_network=None,
- rendering_projector=None,
- query_c2w=None,
- lod=None, occupancy_mask=None,
- bound_min=[-1, -1, -1], bound_max=[1, 1, 1], meta='', iter_step=0, scale_mat=None,
- trans_mat=None
- ):
-
- bound_min = torch.tensor(bound_min, dtype=torch.float32)
- bound_max = torch.tensor(bound_max, dtype=torch.float32)
-
- vertices, triangles, fields = func_extract_geometry(
- density_or_sdf_network,
- bound_min, bound_max, resolution=resolution,
- threshold=threshold, device=conditional_volume.device,
- # * 3d feature volume
- conditional_volume=conditional_volume, lod=lod,
- occupancy_mask=occupancy_mask
- )
-
-
- with torch.no_grad():
- ren_geo_feats, ren_rgb_feats, ren_ray_diff, ren_mask, _, _ = rendering_projector.compute_view_independent(
- torch.tensor(vertices).to(conditional_volume),
- lod=lod,
- # * 3d geometry feature volumes
- geometryVolume=conditional_volume[0],
- geometryVolumeMask=conditional_valid_mask_volume[0],
- sdf_network=density_or_sdf_network,
- # * 2d rendering feature maps
- rendering_feature_maps=feature_maps, # [n_view, 56, 256, 256]
- color_maps=color_maps,
- w2cs=w2cs,
- target_candidate_w2cs=target_candidate_w2cs,
- intrinsics=intrinsics,
- img_wh=[256,256],
- query_img_idx=0, # the index of the N_views dim for rendering
- query_c2w=query_c2w,
- )
-
-
- vertices_color, rendering_valid_mask = rendering_network(
- ren_geo_feats, ren_rgb_feats, ren_ray_diff, ren_mask)
-
-
-
- if scale_mat is not None:
- scale_mat_np = scale_mat.cpu().numpy()
- vertices = vertices * scale_mat_np[0][0, 0] + scale_mat_np[0][:3, 3][None]
-
- if trans_mat is not None: # w2c_ref_inv
- trans_mat_np = trans_mat.cpu().numpy()
- vertices_homo = np.concatenate([vertices, np.ones_like(vertices[:, :1])], axis=1)
- vertices = np.matmul(trans_mat_np, vertices_homo[:, :, None])[:, :3, 0]
-
- vertices_color = np.array(vertices_color.squeeze(0).cpu() * 255, dtype=np.uint8)
- mesh = trimesh.Trimesh(vertices, triangles, vertex_colors=vertices_color)
- # os.makedirs(os.path.join(self.base_exp_dir, 'meshes_' + mode, 'lod{:0>1d}'.format(lod)), exist_ok=True)
- # mesh.export(os.path.join(self.base_exp_dir, 'meshes_' + mode, 'lod{:0>1d}'.format(lod),
- # 'mesh_{:0>8d}_{}_lod{:0>1d}.ply'.format(iter_step, meta, lod)))
-
- mesh.export(os.path.join(self.base_exp_dir, 'mesh.ply'))
\ No newline at end of file
diff --git a/One-2-3-45-master 2/reconstruction/ops/__init__.py b/One-2-3-45-master 2/reconstruction/ops/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/One-2-3-45-master 2/reconstruction/ops/back_project.py b/One-2-3-45-master 2/reconstruction/ops/back_project.py
deleted file mode 100644
index 5398f285f786a0e6c7a029138aa8a6554aae6e58..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/ops/back_project.py
+++ /dev/null
@@ -1,175 +0,0 @@
-import torch
-from torch.nn.functional import grid_sample
-
-
-def back_project_sparse_type(coords, origin, voxel_size, feats, KRcam, sizeH=None, sizeW=None, only_mask=False,
- with_proj_z=False):
- # - modified version from NeuRecon
- '''
-    Unproject the image features to form a 3D (sparse) feature volume
-
- :param coords: coordinates of voxels,
- dim: (num of voxels, 4) (4 : batch ind, x, y, z)
- :param origin: origin of the partial voxel volume (xyz position of voxel (0, 0, 0))
- dim: (batch size, 3) (3: x, y, z)
- :param voxel_size: floats specifying the size of a voxel
- :param feats: image features
- dim: (num of views, batch size, C, H, W)
- :param KRcam: projection matrix
- dim: (num of views, batch size, 4, 4)
- :return: feature_volume_all: 3D feature volumes
- dim: (num of voxels, num_of_views, c)
-    :return: mask_volume_all: indicates whether each voxel of the sampled feature volume is valid
- dim: (num of voxels, num_of_views)
- '''
- n_views, bs, c, h, w = feats.shape
- device = feats.device
-
- if sizeH is None:
- sizeH, sizeW = h, w # - if the KRcam is not suitable for the current feats
-
- feature_volume_all = torch.zeros(coords.shape[0], n_views, c).to(device)
- mask_volume_all = torch.zeros([coords.shape[0], n_views], dtype=torch.int32).to(device)
- # import ipdb; ipdb.set_trace()
- for batch in range(bs):
- # import ipdb; ipdb.set_trace()
- batch_ind = torch.nonzero(coords[:, 0] == batch).squeeze(1)
- coords_batch = coords[batch_ind][:, 1:]
-
- coords_batch = coords_batch.view(-1, 3)
- origin_batch = origin[batch].unsqueeze(0)
- feats_batch = feats[:, batch]
- proj_batch = KRcam[:, batch]
-
- grid_batch = coords_batch * voxel_size + origin_batch.float()
- rs_grid = grid_batch.unsqueeze(0).expand(n_views, -1, -1)
- rs_grid = rs_grid.permute(0, 2, 1).contiguous()
- nV = rs_grid.shape[-1]
- rs_grid = torch.cat([rs_grid, torch.ones([n_views, 1, nV]).to(device)], dim=1)
-
- # Project grid
- im_p = proj_batch @ rs_grid # - transform world pts to image UV space
- im_x, im_y, im_z = im_p[:, 0], im_p[:, 1], im_p[:, 2]
-
- im_z[im_z >= 0] = im_z[im_z >= 0].clamp(min=1e-6)
-
- im_x = im_x / im_z
- im_y = im_y / im_z
-
- im_grid = torch.stack([2 * im_x / (sizeW - 1) - 1, 2 * im_y / (sizeH - 1) - 1], dim=-1)
- mask = im_grid.abs() <= 1
- mask = (mask.sum(dim=-1) == 2) & (im_z > 0)
-
- mask = mask.view(n_views, -1)
- mask = mask.permute(1, 0).contiguous() # [num_pts, nviews]
-
- mask_volume_all[batch_ind] = mask.to(torch.int32)
-
- if only_mask:
- return mask_volume_all
-
- feats_batch = feats_batch.view(n_views, c, h, w)
- im_grid = im_grid.view(n_views, 1, -1, 2)
- features = grid_sample(feats_batch, im_grid, padding_mode='zeros', align_corners=True)
- # if features.isnan().sum() > 0:
- # import ipdb; ipdb.set_trace()
- features = features.view(n_views, c, -1)
- features = features.permute(2, 0, 1).contiguous() # [num_pts, nviews, c]
-
- feature_volume_all[batch_ind] = features
-
- if with_proj_z:
- im_z = im_z.view(n_views, 1, -1).permute(2, 0, 1).contiguous() # [num_pts, nviews, 1]
- return feature_volume_all, mask_volume_all, im_z
- # if feature_volume_all.isnan().sum() > 0:
- # import ipdb; ipdb.set_trace()
- return feature_volume_all, mask_volume_all
-
-
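-# Illustrative usage sketch of back_project_sparse_type (hypothetical shapes and
-# camera values; not part of the original pipeline):
-def _back_project_sparse_example():
-    n_views, bs, c, h, w = 2, 1, 8, 64, 64
-    # four voxels of batch 0, given as (batch index, x, y, z)
-    coords = torch.tensor([[0, 0, 0, 0],
-                           [0, 1, 0, 0],
-                           [0, 0, 1, 0],
-                           [0, 0, 0, 1]], dtype=torch.float32)
-    origin = torch.tensor([[-0.05, -0.05, 0.5]])       # [bs, 3], volume placed in front of the cameras
-    voxel_size = 0.05
-    feats = torch.randn(n_views, bs, c, h, w)
-    # identity extrinsics with a simple pinhole intrinsic matrix
-    K = torch.tensor([[float(w), 0., w / 2., 0.],
-                      [0., float(h), h / 2., 0.],
-                      [0., 0., 1., 0.],
-                      [0., 0., 0., 1.]])
-    KRcam = K[None, None].repeat(n_views, bs, 1, 1)     # [n_views, bs, 4, 4]
-    feat_vol, mask_vol = back_project_sparse_type(coords, origin, voxel_size, feats, KRcam)
-    print(feat_vol.shape, mask_vol.shape)               # [num_voxels, n_views, c], [num_voxels, n_views]
-
-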
-def cam2pixel(cam_coords, proj_c2p_rot, proj_c2p_tr, padding_mode, sizeH=None, sizeW=None, with_depth=False):
- """Transform coordinates in the camera frame to the pixel frame.
- Args:
- cam_coords: pixel coordinates defined in the first camera coordinates system -- [B, 3, H, W]
- proj_c2p_rot: rotation matrix of cameras -- [B, 3, 3]
- proj_c2p_tr: translation vectors of cameras -- [B, 3, 1]
- Returns:
- array of [-1,1] coordinates -- [B, H, W, 2]
- """
- b, _, h, w = cam_coords.size()
- if sizeH is None:
- sizeH = h
- sizeW = w
-
- cam_coords_flat = cam_coords.view(b, 3, -1) # [B, 3, H*W]
- if proj_c2p_rot is not None:
- pcoords = proj_c2p_rot.bmm(cam_coords_flat)
- else:
- pcoords = cam_coords_flat
-
- if proj_c2p_tr is not None:
- pcoords = pcoords + proj_c2p_tr # [B, 3, H*W]
- X = pcoords[:, 0]
- Y = pcoords[:, 1]
- Z = pcoords[:, 2].clamp(min=1e-3)
-
- X_norm = 2 * (X / Z) / (sizeW - 1) - 1 # Normalized, -1 if on extreme left,
- # 1 if on extreme right (x = w-1) [B, H*W]
-    Y_norm = 2 * (Y / Z) / (sizeH - 1) - 1  # same for Y  [B, H*W]
- if padding_mode == 'zeros':
- X_mask = ((X_norm > 1) + (X_norm < -1)).detach()
-        X_norm[X_mask] = 2  # make sure that no point in the warped image is a combination of image and gray
- Y_mask = ((Y_norm > 1) + (Y_norm < -1)).detach()
- Y_norm[Y_mask] = 2
-
- if with_depth:
- pixel_coords = torch.stack([X_norm, Y_norm, Z], dim=2) # [B, H*W, 3]
- return pixel_coords.view(b, h, w, 3)
- else:
- pixel_coords = torch.stack([X_norm, Y_norm], dim=2) # [B, H*W, 2]
- return pixel_coords.view(b, h, w, 2)
-
-
-# * already checked; make sure proj_matrix matches the intended coordinate system and resolution
-def back_project_dense_type(coords, origin, voxel_size, feats, proj_matrix, sizeH=None, sizeW=None):
- '''
-    Unproject the image features to form a 3D (dense) feature volume
-
- :param coords: coordinates of voxels,
- dim: (batch, nviews, 3, X,Y,Z)
- :param origin: origin of the partial voxel volume (xyz position of voxel (0, 0, 0))
- dim: (batch size, 3) (3: x, y, z)
- :param voxel_size: floats specifying the size of a voxel
- :param feats: image features
- dim: (batch size, num of views, C, H, W)
- :param proj_matrix: projection matrix
- dim: (batch size, num of views, 4, 4)
- :return: feature_volume_all: 3D feature volumes
- dim: (batch, nviews, C, X,Y,Z)
- :return: count: number of times each voxel can be seen
- dim: (batch, nviews, 1, X,Y,Z)
- '''
-
- batch, nviews, _, wX, wY, wZ = coords.shape
-
- if sizeH is None:
- sizeH, sizeW = feats.shape[-2:]
- proj_matrix = proj_matrix.view(batch * nviews, *proj_matrix.shape[2:])
-
- coords_wrd = coords * voxel_size + origin.view(batch, 1, 3, 1, 1, 1)
- coords_wrd = coords_wrd.view(batch * nviews, 3, wX * wY * wZ, 1) # (b*nviews,3,wX*wY*wZ, 1)
-
- pixel_grids = cam2pixel(coords_wrd, proj_matrix[:, :3, :3], proj_matrix[:, :3, 3:],
- 'zeros', sizeH=sizeH, sizeW=sizeW) # (b*nviews,wX*wY*wZ, 2)
- pixel_grids = pixel_grids.view(batch * nviews, 1, wX * wY * wZ, 2)
-
- feats = feats.view(batch * nviews, *feats.shape[2:]) # (b*nviews,c,h,w)
-
- ones = torch.ones((batch * nviews, 1, *feats.shape[2:])).to(feats.dtype).to(feats.device)
-
- features_volume = torch.nn.functional.grid_sample(feats, pixel_grids, padding_mode='zeros', align_corners=True)
- counts_volume = torch.nn.functional.grid_sample(ones, pixel_grids, padding_mode='zeros', align_corners=True)
-
- features_volume = features_volume.view(batch, nviews, -1, wX, wY, wZ) # (batch, nviews, C, X,Y,Z)
- counts_volume = counts_volume.view(batch, nviews, -1, wX, wY, wZ)
- return features_volume, counts_volume
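-
-
-# Illustrative shape sketch of back_project_dense_type (hypothetical shapes and
-# camera values; not part of the original pipeline):
-def _back_project_dense_example():
-    batch, nviews, c, h, w = 1, 2, 8, 64, 64
-    wx, wy, wz = 8, 8, 8
-    grid = torch.stack(torch.meshgrid(torch.arange(wx), torch.arange(wy), torch.arange(wz),
-                                      indexing='ij')).float()       # [3, X, Y, Z]
-    coords = grid[None, None].repeat(batch, nviews, 1, 1, 1, 1)      # [batch, nviews, 3, X, Y, Z]
-    origin = torch.tensor([[-0.2, -0.2, 0.5]])                       # [batch, 3]
-    feats = torch.randn(batch, nviews, c, h, w)
-    K = torch.tensor([[float(w), 0., w / 2., 0.],
-                      [0., float(h), h / 2., 0.],
-                      [0., 0., 1., 0.],
-                      [0., 0., 0., 1.]])
-    proj = K[None, None].repeat(batch, nviews, 1, 1)                 # [batch, nviews, 4, 4]
-    feat_vol, counts = back_project_dense_type(coords, origin, 0.05, feats, proj)
-    print(feat_vol.shape, counts.shape)   # [batch, nviews, c, X, Y, Z], [batch, nviews, 1, X, Y, Z]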
-
diff --git a/One-2-3-45-master 2/reconstruction/ops/generate_grids.py b/One-2-3-45-master 2/reconstruction/ops/generate_grids.py
deleted file mode 100644
index 304c1c4c1a424c4bc219f39815ed43fea1d9de5d..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/ops/generate_grids.py
+++ /dev/null
@@ -1,33 +0,0 @@
-import torch
-
-
-def generate_grid(n_vox, interval):
- """
- generate grid
- if 3D volume, grid[:,:,x,y,z] = (x,y,z)
- :param n_vox:
- :param interval:
- :return:
- """
- with torch.no_grad():
- # Create voxel grid
- grid_range = [torch.arange(0, n_vox[axis], interval) for axis in range(3)]
- grid = torch.stack(torch.meshgrid(grid_range[0], grid_range[1], grid_range[2], indexing="ij")) # 3 dx dy dz
- # ! don't create tensor on gpu; imbalanced gpu memory in ddp mode
- grid = grid.unsqueeze(0).type(torch.float32) # 1 3 dx dy dz
-
- return grid
-
-
-if __name__ == "__main__":
- import torch.nn.functional as F
- grid = generate_grid([5, 6, 8], 1)
-
- pts = 2 * torch.tensor([1, 2, 3]) / (torch.tensor([5, 6, 8]) - 1) - 1
- pts = pts.view(1, 1, 1, 1, 3)
-
- pts = torch.flip(pts, dims=[-1])
-
- sampled = F.grid_sample(grid, pts, mode='nearest')
-
- print(sampled)
diff --git a/One-2-3-45-master 2/reconstruction/ops/grid_sampler.py b/One-2-3-45-master 2/reconstruction/ops/grid_sampler.py
deleted file mode 100644
index 44113faa705f0b98a5689c0e4fb9e7a95865d6c1..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/ops/grid_sampler.py
+++ /dev/null
@@ -1,467 +0,0 @@
-"""
-pytorch grid_sample doesn't support second-order derivative
-implement custom version
-"""
-
-import torch
-import torch.nn.functional as F
-import numpy as np
-
-
-def grid_sample_2d(image, optical):
- N, C, IH, IW = image.shape
- _, H, W, _ = optical.shape
-
- ix = optical[..., 0]
- iy = optical[..., 1]
-
-    ix = ((ix + 1) / 2) * (IW - 1)
-    iy = ((iy + 1) / 2) * (IH - 1)
-    with torch.no_grad():
-        ix_nw = torch.floor(ix)
-        iy_nw = torch.floor(iy)
-        ix_ne = ix_nw + 1
-        iy_ne = iy_nw
-        ix_sw = ix_nw
-        iy_sw = iy_nw + 1
-        ix_se = ix_nw + 1
-        iy_se = iy_nw + 1
-
- nw = (ix_se - ix) * (iy_se - iy)
- ne = (ix - ix_sw) * (iy_sw - iy)
- sw = (ix_ne - ix) * (iy - iy_ne)
- se = (ix - ix_nw) * (iy - iy_nw)
-
- with torch.no_grad():
- torch.clamp(ix_nw, 0, IW - 1, out=ix_nw)
- torch.clamp(iy_nw, 0, IH - 1, out=iy_nw)
-
- torch.clamp(ix_ne, 0, IW - 1, out=ix_ne)
- torch.clamp(iy_ne, 0, IH - 1, out=iy_ne)
-
- torch.clamp(ix_sw, 0, IW - 1, out=ix_sw)
- torch.clamp(iy_sw, 0, IH - 1, out=iy_sw)
-
- torch.clamp(ix_se, 0, IW - 1, out=ix_se)
- torch.clamp(iy_se, 0, IH - 1, out=iy_se)
-
- image = image.view(N, C, IH * IW)
-
- nw_val = torch.gather(image, 2, (iy_nw * IW + ix_nw).long().view(N, 1, H * W).repeat(1, C, 1))
- ne_val = torch.gather(image, 2, (iy_ne * IW + ix_ne).long().view(N, 1, H * W).repeat(1, C, 1))
- sw_val = torch.gather(image, 2, (iy_sw * IW + ix_sw).long().view(N, 1, H * W).repeat(1, C, 1))
- se_val = torch.gather(image, 2, (iy_se * IW + ix_se).long().view(N, 1, H * W).repeat(1, C, 1))
-
- out_val = (nw_val.view(N, C, H, W) * nw.view(N, 1, H, W) +
- ne_val.view(N, C, H, W) * ne.view(N, 1, H, W) +
- sw_val.view(N, C, H, W) * sw.view(N, 1, H, W) +
- se_val.view(N, C, H, W) * se.view(N, 1, H, W))
-
- return out_val
-
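-# Sanity sketch (illustrative): for grids inside [-1, 1], grid_sample_2d above
-# should match the built-in F.grid_sample with align_corners=True, since the
-# corner indices are only clamped at the image border.
-def _grid_sample_2d_check():
-    image = torch.rand(1, 3, 8, 8)
-    grid = torch.rand(1, 4, 4, 2) * 2 - 1          # normalized sample locations in [-1, 1]
-    ours = grid_sample_2d(image, grid)
-    ref = F.grid_sample(image, grid, mode='bilinear',
-                        padding_mode='border', align_corners=True)
-    print((ours - ref).abs().max())                # expected to be ~0
-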
-
-# - checked for correctness
-def grid_sample_3d(volume, optical):
- """
- bilinear sampling cannot guarantee continuous first-order gradient
- mimic pytorch grid_sample function
- The 8 corner points of a volume noted as: 4 points (front view); 4 points (back view)
- fnw (front north west) point
- bse (back south east) point
- :param volume: [B, C, X, Y, Z]
- :param optical: [B, x, y, z, 3]
- :return:
- """
- N, C, ID, IH, IW = volume.shape
- _, D, H, W, _ = optical.shape
-
- ix = optical[..., 0]
- iy = optical[..., 1]
- iz = optical[..., 2]
-
- ix = ((ix + 1) / 2) * (IW - 1)
- iy = ((iy + 1) / 2) * (IH - 1)
- iz = ((iz + 1) / 2) * (ID - 1)
-
- mask_x = (ix > 0) & (ix < IW)
- mask_y = (iy > 0) & (iy < IH)
- mask_z = (iz > 0) & (iz < ID)
-
- mask = mask_x & mask_y & mask_z # [B, x, y, z]
- mask = mask[:, None, :, :, :].repeat(1, C, 1, 1, 1) # [B, C, x, y, z]
-
- with torch.no_grad():
- # back north west
- ix_bnw = torch.floor(ix)
- iy_bnw = torch.floor(iy)
- iz_bnw = torch.floor(iz)
-
- ix_bne = ix_bnw + 1
- iy_bne = iy_bnw
- iz_bne = iz_bnw
-
- ix_bsw = ix_bnw
- iy_bsw = iy_bnw + 1
- iz_bsw = iz_bnw
-
- ix_bse = ix_bnw + 1
- iy_bse = iy_bnw + 1
- iz_bse = iz_bnw
-
- # front view
- ix_fnw = ix_bnw
- iy_fnw = iy_bnw
- iz_fnw = iz_bnw + 1
-
- ix_fne = ix_bnw + 1
- iy_fne = iy_bnw
- iz_fne = iz_bnw + 1
-
- ix_fsw = ix_bnw
- iy_fsw = iy_bnw + 1
- iz_fsw = iz_bnw + 1
-
- ix_fse = ix_bnw + 1
- iy_fse = iy_bnw + 1
- iz_fse = iz_bnw + 1
-
- # back view
- bnw = (ix_fse - ix) * (iy_fse - iy) * (iz_fse - iz) # smaller volume, larger weight
- bne = (ix - ix_fsw) * (iy_fsw - iy) * (iz_fsw - iz)
- bsw = (ix_fne - ix) * (iy - iy_fne) * (iz_fne - iz)
- bse = (ix - ix_fnw) * (iy - iy_fnw) * (iz_fnw - iz)
-
- # front view
- fnw = (ix_bse - ix) * (iy_bse - iy) * (iz - iz_bse) # smaller volume, larger weight
- fne = (ix - ix_bsw) * (iy_bsw - iy) * (iz - iz_bsw)
- fsw = (ix_bne - ix) * (iy - iy_bne) * (iz - iz_bne)
- fse = (ix - ix_bnw) * (iy - iy_bnw) * (iz - iz_bnw)
-
- with torch.no_grad():
- # back view
- torch.clamp(ix_bnw, 0, IW - 1, out=ix_bnw)
- torch.clamp(iy_bnw, 0, IH - 1, out=iy_bnw)
- torch.clamp(iz_bnw, 0, ID - 1, out=iz_bnw)
-
- torch.clamp(ix_bne, 0, IW - 1, out=ix_bne)
- torch.clamp(iy_bne, 0, IH - 1, out=iy_bne)
- torch.clamp(iz_bne, 0, ID - 1, out=iz_bne)
-
- torch.clamp(ix_bsw, 0, IW - 1, out=ix_bsw)
- torch.clamp(iy_bsw, 0, IH - 1, out=iy_bsw)
- torch.clamp(iz_bsw, 0, ID - 1, out=iz_bsw)
-
- torch.clamp(ix_bse, 0, IW - 1, out=ix_bse)
- torch.clamp(iy_bse, 0, IH - 1, out=iy_bse)
- torch.clamp(iz_bse, 0, ID - 1, out=iz_bse)
-
- # front view
- torch.clamp(ix_fnw, 0, IW - 1, out=ix_fnw)
- torch.clamp(iy_fnw, 0, IH - 1, out=iy_fnw)
- torch.clamp(iz_fnw, 0, ID - 1, out=iz_fnw)
-
- torch.clamp(ix_fne, 0, IW - 1, out=ix_fne)
- torch.clamp(iy_fne, 0, IH - 1, out=iy_fne)
- torch.clamp(iz_fne, 0, ID - 1, out=iz_fne)
-
- torch.clamp(ix_fsw, 0, IW - 1, out=ix_fsw)
- torch.clamp(iy_fsw, 0, IH - 1, out=iy_fsw)
- torch.clamp(iz_fsw, 0, ID - 1, out=iz_fsw)
-
- torch.clamp(ix_fse, 0, IW - 1, out=ix_fse)
- torch.clamp(iy_fse, 0, IH - 1, out=iy_fse)
- torch.clamp(iz_fse, 0, ID - 1, out=iz_fse)
-
- # xxx = volume[:, :, iz_bnw.long(), iy_bnw.long(), ix_bnw.long()]
- volume = volume.view(N, C, ID * IH * IW)
- # yyy = volume[:, :, (iz_bnw * ID + iy_bnw * IW + ix_bnw).long()]
-
- # back view
- bnw_val = torch.gather(volume, 2,
- (iz_bnw * ID ** 2 + iy_bnw * IW + ix_bnw).long().view(N, 1, D * H * W).repeat(1, C, 1))
- bne_val = torch.gather(volume, 2,
- (iz_bne * ID ** 2 + iy_bne * IW + ix_bne).long().view(N, 1, D * H * W).repeat(1, C, 1))
- bsw_val = torch.gather(volume, 2,
- (iz_bsw * ID ** 2 + iy_bsw * IW + ix_bsw).long().view(N, 1, D * H * W).repeat(1, C, 1))
- bse_val = torch.gather(volume, 2,
- (iz_bse * ID ** 2 + iy_bse * IW + ix_bse).long().view(N, 1, D * H * W).repeat(1, C, 1))
-
- # front view
- fnw_val = torch.gather(volume, 2,
- (iz_fnw * ID ** 2 + iy_fnw * IW + ix_fnw).long().view(N, 1, D * H * W).repeat(1, C, 1))
- fne_val = torch.gather(volume, 2,
- (iz_fne * ID ** 2 + iy_fne * IW + ix_fne).long().view(N, 1, D * H * W).repeat(1, C, 1))
- fsw_val = torch.gather(volume, 2,
- (iz_fsw * ID ** 2 + iy_fsw * IW + ix_fsw).long().view(N, 1, D * H * W).repeat(1, C, 1))
- fse_val = torch.gather(volume, 2,
- (iz_fse * ID ** 2 + iy_fse * IW + ix_fse).long().view(N, 1, D * H * W).repeat(1, C, 1))
-
- out_val = (
- # back
- bnw_val.view(N, C, D, H, W) * bnw.view(N, 1, D, H, W) +
- bne_val.view(N, C, D, H, W) * bne.view(N, 1, D, H, W) +
- bsw_val.view(N, C, D, H, W) * bsw.view(N, 1, D, H, W) +
- bse_val.view(N, C, D, H, W) * bse.view(N, 1, D, H, W) +
- # front
- fnw_val.view(N, C, D, H, W) * fnw.view(N, 1, D, H, W) +
- fne_val.view(N, C, D, H, W) * fne.view(N, 1, D, H, W) +
- fsw_val.view(N, C, D, H, W) * fsw.view(N, 1, D, H, W) +
- fse_val.view(N, C, D, H, W) * fse.view(N, 1, D, H, W)
-
- )
-
- # * zero padding
- out_val = torch.where(mask, out_val, torch.zeros_like(out_val).float().to(out_val.device))
-
- return out_val
-
-
-# Interpolation kernel
-def get_weight(s, a=-0.5):
- mask_0 = (torch.abs(s) >= 0) & (torch.abs(s) <= 1)
- mask_1 = (torch.abs(s) > 1) & (torch.abs(s) <= 2)
- mask_2 = torch.abs(s) > 2
-
- weight = torch.zeros_like(s).to(s.device)
- weight = torch.where(mask_0, (a + 2) * (torch.abs(s) ** 3) - (a + 3) * (torch.abs(s) ** 2) + 1, weight)
- weight = torch.where(mask_1,
- a * (torch.abs(s) ** 3) - (5 * a) * (torch.abs(s) ** 2) + (8 * a) * torch.abs(s) - 4 * a,
- weight)
-
- # if (torch.abs(s) >= 0) & (torch.abs(s) <= 1):
- # return (a + 2) * (torch.abs(s) ** 3) - (a + 3) * (torch.abs(s) ** 2) + 1
- #
- # elif (torch.abs(s) > 1) & (torch.abs(s) <= 2):
- # return a * (torch.abs(s) ** 3) - (5 * a) * (torch.abs(s) ** 2) + (8 * a) * torch.abs(s) - 4 * a
- # return 0
-
- return weight
-
-
-def cubic_interpolate(p, x):
- """
- one dimensional cubic interpolation
-    :param p: [N, 4]; the 4 sample values should be in order
- :param x: [N]
- :return:
- """
- return p[:, 1] + 0.5 * x * (p[:, 2] - p[:, 0] + x * (
- 2.0 * p[:, 0] - 5.0 * p[:, 1] + 4.0 * p[:, 2] - p[:, 3] + x * (
- 3.0 * (p[:, 1] - p[:, 2]) + p[:, 3] - p[:, 0])))
-
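-# Worked check (illustrative): the kernel above interpolates between p[:, 1] and
-# p[:, 2]; x = 0 returns p[:, 1], x = 1 returns p[:, 2], and a linear ramp is
-# reproduced exactly in between.
-def _cubic_interpolate_example():
-    p = torch.tensor([[0., 1., 2., 3.]])
-    print(cubic_interpolate(p, torch.tensor([0.0])))   # tensor([1.])
-    print(cubic_interpolate(p, torch.tensor([0.5])))   # tensor([1.5000])
-    print(cubic_interpolate(p, torch.tensor([1.0])))   # tensor([2.])
-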
-
-def bicubic_interpolate(p, x, y, if_batch=True):
- """
- two dimensional cubic interpolation
- :param p: [N, 4, 4]
- :param x: [N]
- :param y: [N]
- :return:
- """
- num = p.shape[0]
-
- if not if_batch:
- arr0 = cubic_interpolate(p[:, 0, :], x) # [N]
- arr1 = cubic_interpolate(p[:, 1, :], x)
- arr2 = cubic_interpolate(p[:, 2, :], x)
- arr3 = cubic_interpolate(p[:, 3, :], x)
- return cubic_interpolate(torch.stack([arr0, arr1, arr2, arr3], dim=-1), y) # [N]
- else:
- x = x[:, None].repeat(1, 4).view(-1)
- p = p.contiguous().view(num * 4, 4)
- arr = cubic_interpolate(p, x)
- arr = arr.view(num, 4)
-
- return cubic_interpolate(arr, y)
-
-
-def tricubic_interpolate(p, x, y, z):
- """
- three dimensional cubic interpolation
- :param p: [N,4,4,4]
- :param x: [N]
- :param y: [N]
- :param z: [N]
- :return:
- """
- num = p.shape[0]
-
- arr0 = bicubic_interpolate(p[:, 0, :, :], x, y) # [N]
- arr1 = bicubic_interpolate(p[:, 1, :, :], x, y)
- arr2 = bicubic_interpolate(p[:, 2, :, :], x, y)
- arr3 = bicubic_interpolate(p[:, 3, :, :], x, y)
-
- return cubic_interpolate(torch.stack([arr0, arr1, arr2, arr3], dim=-1), z) # [N]
-
-
-def cubic_interpolate_batch(p, x):
- """
- one dimensional cubic interpolation
-    :param p: [B, N, 4]; the 4 sample values should be in order
- :param x: [B, N]
- :return:
- """
- return p[:, :, 1] + 0.5 * x * (p[:, :, 2] - p[:, :, 0] + x * (
- 2.0 * p[:, :, 0] - 5.0 * p[:, :, 1] + 4.0 * p[:, :, 2] - p[:, :, 3] + x * (
- 3.0 * (p[:, :, 1] - p[:, :, 2]) + p[:, :, 3] - p[:, :, 0])))
-
-
-def bicubic_interpolate_batch(p, x, y):
- """
- two dimensional cubic interpolation
- :param p: [B, N, 4, 4]
- :param x: [B, N]
- :param y: [B, N]
- :return:
- """
- B, N, _, _ = p.shape
-
- x = x[:, :, None].repeat(1, 1, 4).view(B, N * 4) # [B, N*4]
- arr = cubic_interpolate_batch(p.contiguous().view(B, N * 4, 4), x)
- arr = arr.view(B, N, 4)
- return cubic_interpolate_batch(arr, y) # [B, N]
-
-
-# * batch version cannot speed up training
-def tricubic_interpolate_batch(p, x, y, z):
- """
- three dimensional cubic interpolation
- :param p: [N,4,4,4]
- :param x: [N]
- :param y: [N]
- :param z: [N]
- :return:
- """
- N = p.shape[0]
-
- x = x[None, :].repeat(4, 1)
- y = y[None, :].repeat(4, 1)
-
- p = p.permute(1, 0, 2, 3).contiguous()
-
- arr = bicubic_interpolate_batch(p[:, :, :, :], x, y) # [4, N]
-
- arr = arr.permute(1, 0).contiguous() # [N, 4]
-
- return cubic_interpolate(arr, z) # [N]
-
-
-def tricubic_sample_3d(volume, optical):
- """
- tricubic sampling; can guarantee continuous gradient (interpolation border)
- :param volume: [B, C, ID, IH, IW]
- :param optical: [B, D, H, W, 3]
- :param sample_num:
- :return:
- """
-
- @torch.no_grad()
- def get_shifts(x):
- x1 = -1 * (1 + x - torch.floor(x))
- x2 = -1 * (x - torch.floor(x))
- x3 = torch.floor(x) + 1 - x
- x4 = torch.floor(x) + 2 - x
-
- return torch.stack([x1, x2, x3, x4], dim=-1) # (B,d,h,w,4)
-
- N, C, ID, IH, IW = volume.shape
- _, D, H, W, _ = optical.shape
-
- device = volume.device
-
- ix = optical[..., 0]
- iy = optical[..., 1]
- iz = optical[..., 2]
-
- ix = ((ix + 1) / 2) * (IW - 1) # (B,d,h,w)
- iy = ((iy + 1) / 2) * (IH - 1)
- iz = ((iz + 1) / 2) * (ID - 1)
-
- ix = ix.view(-1)
- iy = iy.view(-1)
- iz = iz.view(-1)
-
- with torch.no_grad():
- shifts_x = get_shifts(ix).view(-1, 4) # (B*d*h*w,4)
- shifts_y = get_shifts(iy).view(-1, 4)
- shifts_z = get_shifts(iz).view(-1, 4)
-
- perm_weights = torch.ones([N * D * H * W, 4 * 4 * 4]).long().to(device)
- perm = torch.cumsum(perm_weights, dim=-1) - 1 # (B*d*h*w,64)
-
- perm_z = perm // 16 # [N*D*H*W, num]
- perm_y = (perm - perm_z * 16) // 4
- perm_x = (perm - perm_z * 16 - perm_y * 4)
-
- shifts_x = torch.gather(shifts_x, 1, perm_x) # [N*D*H*W, num]
- shifts_y = torch.gather(shifts_y, 1, perm_y)
- shifts_z = torch.gather(shifts_z, 1, perm_z)
-
- ix_target = (ix[:, None] + shifts_x).long() # [N*D*H*W, num]
- iy_target = (iy[:, None] + shifts_y).long()
- iz_target = (iz[:, None] + shifts_z).long()
-
- torch.clamp(ix_target, 0, IW - 1, out=ix_target)
- torch.clamp(iy_target, 0, IH - 1, out=iy_target)
- torch.clamp(iz_target, 0, ID - 1, out=iz_target)
-
- local_dist_x = ix - ix_target[:, 1] # ! attention here is [:, 1]
- local_dist_y = iy - iy_target[:, 1 + 4]
- local_dist_z = iz - iz_target[:, 1 + 16]
-
- local_dist_x = local_dist_x.view(N, 1, D * H * W).repeat(1, C, 1).view(-1)
- local_dist_y = local_dist_y.view(N, 1, D * H * W).repeat(1, C, 1).view(-1)
- local_dist_z = local_dist_z.view(N, 1, D * H * W).repeat(1, C, 1).view(-1)
-
- # ! attention: IW is correct
- idx_target = iz_target * ID ** 2 + iy_target * IW + ix_target # [N*D*H*W, num]
-
- volume = volume.view(N, C, ID * IH * IW)
-
- out = torch.gather(volume, 2,
- idx_target.view(N, 1, D * H * W * 64).repeat(1, C, 1))
- out = out.view(N * C * D * H * W, 4, 4, 4)
-
- # - tricubic_interpolate() is a bit faster than tricubic_interpolate_batch()
- final = tricubic_interpolate(out, local_dist_x, local_dist_y, local_dist_z).view(N, C, D, H, W) # [N,C,D,H,W]
-
- return final
-
-
-
-if __name__ == "__main__":
- # image = torch.Tensor([[1, 2, 3], [4, 5, 6], [7, 8, 9]]).view(1, 3, 1, 3)
- #
- # optical = torch.Tensor([0.9, 0.5, 0.6, -0.7]).view(1, 1, 2, 2)
- #
- # print(grid_sample_2d(image, optical))
- #
- # print(F.grid_sample(image, optical, padding_mode='border', align_corners=True))
-
- from ops.generate_grids import generate_grid
-
- p = torch.tensor([x for x in range(4)]).view(1, 4).float()
-
- v = cubic_interpolate(p, torch.tensor([0.5]).view(1))
- # v = bicubic_interpolate(p, torch.tensor([2/3]).view(1) , torch.tensor([2/3]).view(1))
-
- vsize = 9
- volume = generate_grid([vsize, vsize, vsize], 1) # [1,3,10,10,10]
- # volume = torch.tensor([x for x in range(1000)]).view(1, 1, 10, 10, 10).float()
- X, Y, Z = 0, 0, 6
- x = 2 * X / (vsize - 1) - 1
- y = 2 * Y / (vsize - 1) - 1
- z = 2 * Z / (vsize - 1) - 1
-
- # print(volume[:, :, Z, Y, X])
-
- # volume = volume.view(1, 3, -1)
- # xx = volume[:, :, Z * 9*9 + Y * 9 + X]
-
- optical = torch.Tensor([-0.6, -0.7, 0.5, 0.3, 0.5, 0.5]).view(1, 1, 1, 2, 3)
-
- print(F.grid_sample(volume, optical, padding_mode='border', align_corners=True))
- print(grid_sample_3d(volume, optical))
- print(tricubic_sample_3d(volume, optical))
- # target, relative_coords = implicit_sample_3d(volume, optical, 1)
- # print(target)
diff --git a/One-2-3-45-master 2/reconstruction/tsparse/__init__.py b/One-2-3-45-master 2/reconstruction/tsparse/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/One-2-3-45-master 2/reconstruction/tsparse/modules.py b/One-2-3-45-master 2/reconstruction/tsparse/modules.py
deleted file mode 100644
index 520809144718d84b77708bbc7a582a64078958b4..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/tsparse/modules.py
+++ /dev/null
@@ -1,326 +0,0 @@
-import torch
-import torch.nn as nn
-import torchsparse
-import torchsparse.nn as spnn
-from torchsparse.tensor import PointTensor
-
-from tsparse.torchsparse_utils import *
-
-
-# __all__ = ['SPVCNN', 'SConv3d', 'SparseConvGRU']
-
-
-class ConvBnReLU(nn.Module):
- def __init__(self, in_channels, out_channels,
- kernel_size=3, stride=1, pad=1):
- super(ConvBnReLU, self).__init__()
- self.conv = nn.Conv2d(in_channels, out_channels,
- kernel_size, stride=stride, padding=pad, bias=False)
- self.bn = nn.BatchNorm2d(out_channels)
- self.activation = nn.ReLU(inplace=True)
-
- def forward(self, x):
- return self.activation(self.bn(self.conv(x)))
-
-
-class ConvBnReLU3D(nn.Module):
- def __init__(self, in_channels, out_channels,
- kernel_size=3, stride=1, pad=1):
- super(ConvBnReLU3D, self).__init__()
- self.conv = nn.Conv3d(in_channels, out_channels,
- kernel_size, stride=stride, padding=pad, bias=False)
- self.bn = nn.BatchNorm3d(out_channels)
- self.activation = nn.ReLU(inplace=True)
-
- def forward(self, x):
- return self.activation(self.bn(self.conv(x)))
-
-
-################################### feature net ######################################
-class FeatureNet(nn.Module):
- """
-    Output 3 levels of features using an FPN structure
- """
-
- def __init__(self):
- super(FeatureNet, self).__init__()
-
- self.conv0 = nn.Sequential(
- ConvBnReLU(3, 8, 3, 1, 1),
- ConvBnReLU(8, 8, 3, 1, 1))
-
- self.conv1 = nn.Sequential(
- ConvBnReLU(8, 16, 5, 2, 2),
- ConvBnReLU(16, 16, 3, 1, 1),
- ConvBnReLU(16, 16, 3, 1, 1))
-
- self.conv2 = nn.Sequential(
- ConvBnReLU(16, 32, 5, 2, 2),
- ConvBnReLU(32, 32, 3, 1, 1),
- ConvBnReLU(32, 32, 3, 1, 1))
-
- self.toplayer = nn.Conv2d(32, 32, 1)
- self.lat1 = nn.Conv2d(16, 32, 1)
- self.lat0 = nn.Conv2d(8, 32, 1)
-
- # to reduce channel size of the outputs from FPN
- self.smooth1 = nn.Conv2d(32, 16, 3, padding=1)
- self.smooth0 = nn.Conv2d(32, 8, 3, padding=1)
-
- def _upsample_add(self, x, y):
- return torch.nn.functional.interpolate(x, scale_factor=2,
- mode="bilinear", align_corners=True) + y
-
- def forward(self, x):
- # x: (B, 3, H, W)
- conv0 = self.conv0(x) # (B, 8, H, W)
- conv1 = self.conv1(conv0) # (B, 16, H//2, W//2)
- conv2 = self.conv2(conv1) # (B, 32, H//4, W//4)
- feat2 = self.toplayer(conv2) # (B, 32, H//4, W//4)
- feat1 = self._upsample_add(feat2, self.lat1(conv1)) # (B, 32, H//2, W//2)
- feat0 = self._upsample_add(feat1, self.lat0(conv0)) # (B, 32, H, W)
-
- # reduce output channels
- feat1 = self.smooth1(feat1) # (B, 16, H//2, W//2)
- feat0 = self.smooth0(feat0) # (B, 8, H, W)
-
- # feats = {"level_0": feat0,
- # "level_1": feat1,
- # "level_2": feat2}
-
- return [feat2, feat1, feat0] # coarser to finer features
-
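-# Illustrative shape check for FeatureNet (hypothetical input size): a
-# (B, 3, H, W) image is mapped to features at strides 4, 2 and 1 with
-# 32, 16 and 8 channels respectively.
-def _feature_net_shape_check():
-    net = FeatureNet()
-    x = torch.randn(2, 3, 256, 256)
-    feat2, feat1, feat0 = net(x)
-    print(feat2.shape)   # torch.Size([2, 32, 64, 64])
-    print(feat1.shape)   # torch.Size([2, 16, 128, 128])
-    print(feat0.shape)   # torch.Size([2, 8, 256, 256])
-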
-
-class BasicSparseConvolutionBlock(nn.Module):
- def __init__(self, inc, outc, ks=3, stride=1, dilation=1):
- super().__init__()
- self.net = nn.Sequential(
- spnn.Conv3d(inc,
- outc,
- kernel_size=ks,
- dilation=dilation,
- stride=stride),
- spnn.BatchNorm(outc),
- spnn.ReLU(True))
-
- def forward(self, x):
- out = self.net(x)
- return out
-
-
-class BasicSparseDeconvolutionBlock(nn.Module):
- def __init__(self, inc, outc, ks=3, stride=1):
- super().__init__()
- self.net = nn.Sequential(
- spnn.Conv3d(inc,
- outc,
- kernel_size=ks,
- stride=stride,
- transposed=True),
- spnn.BatchNorm(outc),
- spnn.ReLU(True))
-
- def forward(self, x):
- return self.net(x)
-
-
-class SparseResidualBlock(nn.Module):
- def __init__(self, inc, outc, ks=3, stride=1, dilation=1):
- super().__init__()
- self.net = nn.Sequential(
- spnn.Conv3d(inc,
- outc,
- kernel_size=ks,
- dilation=dilation,
- stride=stride), spnn.BatchNorm(outc),
- spnn.ReLU(True),
- spnn.Conv3d(outc,
- outc,
- kernel_size=ks,
- dilation=dilation,
- stride=1), spnn.BatchNorm(outc))
-
- self.downsample = nn.Sequential() if (inc == outc and stride == 1) else \
- nn.Sequential(
- spnn.Conv3d(inc, outc, kernel_size=1, dilation=1, stride=stride),
- spnn.BatchNorm(outc)
- )
-
- self.relu = spnn.ReLU(True)
-
- def forward(self, x):
- out = self.relu(self.net(x) + self.downsample(x))
- return out
-
-
-class SPVCNN(nn.Module):
- def __init__(self, **kwargs):
- super().__init__()
-
- self.dropout = kwargs['dropout']
-
- cr = kwargs.get('cr', 1.0)
- cs = [32, 64, 128, 96, 96]
- cs = [int(cr * x) for x in cs]
-
- if 'pres' in kwargs and 'vres' in kwargs:
- self.pres = kwargs['pres']
- self.vres = kwargs['vres']
-
- self.stem = nn.Sequential(
- spnn.Conv3d(kwargs['in_channels'], cs[0], kernel_size=3, stride=1),
- spnn.BatchNorm(cs[0]), spnn.ReLU(True)
- )
-
- self.stage1 = nn.Sequential(
- BasicSparseConvolutionBlock(cs[0], cs[0], ks=2, stride=2, dilation=1),
- SparseResidualBlock(cs[0], cs[1], ks=3, stride=1, dilation=1),
- SparseResidualBlock(cs[1], cs[1], ks=3, stride=1, dilation=1),
- )
-
- self.stage2 = nn.Sequential(
- BasicSparseConvolutionBlock(cs[1], cs[1], ks=2, stride=2, dilation=1),
- SparseResidualBlock(cs[1], cs[2], ks=3, stride=1, dilation=1),
- SparseResidualBlock(cs[2], cs[2], ks=3, stride=1, dilation=1),
- )
-
- self.up1 = nn.ModuleList([
- BasicSparseDeconvolutionBlock(cs[2], cs[3], ks=2, stride=2),
- nn.Sequential(
- SparseResidualBlock(cs[3] + cs[1], cs[3], ks=3, stride=1,
- dilation=1),
- SparseResidualBlock(cs[3], cs[3], ks=3, stride=1, dilation=1),
- )
- ])
-
- self.up2 = nn.ModuleList([
- BasicSparseDeconvolutionBlock(cs[3], cs[4], ks=2, stride=2),
- nn.Sequential(
- SparseResidualBlock(cs[4] + cs[0], cs[4], ks=3, stride=1,
- dilation=1),
- SparseResidualBlock(cs[4], cs[4], ks=3, stride=1, dilation=1),
- )
- ])
-
- self.point_transforms = nn.ModuleList([
- nn.Sequential(
- nn.Linear(cs[0], cs[2]),
- nn.BatchNorm1d(cs[2]),
- nn.ReLU(True),
- ),
- nn.Sequential(
- nn.Linear(cs[2], cs[4]),
- nn.BatchNorm1d(cs[4]),
- nn.ReLU(True),
- )
- ])
-
- self.weight_initialization()
-
- if self.dropout:
- self.dropout = nn.Dropout(0.3, True)
-
- def weight_initialization(self):
- for m in self.modules():
- if isinstance(m, nn.BatchNorm1d):
- nn.init.constant_(m.weight, 1)
- nn.init.constant_(m.bias, 0)
-
- def forward(self, z):
- # x: SparseTensor z: PointTensor
- x0 = initial_voxelize(z, self.pres, self.vres)
-
- x0 = self.stem(x0)
- z0 = voxel_to_point(x0, z, nearest=False)
- z0.F = z0.F
-
- x1 = point_to_voxel(x0, z0)
- x1 = self.stage1(x1)
- x2 = self.stage2(x1)
- z1 = voxel_to_point(x2, z0)
- z1.F = z1.F + self.point_transforms[0](z0.F)
-
- y3 = point_to_voxel(x2, z1)
- if self.dropout:
- y3.F = self.dropout(y3.F)
- y3 = self.up1[0](y3)
- y3 = torchsparse.cat([y3, x1])
- y3 = self.up1[1](y3)
-
- y4 = self.up2[0](y3)
- y4 = torchsparse.cat([y4, x0])
- y4 = self.up2[1](y4)
- z3 = voxel_to_point(y4, z1)
- z3.F = z3.F + self.point_transforms[1](z1.F)
-
- return z3.F
-
-
-class SparseCostRegNet(nn.Module):
- """
-    Sparse cost regularization network;
-    requires sparse tensors as input
- """
-
- def __init__(self, d_in, d_out=8):
- super(SparseCostRegNet, self).__init__()
- self.d_in = d_in
- self.d_out = d_out
-
- self.conv0 = BasicSparseConvolutionBlock(d_in, d_out)
-
- self.conv1 = BasicSparseConvolutionBlock(d_out, 16, stride=2)
- self.conv2 = BasicSparseConvolutionBlock(16, 16)
-
- self.conv3 = BasicSparseConvolutionBlock(16, 32, stride=2)
- self.conv4 = BasicSparseConvolutionBlock(32, 32)
-
- self.conv5 = BasicSparseConvolutionBlock(32, 64, stride=2)
- self.conv6 = BasicSparseConvolutionBlock(64, 64)
-
- self.conv7 = BasicSparseDeconvolutionBlock(64, 32, ks=3, stride=2)
-
- self.conv9 = BasicSparseDeconvolutionBlock(32, 16, ks=3, stride=2)
-
- self.conv11 = BasicSparseDeconvolutionBlock(16, d_out, ks=3, stride=2)
-
- def forward(self, x):
- """
-
- :param x: sparse tensor
- :return: sparse tensor
- """
- conv0 = self.conv0(x)
- conv2 = self.conv2(self.conv1(conv0))
- conv4 = self.conv4(self.conv3(conv2))
-
- x = self.conv6(self.conv5(conv4))
- x = conv4 + self.conv7(x)
- del conv4
- x = conv2 + self.conv9(x)
- del conv2
- x = conv0 + self.conv11(x)
- del conv0
- return x.F
-
-
-class SConv3d(nn.Module):
- def __init__(self, inc, outc, pres, vres, ks=3, stride=1, dilation=1):
- super().__init__()
- self.net = spnn.Conv3d(inc,
- outc,
- kernel_size=ks,
- dilation=dilation,
- stride=stride)
- self.point_transforms = nn.Sequential(
- nn.Linear(inc, outc),
- )
- self.pres = pres
- self.vres = vres
-
- def forward(self, z):
- x = initial_voxelize(z, self.pres, self.vres)
- x = self.net(x)
- out = voxel_to_point(x, z, nearest=False)
- out.F = out.F + self.point_transforms(z.F)
- return out
diff --git a/One-2-3-45-master 2/reconstruction/tsparse/torchsparse_utils.py b/One-2-3-45-master 2/reconstruction/tsparse/torchsparse_utils.py
deleted file mode 100644
index 32f5b92ae5ef4bf9836b1e4c1dc17eaf3f7c93f9..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/tsparse/torchsparse_utils.py
+++ /dev/null
@@ -1,137 +0,0 @@
-"""
-Copied from:
-https://github.com/mit-han-lab/spvnas/blob/b24f50379ed888d3a0e784508a809d4e92e820c0/core/models/utils.py
-"""
-import torch
-import torchsparse.nn.functional as F
-from torchsparse import PointTensor, SparseTensor
-from torchsparse.nn.utils import get_kernel_offsets
-
-import numpy as np
-
-# __all__ = ['initial_voxelize', 'point_to_voxel', 'voxel_to_point']
-
-
-# z: PointTensor
-# return: SparseTensor
-def initial_voxelize(z, init_res, after_res):
- new_float_coord = torch.cat(
- [(z.C[:, :3] * init_res) / after_res, z.C[:, -1].view(-1, 1)], 1)
-
- pc_hash = F.sphash(torch.floor(new_float_coord).int())
- sparse_hash = torch.unique(pc_hash)
- idx_query = F.sphashquery(pc_hash, sparse_hash)
- counts = F.spcount(idx_query.int(), len(sparse_hash))
-
- inserted_coords = F.spvoxelize(torch.floor(new_float_coord), idx_query,
- counts)
- inserted_coords = torch.round(inserted_coords).int()
- inserted_feat = F.spvoxelize(z.F, idx_query, counts)
-
- new_tensor = SparseTensor(inserted_feat, inserted_coords, 1)
- new_tensor.cmaps.setdefault(new_tensor.stride, new_tensor.coords)
- z.additional_features['idx_query'][1] = idx_query
- z.additional_features['counts'][1] = counts
- z.C = new_float_coord
-
- return new_tensor
-
-
-# x: SparseTensor, z: PointTensor
-# return: SparseTensor
-def point_to_voxel(x, z):
- if z.additional_features is None or z.additional_features.get('idx_query') is None \
- or z.additional_features['idx_query'].get(x.s) is None:
- # pc_hash = hash_gpu(torch.floor(z.C).int())
- pc_hash = F.sphash(
- torch.cat([
- torch.floor(z.C[:, :3] / x.s[0]).int() * x.s[0],
- z.C[:, -1].int().view(-1, 1)
- ], 1))
- sparse_hash = F.sphash(x.C)
- idx_query = F.sphashquery(pc_hash, sparse_hash)
- counts = F.spcount(idx_query.int(), x.C.shape[0])
- z.additional_features['idx_query'][x.s] = idx_query
- z.additional_features['counts'][x.s] = counts
- else:
- idx_query = z.additional_features['idx_query'][x.s]
- counts = z.additional_features['counts'][x.s]
-
- inserted_feat = F.spvoxelize(z.F, idx_query, counts)
- new_tensor = SparseTensor(inserted_feat, x.C, x.s)
- new_tensor.cmaps = x.cmaps
- new_tensor.kmaps = x.kmaps
-
- return new_tensor
-
-
-# x: SparseTensor, z: PointTensor
-# return: PointTensor
-def voxel_to_point(x, z, nearest=False):
- if z.idx_query is None or z.weights is None or z.idx_query.get(
- x.s) is None or z.weights.get(x.s) is None:
- off = get_kernel_offsets(2, x.s, 1, device=z.F.device)
- # old_hash = kernel_hash_gpu(torch.floor(z.C).int(), off)
- old_hash = F.sphash(
- torch.cat([
- torch.floor(z.C[:, :3] / x.s[0]).int() * x.s[0],
- z.C[:, -1].int().view(-1, 1)
- ], 1), off)
- mm = x.C.to(z.F.device)
- pc_hash = F.sphash(x.C.to(z.F.device))
- idx_query = F.sphashquery(old_hash, pc_hash)
- weights = F.calc_ti_weights(z.C, idx_query,
- scale=x.s[0]).transpose(0, 1).contiguous()
- idx_query = idx_query.transpose(0, 1).contiguous()
- if nearest:
- weights[:, 1:] = 0.
- idx_query[:, 1:] = -1
- new_feat = F.spdevoxelize(x.F, idx_query, weights)
- new_tensor = PointTensor(new_feat,
- z.C,
- idx_query=z.idx_query,
- weights=z.weights)
- new_tensor.additional_features = z.additional_features
- new_tensor.idx_query[x.s] = idx_query
- new_tensor.weights[x.s] = weights
- z.idx_query[x.s] = idx_query
- z.weights[x.s] = weights
-
- else:
-        new_feat = F.spdevoxelize(x.F, z.idx_query.get(x.s),
-                                  z.weights.get(x.s))  # - sparse trilinear interpolation operation
- new_tensor = PointTensor(new_feat,
- z.C,
- idx_query=z.idx_query,
- weights=z.weights)
- new_tensor.additional_features = z.additional_features
-
- return new_tensor
-
-
-def sparse_to_dense_torch_batch(locs, values, dim, default_val):
- dense = torch.full([dim[0], dim[1], dim[2], dim[3]], float(default_val), device=locs.device)
- dense[locs[:, 0], locs[:, 1], locs[:, 2], locs[:, 3]] = values
- return dense
-
-
-def sparse_to_dense_torch(locs, values, dim, default_val, device):
- dense = torch.full([dim[0], dim[1], dim[2]], float(default_val), device=device)
- if locs.shape[0] > 0:
- dense[locs[:, 0], locs[:, 1], locs[:, 2]] = values
- return dense
-
-
-def sparse_to_dense_channel(locs, values, dim, c, default_val, device):
- locs = locs.to(torch.int64)
- dense = torch.full([dim[0], dim[1], dim[2], c], float(default_val), device=device)
- if locs.shape[0] > 0:
- dense[locs[:, 0], locs[:, 1], locs[:, 2]] = values
- return dense
-
-
-def sparse_to_dense_np(locs, values, dim, default_val):
- dense = np.zeros([dim[0], dim[1], dim[2]], dtype=values.dtype)
- dense.fill(default_val)
- dense[locs[:, 0], locs[:, 1], locs[:, 2]] = values
- return dense
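-
-
-# Minimal usage sketch (shapes assumed, for illustration only):
-#   locs   = torch.tensor([[0, 1, 2], [3, 3, 3]])   # (N, 3) integer voxel coordinates
-#   values = torch.rand(2, 8)                       # (N, C) per-voxel features
-#   vol = sparse_to_dense_channel(locs, values, dim=(4, 4, 4), c=8,
-#                                 default_val=0, device=values.device)
-#   # vol has shape (4, 4, 4, 8); voxels not listed in locs keep default_val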
diff --git a/One-2-3-45-master 2/reconstruction/utils/__init__.py b/One-2-3-45-master 2/reconstruction/utils/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/One-2-3-45-master 2/reconstruction/utils/misc_utils.py b/One-2-3-45-master 2/reconstruction/utils/misc_utils.py
deleted file mode 100644
index 85e80cf4e2bcf8bed0086e2b6c8a3bf3da40a056..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/utils/misc_utils.py
+++ /dev/null
@@ -1,219 +0,0 @@
-import os, torch, cv2, re
-import numpy as np
-
-from PIL import Image
-import torch.nn.functional as F
-import torchvision.transforms as T
-
-# Misc
-img2mse = lambda x, y: torch.mean((x - y) ** 2)
-mse2psnr = lambda x: -10. * torch.log(x) / torch.log(torch.Tensor([10.]))
-to8b = lambda x: (255 * np.clip(x, 0, 1)).astype(np.uint8)
-mse2psnr2 = lambda x: -10. * np.log(x) / np.log(10.)
-
-
-def get_psnr(imgs_pred, imgs_gt):
- psnrs = []
- for (img, tar) in zip(imgs_pred, imgs_gt):
- psnrs.append(mse2psnr2(np.mean((img - tar.cpu().numpy()) ** 2)))
- return np.array(psnrs)
-
-
-def init_log(log, keys):
- for key in keys:
- log[key] = torch.tensor([0.0], dtype=float)
- return log
-
-
-def visualize_depth_numpy(depth, minmax=None, cmap=cv2.COLORMAP_JET):
- """
- depth: (H, W)
- """
-
- x = np.nan_to_num(depth) # change nan to 0
- if minmax is None:
- mi = np.min(x[x > 0]) # get minimum positive depth (ignore background)
- ma = np.max(x)
- else:
- mi, ma = minmax
-
- x = (x - mi) / (ma - mi + 1e-8) # normalize to 0~1
- x = (255 * x).astype(np.uint8)
- x_ = cv2.applyColorMap(x, cmap)
- return x_, [mi, ma]
-
-
-def visualize_depth(depth, minmax=None, cmap=cv2.COLORMAP_JET):
- """
- depth: (H, W)
- """
- if type(depth) is not np.ndarray:
- depth = depth.cpu().numpy()
-
- x = np.nan_to_num(depth) # change nan to 0
- if minmax is None:
- mi = np.min(x[x > 0]) # get minimum positive depth (ignore background)
- ma = np.max(x)
- else:
- mi, ma = minmax
-
- x = (x - mi) / (ma - mi + 1e-8) # normalize to 0~1
- x = (255 * x).astype(np.uint8)
- x_ = Image.fromarray(cv2.applyColorMap(x, cmap))
- x_ = T.ToTensor()(x_) # (3, H, W)
- return x_, [mi, ma]
-
-
-def abs_error_numpy(depth_pred, depth_gt, mask):
- depth_pred, depth_gt = depth_pred[mask], depth_gt[mask]
- return np.abs(depth_pred - depth_gt)
-
-
-def abs_error(depth_pred, depth_gt, mask):
- depth_pred, depth_gt = depth_pred[mask], depth_gt[mask]
- err = depth_pred - depth_gt
- return np.abs(err) if type(depth_pred) is np.ndarray else err.abs()
-
-
-def acc_threshold(depth_pred, depth_gt, mask, threshold):
- """
-    returns a per-pixel indicator (1.0 where the depth error is below @threshold,
-    0.0 elsewhere); take its mean to obtain the accuracy percentage
- """
- errors = abs_error(depth_pred, depth_gt, mask)
- acc_mask = errors < threshold
- return acc_mask.astype('float') if type(depth_pred) is np.ndarray else acc_mask.float()
-
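-# Hedged example (inputs assumed): overall accuracy at a 1.0-unit error threshold
-#   acc_1 = acc_threshold(depth_pred, depth_gt, mask, threshold=1.0).mean()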
-
-def to_tensor_cuda(data, device, filter):
- for item in data.keys():
-
- if item in filter:
- continue
-
- if type(data[item]) is np.ndarray:
- data[item] = torch.tensor(data[item], dtype=torch.float32, device=device)
- else:
- data[item] = data[item].float().to(device)
- return data
-
-
-def to_cuda(data, device, filter):
- for item in data.keys():
- if item in filter:
- continue
-
- data[item] = data[item].float().to(device)
- return data
-
-
-def tensor_unsqueeze(data, filter):
- for item in data.keys():
- if item in filter:
- continue
-
- data[item] = data[item][None]
- return data
-
-
-def filter_keys(dict):
- dict.pop('N_samples')
- if 'ndc' in dict.keys():
- dict.pop('ndc')
- if 'lindisp' in dict.keys():
- dict.pop('lindisp')
- return dict
-
-
-def sub_selete_data(data_batch, device, idx, filtKey=[],
- filtIndex=['view_ids_all', 'c2ws_all', 'scan', 'bbox', 'w2ref', 'ref2w', 'light_id', 'ckpt',
- 'idx']):
- data_sub_selete = {}
- for item in data_batch.keys():
- data_sub_selete[item] = data_batch[item][:, idx].float() if (
-                item not in filtIndex and torch.is_tensor(data_batch[item]) and data_batch[item].dim() > 2) else data_batch[item].float()
- if not data_sub_selete[item].is_cuda:
- data_sub_selete[item] = data_sub_selete[item].to(device)
- return data_sub_selete
-
-
-def detach_data(dictionary):
- dictionary_new = {}
- for key in dictionary.keys():
- dictionary_new[key] = dictionary[key].detach().clone()
- return dictionary_new
-
-
-def read_pfm(filename):
- file = open(filename, 'rb')
- color = None
- width = None
- height = None
- scale = None
- endian = None
-
- header = file.readline().decode('utf-8').rstrip()
- if header == 'PF':
- color = True
- elif header == 'Pf':
- color = False
- else:
- raise Exception('Not a PFM file.')
-
- dim_match = re.match(r'^(\d+)\s(\d+)\s$', file.readline().decode('utf-8'))
- if dim_match:
- width, height = map(int, dim_match.groups())
- else:
- raise Exception('Malformed PFM header.')
-
- scale = float(file.readline().rstrip())
- if scale < 0: # little-endian
- endian = '<'
- scale = -scale
- else:
- endian = '>' # big-endian
-
- data = np.fromfile(file, endian + 'f')
- shape = (height, width, 3) if color else (height, width)
-
- data = np.reshape(data, shape)
- data = np.flipud(data)
- file.close()
- return data, scale
-
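-# For reference, read_pfm expects the standard three-line PFM ASCII header followed by
-# raw float32 data stored bottom row first (hence the flipud above), e.g.:
-#   Pf
-#   640 480
-#   -1.0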
-
-from torch.optim.lr_scheduler import CosineAnnealingLR, MultiStepLR
-
-
-# from warmup_scheduler import GradualWarmupScheduler
-def get_scheduler(hparams, optimizer):
- eps = 1e-8
- if hparams.lr_scheduler == 'steplr':
- scheduler = MultiStepLR(optimizer, milestones=hparams.decay_step,
- gamma=hparams.decay_gamma)
- elif hparams.lr_scheduler == 'cosine':
- scheduler = CosineAnnealingLR(optimizer, T_max=hparams.num_epochs, eta_min=eps)
-
- else:
- raise ValueError('scheduler not recognized!')
-
- # if hparams.warmup_epochs > 0 and hparams.optimizer not in ['radam', 'ranger']:
- # scheduler = GradualWarmupScheduler(optimizer, multiplier=hparams.warmup_multiplier,
- # total_epoch=hparams.warmup_epochs, after_scheduler=scheduler)
- return scheduler
-
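-# Usage sketch (hparams fields assumed, mirroring the branches above):
-#   hparams.lr_scheduler, hparams.num_epochs = 'cosine', 20
-#   scheduler = get_scheduler(hparams, optimizer)
-#   for epoch in range(hparams.num_epochs):
-#       ...  # run one training epoch
-#       scheduler.step()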
-
-#### pairing ####
-def get_nearest_pose_ids(tar_pose, ref_poses, num_select):
- '''
- Args:
- tar_pose: target pose [N, 4, 4]
- ref_poses: reference poses [M, 4, 4]
- num_select: the number of nearest views to select
- Returns: the selected indices
- '''
-
- dists = np.linalg.norm(tar_pose[:, None, :3, 3] - ref_poses[None, :, :3, 3], axis=-1)
-
- sorted_ids = np.argsort(dists, axis=-1)
- selected_ids = sorted_ids[:, :num_select]
- return selected_ids
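-
-
-# Example (shapes assumed): pick the 4 reference cameras closest to each target camera
-#   nearest_ids = get_nearest_pose_ids(tar_pose, ref_poses, num_select=4)  # (N, 4) indices into ref_poses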
diff --git a/One-2-3-45-master 2/reconstruction/utils/training_utils.py b/One-2-3-45-master 2/reconstruction/utils/training_utils.py
deleted file mode 100644
index 5d128ba2beda39b708850bd4c17c4603a8a17848..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/reconstruction/utils/training_utils.py
+++ /dev/null
@@ -1,129 +0,0 @@
-import numpy as np
-import torchvision.utils as vutils
-import torch, random
-import torch.nn.functional as F
-
-
-# print arguments
-def print_args(args):
- print("################################ args ################################")
- for k, v in args.__dict__.items():
- print("{0: <10}\t{1: <30}\t{2: <20}".format(k, str(v), str(type(v))))
- print("########################################################################")
-
-
-# torch.no_grad wrapper for functions
-def make_nograd_func(func):
- def wrapper(*f_args, **f_kwargs):
- with torch.no_grad():
- ret = func(*f_args, **f_kwargs)
- return ret
-
- return wrapper
-
-
-# convert a function into recursive style to handle nested dict/list/tuple variables
-def make_recursive_func(func):
- def wrapper(vars, device=None):
- if isinstance(vars, list):
- return [wrapper(x, device) for x in vars]
- elif isinstance(vars, tuple):
- return tuple([wrapper(x, device) for x in vars])
- elif isinstance(vars, dict):
- return {k: wrapper(v, device) for k, v in vars.items()}
- else:
- return func(vars, device)
-
- return wrapper
-
-
-@make_recursive_func
-def tensor2float(vars, device=None):  # 'device' is unused; make_recursive_func forwards it positionally
- if isinstance(vars, float):
- return vars
- elif isinstance(vars, torch.Tensor):
- return vars.data.item()
- else:
- raise NotImplementedError("invalid input type {} for tensor2float".format(type(vars)))
-
-
-@make_recursive_func
-def tensor2numpy(vars, device=None):  # 'device' is unused; make_recursive_func forwards it positionally
- if isinstance(vars, np.ndarray):
- return vars
- elif isinstance(vars, torch.Tensor):
- return vars.detach().cpu().numpy().copy()
- else:
- raise NotImplementedError("invalid input type {} for tensor2numpy".format(type(vars)))
-
-
-@make_recursive_func
-def numpy2tensor(vars, device='cpu'):
-    if not isinstance(vars, torch.Tensor) and vars is not None:
- return torch.tensor(vars, device=device)
- elif isinstance(vars, torch.Tensor):
- return vars
- elif vars is None:
- return vars
- else:
-        raise NotImplementedError("invalid input type {} for numpy2tensor".format(type(vars)))
-
-
-@make_recursive_func
-def tocuda(vars, device='cuda'):
- if isinstance(vars, torch.Tensor):
- return vars.to(device)
- elif isinstance(vars, str):
- return vars
- else:
- raise NotImplementedError("invalid input type {} for tocuda".format(type(vars)))
-
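-# Because of @make_recursive_func these converters walk nested lists/tuples/dicts, e.g.
-# (illustrative only):
-#   batch = {'imgs': torch.rand(2, 3, 64, 64), 'meta': {'scale': torch.tensor(1.0)}}
-#   batch = tocuda(batch, 'cuda')     # every tensor moved to the GPU, structure preserved
-#   logs  = tensor2numpy(batch)       # ...and converted back to nested numpy arrays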
-
-import torch.distributed as dist
-
-
-def synchronize():
- """
- Helper function to synchronize (barrier) among all processes when
- using distributed training
- """
- if not dist.is_available():
- return
- if not dist.is_initialized():
- return
- world_size = dist.get_world_size()
- if world_size == 1:
- return
- dist.barrier()
-
-
-def get_world_size():
- if not dist.is_available():
- return 1
- if not dist.is_initialized():
- return 1
- return dist.get_world_size()
-
-
-def reduce_scalar_outputs(scalar_outputs):
- world_size = get_world_size()
- if world_size < 2:
- return scalar_outputs
- with torch.no_grad():
- names = []
- scalars = []
- for k in sorted(scalar_outputs.keys()):
- names.append(k)
- if isinstance(scalar_outputs[k], torch.Tensor):
- scalars.append(scalar_outputs[k])
- else:
- scalars.append(torch.tensor(scalar_outputs[k], device='cuda'))
- scalars = torch.stack(scalars, dim=0)
- dist.reduce(scalars, dst=0)
- if dist.get_rank() == 0:
- # only main process gets accumulated, so only divide by
- # world_size in this case
- scalars /= world_size
- reduced_scalars = {k: v for k, v in zip(names, scalars)}
-
- return reduced_scalars
diff --git a/One-2-3-45-master 2/requirements.txt b/One-2-3-45-master 2/requirements.txt
deleted file mode 100644
index 90d1b4f0dd1df35205d682ce814002513ae4ca70..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/requirements.txt
+++ /dev/null
@@ -1,60 +0,0 @@
-albumentations>=1.3.1
-opencv-python>=4.8.0.76
-pudb>=2022.1.3
-imageio>=2.31.1
-imageio-ffmpeg>=0.4.8
-pytorch-lightning>=2.0.6
-omegaconf>=2.3.0
-test-tube>=0.7.5
-streamlit>=1.25.0
-einops>=0.6.1
-torch-fidelity>=0.3.0
-transformers>=4.31.0
-kornia>=0.7.0
-webdataset>=0.2.48
-torchmetrics>=1.0.3
-fire>=0.5.0
-gradio>=3.40.1
-diffusers>=0.19.3
-datasets[vision]>=2.14.4
-rich>=13.5.2
-plotly>=5.16.0
--e git+https://github.com/CompVis/taming-transformers.git#egg=taming-transformers
-# elev est
-dl_ext>=1.3.4
-loguru>=0.7.0
-matplotlib>=3.7.2
-multipledispatch>=1.0.0
-packaging>=23.1
-Pillow>=9.3.0
-PyYAML>=6.0.1
-scikit_image>=0.21.0
-scikit_learn>=1.3.0
-scipy>=1.11.1
-setuptools>=59.6.0
-tensorboardX>=2.6.2
-tqdm>=4.66.1
-transforms3d>=0.4.1
-trimesh>=3.23.1
-yacs>=0.1.8
-gdown>=4.7.1
-git+https://github.com/NVlabs/nvdiffrast.git
-git+https://github.com/openai/CLIP.git
-# segment anything
-onnxruntime>=1.15.1
-onnx>=1.14.0
-git+https://github.com/facebookresearch/segment-anything.git
-# rembg
-rembg>=2.0.50
-# reconstruction
-pyhocon>=0.3.60
-icecream>=2.1.3
-PyMCubes>=0.1.4
-ninja>=1.11.1
-# jupyter
-jupyter>=1.0.0
-jupyterlab>=4.0.5
-ipywidgets>=8.1.0
-ipykernel>=6.25.1
-panel>=1.2.1
-jupyter_bokeh>=3.0.7
\ No newline at end of file
diff --git a/One-2-3-45-master 2/run.py b/One-2-3-45-master 2/run.py
deleted file mode 100644
index 70e3cd96ce9259da79658882a35bc9c32fb84647..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/run.py
+++ /dev/null
@@ -1,119 +0,0 @@
-import os
-import torch
-import argparse
-from PIL import Image
-from utils.zero123_utils import init_model, predict_stage1_gradio, zero123_infer
-from utils.sam_utils import sam_init, sam_out_nosave
-from utils.utils import pred_bbox, image_preprocess_nosave, gen_poses, convert_mesh_format
-from elevation_estimate.estimate_wild_imgs import estimate_elev
-
-
-def preprocess(predictor, raw_im, lower_contrast=False):
- raw_im.thumbnail([512, 512], Image.Resampling.LANCZOS)
- image_sam = sam_out_nosave(predictor, raw_im.convert("RGB"), pred_bbox(raw_im))
- input_256 = image_preprocess_nosave(image_sam, lower_contrast=lower_contrast, rescale=True)
- torch.cuda.empty_cache()
- return input_256
-
-def stage1_run(model, device, exp_dir,
- input_im, scale, ddim_steps):
- # folder to save the stage 1 images
- stage1_dir = os.path.join(exp_dir, "stage1_8")
- os.makedirs(stage1_dir, exist_ok=True)
-
- # stage 1: generate 4 views at the same elevation as the input
- output_ims = predict_stage1_gradio(model, input_im, save_path=stage1_dir, adjust_set=list(range(4)), device=device, ddim_steps=ddim_steps, scale=scale)
-
- # stage 2 for the first image
- # infer 4 nearby views for an image to estimate the polar angle of the input
- stage2_steps = 50 # ddim_steps
- zero123_infer(model, exp_dir, indices=[0], device=device, ddim_steps=stage2_steps, scale=scale)
- # estimate the camera pose (elevation) of the input image.
- try:
- polar_angle = estimate_elev(exp_dir)
-    except Exception:
-        print("Failed to estimate the polar angle; falling back to 90 degrees")
-        polar_angle = 90
- print("Estimated polar angle:", polar_angle)
- gen_poses(exp_dir, polar_angle)
-
- # stage 1: generate another 4 views at a different elevation
- if polar_angle <= 75:
- output_ims_2 = predict_stage1_gradio(model, input_im, save_path=stage1_dir, adjust_set=list(range(4,8)), device=device, ddim_steps=ddim_steps, scale=scale)
- else:
- output_ims_2 = predict_stage1_gradio(model, input_im, save_path=stage1_dir, adjust_set=list(range(8,12)), device=device, ddim_steps=ddim_steps, scale=scale)
- torch.cuda.empty_cache()
- return 90-polar_angle, output_ims+output_ims_2
-
-def stage2_run(model, device, exp_dir,
- elev, scale, stage2_steps=50):
- # stage 2 for the remaining 7 images, generate 7*4=28 views
- if 90-elev <= 75:
- zero123_infer(model, exp_dir, indices=list(range(1,8)), device=device, ddim_steps=stage2_steps, scale=scale)
- else:
- zero123_infer(model, exp_dir, indices=list(range(1,4))+list(range(8,12)), device=device, ddim_steps=stage2_steps, scale=scale)
-
-def reconstruct(exp_dir, output_format=".ply", device_idx=0, resolution=256):
- exp_dir = os.path.abspath(exp_dir)
- main_dir_path = os.path.abspath(os.path.dirname("./"))
- os.chdir('reconstruction/')
-
- bash_script = f'CUDA_VISIBLE_DEVICES={device_idx} python exp_runner_generic_blender_val.py \
- --specific_dataset_name {exp_dir} \
- --mode export_mesh \
- --conf confs/one2345_lod0_val_demo.conf \
- --resolution {resolution}'
- print(bash_script)
- os.system(bash_script)
- os.chdir(main_dir_path)
-
-    ply_path = os.path.join(exp_dir, "mesh.ply")
- if output_format == ".ply":
- return ply_path
- if output_format not in [".obj", ".glb"]:
- print("Invalid output format, must be one of .ply, .obj, .glb")
- return ply_path
- return convert_mesh_format(exp_dir, output_format=output_format)
-
-
-def predict_multiview(shape_dir, args):
- device = f"cuda:{args.gpu_idx}"
-
- # initialize the zero123 model
- models = init_model(device, 'zero123-xl.ckpt', half_precision=args.half_precision)
- model_zero123 = models["turncam"]
-
- # initialize the Segment Anything model
- predictor = sam_init(args.gpu_idx)
- input_raw = Image.open(args.img_path)
-
- # preprocess the input image
- input_256 = preprocess(predictor, input_raw)
-
- # generate multi-view images in two stages with Zero123.
-    # first stage: generate N=8 views covering 360 degrees around the input shape.
- elev, stage1_imgs = stage1_run(model_zero123, device, shape_dir, input_256, scale=3, ddim_steps=75)
-    # second stage: generate 4 nearby views for each first-stage view, resulting in N*4=32 source view images.
- stage2_run(model_zero123, device, shape_dir, elev, scale=3, stage2_steps=50)
-
-if __name__ == "__main__":
-    parser = argparse.ArgumentParser(description='One-2-3-45: reconstruct a textured 3D mesh from a single image.')
- parser.add_argument('--img_path', type=str, default="./demo/demo_examples/01_wild_hydrant.png", help='Path to the input image')
- parser.add_argument('--gpu_idx', type=int, default=0, help='GPU index')
- parser.add_argument('--half_precision', action='store_true', help='Use half precision')
- parser.add_argument('--mesh_resolution', type=int, default=256, help='Mesh resolution')
- parser.add_argument('--output_format', type=str, default=".ply", help='Output format: .ply, .obj, .glb')
-
- args = parser.parse_args()
-
-    assert torch.cuda.is_available(), "CUDA is required to run this pipeline"
-
- shape_id = args.img_path.split('/')[-1].split('.')[0]
- shape_dir = f"./exp/{shape_id}"
- os.makedirs(shape_dir, exist_ok=True)
-
- predict_multiview(shape_dir, args)
-
-    # utilize cost-volume-based 3D reconstruction to generate a textured 3D mesh
- mesh_path = reconstruct(shape_dir, output_format=args.output_format, device_idx=args.gpu_idx, resolution=args.mesh_resolution)
- print("Mesh saved to:", mesh_path)
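-
-# Example invocation (paths illustrative):
-#   python run.py --img_path ./demo/demo_examples/01_wild_hydrant.png --half_precision --output_format .glb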
diff --git a/One-2-3-45-master 2/utils/sam_utils.py b/One-2-3-45-master 2/utils/sam_utils.py
deleted file mode 100644
index 0c01bb3ca4cdc0692271f769f24f65d611a744dd..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/utils/sam_utils.py
+++ /dev/null
@@ -1,50 +0,0 @@
-import os
-import numpy as np
-import torch
-from PIL import Image
-import time
-
-from segment_anything import sam_model_registry, SamPredictor
-
-def sam_init(device_id=0):
- sam_checkpoint = os.path.join(os.path.dirname(__file__), "../sam_vit_h_4b8939.pth")
- model_type = "vit_h"
-
- device = "cuda:{}".format(device_id) if torch.cuda.is_available() else "cpu"
-
- sam = sam_model_registry[model_type](checkpoint=sam_checkpoint).to(device=device)
- predictor = SamPredictor(sam)
- return predictor
-
-def sam_out_nosave(predictor, input_image, *bbox_sliders):
- bbox = np.array(bbox_sliders)
- image = np.asarray(input_image)
-
- start_time = time.time()
- predictor.set_image(image)
-
- h, w, _ = image.shape
-    input_point = np.array([[w//2, h//2]])  # SAM expects point prompts in (x, y) pixel order
- input_label = np.array([1])
-
- masks, scores, logits = predictor.predict(
- point_coords=input_point,
- point_labels=input_label,
- multimask_output=True,
- )
-
- masks_bbox, scores_bbox, logits_bbox = predictor.predict(
- box=bbox,
- multimask_output=True
- )
-
- print(f"SAM Time: {time.time() - start_time:.3f}s")
- opt_idx = np.argmax(scores)
- mask = masks[opt_idx]
- out_image = np.zeros((image.shape[0], image.shape[1], 4), dtype=np.uint8)
- out_image[:, :, :3] = image
- out_image_bbox = out_image.copy()
- out_image[:, :, 3] = mask.astype(np.uint8) * 255
- out_image_bbox[:, :, 3] = masks_bbox[-1].astype(np.uint8) * 255 # np.argmax(scores_bbox)
- torch.cuda.empty_cache()
- return Image.fromarray(out_image_bbox, mode='RGBA')
\ No newline at end of file
diff --git a/One-2-3-45-master 2/utils/utils.py b/One-2-3-45-master 2/utils/utils.py
deleted file mode 100644
index 8dc244bb5725bac9280e955086ebfb5144d694c5..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/utils/utils.py
+++ /dev/null
@@ -1,145 +0,0 @@
-import os
-import json
-import numpy as np
-import cv2
-from PIL import Image
-from rembg import remove
-import trimesh
-
-# predict bbox of the foreground
-def pred_bbox(image):
- image_nobg = remove(image.convert('RGBA'), alpha_matting=True)
- alpha = np.asarray(image_nobg)[:,:,-1]
- x_nonzero = np.nonzero(alpha.sum(axis=0))
- y_nonzero = np.nonzero(alpha.sum(axis=1))
- x_min = int(x_nonzero[0].min())
- y_min = int(y_nonzero[0].min())
- x_max = int(x_nonzero[0].max())
- y_max = int(y_nonzero[0].max())
- return x_min, y_min, x_max, y_max
-
-def image_grid(imgs, rows, cols):
- assert len(imgs) == rows*cols
- w, h = imgs[0].size
- grid = Image.new('RGB', size=(cols*w, rows*h))
- grid_w, grid_h = grid.size
-
- for i, img in enumerate(imgs):
- grid.paste(img, box=(i%cols*w, i//cols*h))
- return grid
-
-def convert_mesh_format(exp_dir, output_format=".obj"):
- ply_path = os.path.join(exp_dir, "mesh.ply")
- mesh_path = os.path.join(exp_dir, f"mesh{output_format}")
- mesh = trimesh.load_mesh(ply_path)
- rotation_matrix = trimesh.transformations.rotation_matrix(np.pi/2, [1, 0, 0])
- mesh.apply_transform(rotation_matrix)
- rotation_matrix = trimesh.transformations.rotation_matrix(np.pi, [0, 0, 1])
- mesh.apply_transform(rotation_matrix)
- # flip x
- mesh.vertices[:, 0] = -mesh.vertices[:, 0]
- mesh.faces = np.fliplr(mesh.faces)
- if output_format == ".obj":
- # Export the mesh as .obj file with colors
- mesh.export(mesh_path, file_type='obj', include_color=True)
- else:
- mesh.export(mesh_path, file_type='glb')
- return mesh_path
-
-# contrast correction, rescale and recenter
-def image_preprocess_nosave(input_image, lower_contrast=True, rescale=True):
-
- image_arr = np.array(input_image)
- in_w, in_h = image_arr.shape[:2]
-
- if lower_contrast:
- alpha = 0.8 # Contrast control (1.0-3.0)
- beta = 0 # Brightness control (0-100)
- # Apply the contrast adjustment
- image_arr = cv2.convertScaleAbs(image_arr, alpha=alpha, beta=beta)
- image_arr[image_arr[...,-1]>200, -1] = 255
-
- ret, mask = cv2.threshold(np.array(input_image.split()[-1]), 0, 255, cv2.THRESH_BINARY)
- x, y, w, h = cv2.boundingRect(mask)
- max_size = max(w, h)
- ratio = 0.75
- if rescale:
- side_len = int(max_size / ratio)
- else:
- side_len = in_w
- padded_image = np.zeros((side_len, side_len, 4), dtype=np.uint8)
- center = side_len//2
- padded_image[center-h//2:center-h//2+h, center-w//2:center-w//2+w] = image_arr[y:y+h, x:x+w]
- rgba = Image.fromarray(padded_image).resize((256, 256), Image.LANCZOS)
-
- rgba_arr = np.array(rgba) / 255.0
- rgb = rgba_arr[...,:3] * rgba_arr[...,-1:] + (1 - rgba_arr[...,-1:])
- return Image.fromarray((rgb * 255).astype(np.uint8))
-
-# pose generation
-def calc_pose(phis, thetas, size, radius = 1.2, device='cuda'):
- import torch
- def normalize(vectors):
- return vectors / (torch.norm(vectors, dim=-1, keepdim=True) + 1e-10)
- thetas = torch.FloatTensor(thetas).to(device)
- phis = torch.FloatTensor(phis).to(device)
-
- centers = torch.stack([
- radius * torch.sin(thetas) * torch.sin(phis),
- -radius * torch.cos(thetas) * torch.sin(phis),
- radius * torch.cos(phis),
- ], dim=-1) # [B, 3]
-
- # lookat
- forward_vector = normalize(centers).squeeze(0)
- up_vector = torch.FloatTensor([0, 0, 1]).to(device).unsqueeze(0).repeat(size, 1)
- right_vector = normalize(torch.cross(up_vector, forward_vector, dim=-1))
- if right_vector.pow(2).sum() < 0.01:
- right_vector = torch.FloatTensor([0, 1, 0]).to(device).unsqueeze(0).repeat(size, 1)
- up_vector = normalize(torch.cross(forward_vector, right_vector, dim=-1))
-
- poses = torch.eye(4, dtype=torch.float, device=device)[:3].unsqueeze(0).repeat(size, 1, 1)
- poses[:, :3, :3] = torch.stack((right_vector, up_vector, forward_vector), dim=-1)
- poses[:, :3, 3] = centers
- return poses
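-
-# Note: calc_pose places cameras on a sphere of the given radius around the origin with
-# +z as the world up axis; the returned (size, 3, 4) matrices are camera-to-world poses
-# whose rotation columns are [right, up, forward] and whose translation is the camera center.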
-
-def get_poses(init_elev):
- mid = init_elev
- deg = 10
- if init_elev <= 75:
- low = init_elev + 30
- # e.g. 30, 60, 20, 40, 30, 30, 50, 70, 50, 50
-
- elevations = np.radians([mid]*4 + [low]*4 + [mid-deg,mid+deg,mid,mid]*4 + [low-deg,low+deg,low,low]*4)
- img_ids = [f"{num}.png" for num in range(8)] + [f"{num}_{view_num}.png" for num in range(8) for view_num in range(4)]
- else:
-
- high = init_elev - 30
- elevations = np.radians([mid]*4 + [high]*4 + [mid-deg,mid+deg,mid,mid]*4 + [high-deg,high+deg,high,high]*4)
- img_ids = [f"{num}.png" for num in list(range(4)) + list(range(8,12))] + \
- [f"{num}_{view_num}.png" for num in list(range(4)) + list(range(8,12)) for view_num in range(4)]
- overlook_theta = [30+x*90 for x in range(4)]
- eyelevel_theta = [60+x*90 for x in range(4)]
- source_theta_delta = [0, 0, -deg, deg]
- azimuths = np.radians(overlook_theta + eyelevel_theta + \
- [view_theta + source for view_theta in overlook_theta for source in source_theta_delta] + \
- [view_theta + source for view_theta in eyelevel_theta for source in source_theta_delta])
- return img_ids, calc_pose(elevations, azimuths, len(azimuths)).cpu().numpy()
-
-
-def gen_poses(shape_dir, pose_est):
- img_ids, input_poses = get_poses(pose_est)
-
- out_dict = {}
-    focal = 560 / 2
-    h = w = 256
- out_dict['intrinsics'] = [[focal, 0, w / 2], [0, focal, h / 2], [0, 0, 1]]
- out_dict['near_far'] = [1.2-0.7, 1.2+0.6]
- out_dict['c2ws'] = {}
- for view_id, img_id in enumerate(img_ids):
- pose = input_poses[view_id]
- pose = pose.tolist()
- pose = [pose[0], pose[1], pose[2], [0, 0, 0, 1]]
- out_dict['c2ws'][img_id] = pose
- json_path = os.path.join(shape_dir, 'pose.json')
- with open(json_path, 'w') as f:
- json.dump(out_dict, f, indent=4)
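-
-
-# The resulting pose.json has roughly this layout (numbers shown for the defaults above):
-#   {
-#     "intrinsics": [[280.0, 0, 128.0], [0, 280.0, 128.0], [0, 0, 1]],
-#     "near_far": [0.5, 1.8],
-#     "c2ws": {"0.png": [[...], [...], [...], [0, 0, 0, 1]], ...}
-#   }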
diff --git a/One-2-3-45-master 2/utils/zero123_utils.py b/One-2-3-45-master 2/utils/zero123_utils.py
deleted file mode 100644
index 62a31a58be0b33fc71010621fb84bf8274088da8..0000000000000000000000000000000000000000
--- a/One-2-3-45-master 2/utils/zero123_utils.py
+++ /dev/null
@@ -1,178 +0,0 @@
-import os
-import numpy as np
-import torch
-from contextlib import nullcontext
-from diffusers.pipelines.stable_diffusion import StableDiffusionSafetyChecker
-from einops import rearrange
-from ldm.util import instantiate_from_config
-from ldm.models.diffusion.ddim import DDIMSampler
-from omegaconf import OmegaConf
-from PIL import Image
-from rich import print
-from transformers import CLIPImageProcessor
-from torch import autocast
-from torchvision import transforms
-
-
-def load_model_from_config(config, ckpt, device, verbose=False):
- print(f'Loading model from {ckpt}')
- pl_sd = torch.load(ckpt, map_location='cpu')
- if 'global_step' in pl_sd:
- print(f'Global Step: {pl_sd["global_step"]}')
- sd = pl_sd['state_dict']
- model = instantiate_from_config(config.model)
- m, u = model.load_state_dict(sd, strict=False)
- if len(m) > 0 and verbose:
- print('missing keys:')
- print(m)
- if len(u) > 0 and verbose:
- print('unexpected keys:')
- print(u)
-
- model.to(device)
- model.eval()
- return model
-
-
-def init_model(device, ckpt, half_precision=False):
- config = os.path.join(os.path.dirname(__file__), '../configs/sd-objaverse-finetune-c_concat-256.yaml')
- config = OmegaConf.load(config)
-
- # Instantiate all models beforehand for efficiency.
- models = dict()
- print('Instantiating LatentDiffusion...')
- if half_precision:
- models['turncam'] = torch.compile(load_model_from_config(config, ckpt, device=device)).half()
- else:
- models['turncam'] = torch.compile(load_model_from_config(config, ckpt, device=device))
- print('Instantiating StableDiffusionSafetyChecker...')
- models['nsfw'] = StableDiffusionSafetyChecker.from_pretrained(
- 'CompVis/stable-diffusion-safety-checker').to(device)
- models['clip_fe'] = CLIPImageProcessor.from_pretrained(
- "openai/clip-vit-large-patch14")
- # We multiply all by some factor > 1 to make them less likely to be triggered.
- models['nsfw'].concept_embeds_weights *= 1.2
- models['nsfw'].special_care_embeds_weights *= 1.2
-
- return models
-
-@torch.no_grad()
-def sample_model_batch(model, sampler, input_im, xs, ys, n_samples=4, precision='autocast', ddim_eta=1.0, ddim_steps=75, scale=3.0, h=256, w=256):
- precision_scope = autocast if precision == 'autocast' else nullcontext
- with precision_scope("cuda"):
- with model.ema_scope():
- c = model.get_learned_conditioning(input_im).tile(n_samples, 1, 1)
- T = []
- for x, y in zip(xs, ys):
- T.append([np.radians(x), np.sin(np.radians(y)), np.cos(np.radians(y)), 0])
- T = torch.tensor(np.array(T))[:, None, :].float().to(c.device)
- c = torch.cat([c, T], dim=-1)
- c = model.cc_projection(c)
- cond = {}
- cond['c_crossattn'] = [c]
- cond['c_concat'] = [model.encode_first_stage(input_im).mode().detach()
- .repeat(n_samples, 1, 1, 1)]
- if scale != 1.0:
- uc = {}
- uc['c_concat'] = [torch.zeros(n_samples, 4, h // 8, w // 8).to(c.device)]
- uc['c_crossattn'] = [torch.zeros_like(c).to(c.device)]
- else:
- uc = None
-
- shape = [4, h // 8, w // 8]
- samples_ddim, _ = sampler.sample(S=ddim_steps,
- conditioning=cond,
- batch_size=n_samples,
- shape=shape,
- verbose=False,
- unconditional_guidance_scale=scale,
- unconditional_conditioning=uc,
- eta=ddim_eta,
- x_T=None)
- # print(samples_ddim.shape)
- # samples_ddim = torch.nn.functional.interpolate(samples_ddim, 64, mode='nearest', antialias=False)
- x_samples_ddim = model.decode_first_stage(samples_ddim)
- ret_imgs = torch.clamp((x_samples_ddim + 1.0) / 2.0, min=0.0, max=1.0).cpu()
- del cond, c, x_samples_ddim, samples_ddim, uc, input_im
- torch.cuda.empty_cache()
- return ret_imgs
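-
-# Note on the conditioning built above (Zero123-style relative camera conditioning): each
-# target view is described by T = [relative polar angle in radians, sin(relative azimuth),
-# cos(relative azimuth), a radius term (fixed to 0 here)], concatenated with the CLIP image
-# embedding and mapped by cc_projection into the cross-attention conditioning.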
-
-@torch.no_grad()
-def predict_stage1_gradio(model, raw_im, save_path = "", adjust_set=[], device="cuda", ddim_steps=75, scale=3.0):
- # raw_im = raw_im.resize([256, 256], Image.LANCZOS)
- # input_im_init = preprocess_image(models, raw_im, preprocess=False)
- input_im_init = np.asarray(raw_im, dtype=np.float32) / 255.0
- input_im = transforms.ToTensor()(input_im_init).unsqueeze(0).to(device)
- input_im = input_im * 2 - 1
-
- # stage 1: 8
- delta_x_1_8 = [0] * 4 + [30] * 4 + [-30] * 4
- delta_y_1_8 = [0+90*(i%4) if i < 4 else 30+90*(i%4) for i in range(8)] + [30+90*(i%4) for i in range(4)]
-
- ret_imgs = []
- sampler = DDIMSampler(model)
- # sampler.to(device)
- if adjust_set != []:
- x_samples_ddims_8 = sample_model_batch(model, sampler, input_im,
- [delta_x_1_8[i] for i in adjust_set], [delta_y_1_8[i] for i in adjust_set],
- n_samples=len(adjust_set), ddim_steps=ddim_steps, scale=scale)
- else:
- x_samples_ddims_8 = sample_model_batch(model, sampler, input_im, delta_x_1_8, delta_y_1_8, n_samples=len(delta_x_1_8), ddim_steps=ddim_steps, scale=scale)
- sample_idx = 0
- for stage1_idx in range(len(delta_x_1_8)):
- if adjust_set != [] and stage1_idx not in adjust_set:
- continue
- x_sample = 255.0 * rearrange(x_samples_ddims_8[sample_idx].numpy(), 'c h w -> h w c')
- out_image = Image.fromarray(x_sample.astype(np.uint8))
- ret_imgs.append(out_image)
- if save_path:
- out_image.save(os.path.join(save_path, '%d.png'%(stage1_idx)))
- sample_idx += 1
- del x_samples_ddims_8
- del sampler
- torch.cuda.empty_cache()
- return ret_imgs
-
-def infer_stage_2(model, save_path_stage1, save_path_stage2, delta_x_2, delta_y_2, indices, device, ddim_steps=75, scale=3.0):
- for stage1_idx in indices:
- # save stage 1 image
- # x_sample = 255.0 * rearrange(x_samples_ddims[stage1_idx].cpu().numpy(), 'c h w -> h w c')
- # Image.fromarray(x_sample.astype(np.uint8)).save()
- stage1_image_path = os.path.join(save_path_stage1, '%d.png'%(stage1_idx))
-
- raw_im = Image.open(stage1_image_path)
- # input_im_init = preprocess_image(models, raw_im, preprocess=False)
- input_im_init = np.asarray(raw_im, dtype=np.float32) #/ 255.0
- input_im_init[input_im_init >= 253.0] = 255.0
- input_im_init = input_im_init / 255.0
- input_im = transforms.ToTensor()(input_im_init).unsqueeze(0).to(device)
- input_im = input_im * 2 - 1
- # infer stage 2
- sampler = DDIMSampler(model)
- # sampler.to(device)
- # stage2_in = x_samples_ddims[stage1_idx][None, ...].to(device) * 2 - 1
- x_samples_ddims_stage2 = sample_model_batch(model, sampler, input_im, delta_x_2, delta_y_2, n_samples=len(delta_x_2), ddim_steps=ddim_steps, scale=scale)
- for stage2_idx in range(len(delta_x_2)):
- x_sample_stage2 = 255.0 * rearrange(x_samples_ddims_stage2[stage2_idx].numpy(), 'c h w -> h w c')
- Image.fromarray(x_sample_stage2.astype(np.uint8)).save(os.path.join(save_path_stage2, '%d_%d.png'%(stage1_idx, stage2_idx)))
- del input_im
- del x_samples_ddims_stage2
- torch.cuda.empty_cache()
-
-def zero123_infer(model, input_dir_path, start_idx=0, end_idx=12, indices=None, device="cuda", ddim_steps=75, scale=3.0):
- # input_img_path = os.path.join(input_dir_path, "input_256.png")
- save_path_8 = os.path.join(input_dir_path, "stage1_8")
- save_path_8_2 = os.path.join(input_dir_path, "stage2_8")
- os.makedirs(save_path_8_2, exist_ok=True)
-
- # raw_im = Image.open(input_img_path)
- # # input_im_init = preprocess_image(models, raw_im, preprocess=False)
- # input_im_init = np.asarray(raw_im, dtype=np.float32) / 255.0
- # input_im = transforms.ToTensor()(input_im_init).unsqueeze(0).to(device)
- # input_im = input_im * 2 - 1
-
- # stage 2: 6*4 or 8*4
- delta_x_2 = [-10, 10, 0, 0]
- delta_y_2 = [0, 0, -10, 10]
-
- infer_stage_2(model, save_path_8, save_path_8_2, delta_x_2, delta_y_2, indices=indices if indices else list(range(start_idx,end_idx)), device=device, ddim_steps=ddim_steps, scale=scale)