diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml new file mode 100644 index 00000000..f3c69761 --- /dev/null +++ b/.github/workflows/docs.yaml @@ -0,0 +1,26 @@ +name: Docs + +on: + pull_request: + branches: [ dev, main ] + paths: + - 'luxonis_train/**' + - .github/workflows/docs.yaml + +jobs: + docs: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + ref: ${{ github.head_ref }} + + - name: Install dependencies + run: | + pip install pydoctor + curl -L "https://raw.githubusercontent.com/luxonis/python-api-analyzer-to-json/main/gen-docs.py" -o "gen-docs.py" + + - name: Build docs + run: | + python gen-docs.py luxonis_train diff --git a/.github/workflows/pre-commit.yaml b/.github/workflows/pre-commit.yaml new file mode 100644 index 00000000..ce6b816b --- /dev/null +++ b/.github/workflows/pre-commit.yaml @@ -0,0 +1,13 @@ +name: pre-commit + +on: + pull_request: + branches: [dev, main] + +jobs: + pre-commit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v3 + - uses: pre-commit/action@v3.0.0 diff --git a/.github/workflows/python-publish.yaml b/.github/workflows/python-publish.yaml new file mode 100755 index 00000000..353ee26d --- /dev/null +++ b/.github/workflows/python-publish.yaml @@ -0,0 +1,34 @@ +name: Upload Python Package + +on: + workflow_dispatch: + release: + types: [published] + +permissions: + contents: read + +jobs: + deploy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + + - name: Set up Python + uses: actions/setup-python@v3 + with: + python-version: '3.10' + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install build + + - name: Build package + run: python -m build + + - name: Publish package + uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29 + with: + user: __token__ + password: ${{ secrets.PYPI_API_TOKEN }} diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml new file mode 100644 index 00000000..89a59a19 --- /dev/null +++ b/.github/workflows/tests.yaml @@ -0,0 +1,115 @@ +name: Tests + +on: + pull_request: + branches: [ dev, main ] + paths: + - 'luxonis_train/**/**.py' + - 'tests/**/**.py' + - .github/workflows/tests.yaml + +jobs: + run_tests: + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, windows-latest, macOS-latest] + version: ['3.10', '3.11'] + + runs-on: ${{ matrix.os }} + + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + ref: ${{ github.head_ref }} + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.version }} + cache: pip + + - name: Install dependencies [Ubuntu] + if: matrix.os == 'ubuntu-latest' + run: | + sudo apt update + sudo apt install -y pandoc + pip install -e .[dev] + + - name: Install dependencies [Windows] + if: matrix.os == 'windows-latest' + run: pip install -e .[dev] + + - name: Install dependencies [macOS] + if: matrix.os == 'macOS-latest' + run: pip install -e .[dev] + + - name: Run tests with coverage [Ubuntu] + if: matrix.os == 'ubuntu-latest' && matrix.version == '3.10' + run: pytest tests --cov=luxonis_train --cov-report xml --junit-xml pytest.xml + + - name: Run tests [Windows, macOS] + if: matrix.os != 'ubuntu-latest' || matrix.version != '3.10' + run: pytest tests --junit-xml pytest.xml + + - name: Generate coverage badge [Ubuntu] + if: matrix.os == 'ubuntu-latest' && matrix.version == '3.10' + run: coverage-badge -o media/coverage_badge.svg -f + + - name: Generate 
coverage report [Ubuntu] + if: matrix.os == 'ubuntu-latest' && matrix.version == '3.10' + uses: orgoro/coverage@v3.1 + with: + coverageFile: coverage.xml + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Commit coverage badge [Ubuntu] + if: matrix.os == 'ubuntu-latest' && matrix.version == '3.10' + run: | + git config --global user.name 'GitHub Actions' + git config --global user.email 'actions@github.com' + git diff --quiet media/coverage_badge.svg || { + git add media/coverage_badge.svg + git commit -m "[Automated] Updated coverage badge" + } + + - name: Push changes [Ubuntu] + if: matrix.os == 'ubuntu-latest' && matrix.version == '3.10' + uses: ad-m/github-push-action@master + with: + branch: ${{ github.head_ref }} + + - name: Upload Test Results + if: always() + uses: actions/upload-artifact@v4 + with: + name: Test Results [${{ matrix.os }}] (Python ${{ matrix.version }}) + path: pytest.xml + retention-days: 10 + if-no-files-found: error + + publish-test-results: + name: "Publish Tests Results" + needs: run_tests + runs-on: ubuntu-latest + permissions: + checks: write + pull-requests: write + if: always() + + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + ref: ${{ github.head_ref }} + + - name: Download Artifacts + uses: actions/download-artifact@v3 + with: + path: artifacts + + - name: Publish Test Results + uses: EnricoMi/publish-unit-test-result-action@v2 + with: + files: "artifacts/**/*.xml" diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..1204d2e2 --- /dev/null +++ b/.gitignore @@ -0,0 +1,149 @@ +data/* +!data/.gitkeep +output +output_export +apidocs +.ruff_cache + +# database +*.db + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# Datasets +cifar_ldf/* +cifar_small_ldf/* + +# Venv +models_venv/* + +# vscode settings +.vscode diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..3f95fc26 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,31 @@ +repos: + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.1.8 + hooks: + - id: ruff + args: [--fix, --exit-non-zero-on-fix] + types_or: [python, pyi, jupyter] + - id: ruff-format + args: [--line-length, '88'] + types_or: [python, pyi, jupyter] + + - repo: https://github.com/PyCQA/docformatter + rev: v1.7.5 + hooks: + - id: docformatter + additional_dependencies: [tomli] + args: [--in-place, --black, --style=epytext] + + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: no-commit-to-branch + args: ['--branch', 'main', '--branch', 'dev'] + + - repo: https://github.com/executablebooks/mdformat + rev: 0.7.10 + hooks: + - id: mdformat + additional_dependencies: + - mdformat-gfm + - mdformat-toc diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..479a14d4 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,39 @@ +# Contributing to LuxonisTrain + +This guide is intended for our internal development team. +It outlines our workflow and standards for contributing to this project. + +## Table of Contents + +- [Pre-commit Hook](#pre-commit-hook) +- [GitHub Actions](#github-actions) +- [Making and Reviewing Changes](#making-and-reviewing-changes) +- [Notes](#notes) + +## Pre-commit Hook + +We use a pre-commit hook to ensure code quality and consistency: + +1. Install pre-commit (see [pre-commit.com](https://pre-commit.com/#install)). +1. Clone the repository and run `pre-commit install` in the root directory. +1. The pre-commit hook runs automatically on `git commit`. + +## GitHub Actions + +In addition to the pre-commit hook, our GitHub Actions workflow includes tests that must pass before merging: + +1. Tests are run automatically when you open a pull request. +1. Review the GitHub Actions output if your PR fails. +1. Fix any issues to ensure that both the pre-commit hooks and tests pass. + +## Making and Reviewing Changes + +1. Make changes in a new branch. +1. Test your changes locally. +1. Commit (pre-commit hook will run). +1. Push to your branch and create a pull request. Always request a review from: + - [Martin Kozlovský](https://github.com/kozlov721) + - His permission is required for merge + - [Matija Teršek](https://github.com/tersekmatija) + - [Conor Simmons](https://github.com/conorsim) +1. The team will review and merge your PR. diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..261eeb9e --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/README.md b/README.md
new file mode 100644
index 00000000..62a349d2
--- /dev/null
+++ b/README.md
@@ -0,0 +1,189 @@
+# Luxonis Training Framework
+
+![Ubuntu](https://img.shields.io/badge/Ubuntu-E95420?style=for-the-badge&logo=ubuntu&logoColor=white)
+![Windows](https://img.shields.io/badge/Windows-0078D6?style=for-the-badge&logo=windows&logoColor=white)
+![MacOS](https://img.shields.io/badge/mac%20os-000000?style=for-the-badge&logo=apple&logoColor=white)
+
+[![License](https://img.shields.io/badge/License-Apache_2.0-blue.svg)](https://opensource.org/licenses/Apache-2.0)
+![PyBadge](media/pybadge.svg)
+[![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff)
+![UnitTests](https://github.com/luxonis/models/actions/workflows/tests.yaml/badge.svg)
+![Docs](https://github.com/luxonis/luxonis-ml/actions/workflows/docs.yaml/badge.svg)
+[![Coverage](media/coverage_badge.svg)](https://github.com/luxonis/models/actions)
+
+The Luxonis training framework (`luxonis-train`) is intended for training deep learning models that can run fast on OAK products.
+
+The project is in an alpha state, so please report any feedback you might have.
+
+## Table Of Contents
+
+- [Installation](#installation)
+- [Usage](#usage)
+- [Training](#training)
+- [Customizations](#customizations)
+- [Tuning](#tuning)
+- [Exporting](#exporting)
+- [Credentials](#credentials)
+- [Contributing](#contributing)
+
+## Installation
+
+`luxonis-train` is hosted on PyPI and can be installed with `pip`:
+
+```bash
+pip install luxonis-train
+```
+
+This command will also create a `luxonis_train` executable in your `PATH`.
+See `luxonis_train --help` for more information.
+
+## Usage
+
+The entire configuration is specified in a `yaml` file. This includes the model
+structure, losses, metrics, optimizers, etc. For specific instructions and example
+configuration files, see [Configuration](./configs/README.md).
+
+## Training
+
+Once you've created your `config.yaml` file, you can train the model using this command:
+
+```bash
+luxonis_train train --config config.yaml
+```
+
+If you wish to manually override some configuration parameters, you can provide them as key-value pairs:
+
+```bash
+luxonis_train train --config config.yaml trainer.batch_size 8 trainer.epochs 10
+```
+
+Keys and values are space separated, and sub-keys are dot (`.`) separated. If the configuration field is a list, then the key/sub-key should be a number (e.g. `trainer.preprocessing.augmentations.0.name RotateCustom`).
+
+## Tuning
+
+To improve training performance, you can use the `Tuner` for hyperparameter optimization.
+To use tuning, you have to specify the [tuner](configs/README.md#tuner) section in the config file.
+
+To start the tuning, run
+
+```bash
+luxonis_train tune --config config.yaml
+```
+
+You can see an example tuning configuration [here](configs/example_tuning.yaml).
+
+## Exporting
+
+We support export to `ONNX` and to the `DepthAI .blob` format used by OAK cameras. By default, we export to `ONNX`.
+
+To use the exporter, you have to specify the [exporter](configs/README.md#exporter) section in the config file.
+
+Once you have the config file ready, you can export the model using
+
+```bash
+luxonis_train export --config config.yaml
+```
+
+You can see an example export configuration [here](configs/example_export.yaml).
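+For quick reference, a minimal `exporter` section might look like this (a sketch mirroring [`configs/example_export.yaml`](configs/example_export.yaml); the exact values are illustrative):
+
+```yaml
+exporter:
+  onnx:
+    opset_version: 11
+  blobconverter:
+    active: True
+    shaves: 8
+```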
+
+## Customizations
+
+We provide a registry interface through which you can create new [nodes](src/luxonis_train/nodes/README.md), [losses](src/luxonis_train/attached_modules/losses/README.md), [metrics](src/luxonis_train/attached_modules/metrics/README.md), [visualizers](src/luxonis_train/attached_modules/visualizers/README.md), [callbacks](src/luxonis_train/callbacks/README.md), [optimizers](configs/README.md#optimizer), and [schedulers](configs/README.md#scheduler).
+
+Registered components can then be referenced in the config file. Custom components need to inherit from their respective base classes:
+
+- Node - [BaseNode](src/luxonis_train/models/nodes/base_node.py)
+- Loss - [BaseLoss](src/luxonis_train/attached_modules/losses/base_loss.py)
+- Metric - [BaseMetric](src/luxonis_train/attached_modules/metrics/base_metric.py)
+- Visualizer - [BaseVisualizer](src/luxonis_train/attached_modules/visualizers/base_visualizer.py)
+- Callback - [Callback from lightning.pytorch.callbacks](lightning.pytorch.callbacks)
+- Optimizer - [Optimizer from torch.optim](https://pytorch.org/docs/stable/optim.html#torch.optim.Optimizer)
+- Scheduler - [LRScheduler from torch.optim.lr_scheduler](https://pytorch.org/docs/stable/optim.html#how-to-adjust-learning-rate)
+
+Here is an example of how to create custom components:
+
+```python
+from torch.optim import Optimizer
+from luxonis_train.utils.registry import OPTIMIZERS
+from luxonis_train.attached_modules.losses import BaseLoss
+
+@OPTIMIZERS.register_module()
+class CustomOptimizer(Optimizer):
+    ...
+
+# Subclasses of BaseNode, BaseLoss, BaseMetric
+# and BaseVisualizer are registered automatically.
+
+class CustomLoss(BaseLoss):
+    # This class is automatically registered under the `CustomLoss` name.
+    def __init__(self, k_steps: int, **kwargs):
+        super().__init__(**kwargs)
+        ...
+```
+
+You can then reference `CustomOptimizer` and `CustomLoss` in the config file by their names:
+
+```yaml
+losses:
+  - name: CustomLoss
+    params:  # additional parameters
+      k_steps: 12
+```
+
+For more information on how to define custom components, consult the respective in-source documentation.
+
+## Credentials
+
+Local use is supported by default. In addition, we also integrate some cloud services, which are used primarily for logging and storage. When these are used, you need to set environment variables with the correct credentials.
+
+There are several ways to set up the environment variables:
+
+- Using standard environment variables
+- Specifying the variables in a `.env` file. If a variable is both in the environment and present in the `.env` file, the exported variable takes precedence.
+- Specifying the variables in the [ENVIRON](configs/README.md#environ) section of the config file. Note that this is not a recommended way. Variables defined in the config take precedence over environment and `.env` variables.
+
+### S3
+
+If you are working with a LuxonisDataset hosted on S3, you need to specify these env variables:
+
+```bash
+AWS_ACCESS_KEY_ID=**********
+AWS_SECRET_ACCESS_KEY=**********
+AWS_S3_ENDPOINT_URL=**********
+```
+
+### MLFlow
+
+If you want to use MLFlow for logging and storing artifacts, you also need to specify the MLFlow-related env variables:
+
+```bash
+MLFLOW_S3_BUCKET=**********
+MLFLOW_S3_ENDPOINT_URL=**********
+MLFLOW_TRACKING_URI=**********
+```
+
+### WandB
+
+If you are using WandB for logging, you have to sign in first in your environment.
+
+### POSTGRES
+
+There is an option to use remote storage for [Tuning](#tuning). We use PostgreSQL; to connect to the database, you need to specify the following env variables:
+
+```bash
+POSTGRES_USER=**********
+POSTGRES_PASSWORD=**********
+POSTGRES_HOST=**********
+POSTGRES_PORT=**********
+POSTGRES_DB=**********
+```
+
+## Contributing
+
+If you want to contribute to the development, install the dev version of the package:
+
+```bash
+pip install luxonis-train[dev]
+```
+
+Consult the [Contribution guide](CONTRIBUTING.md) before making a pull request.
diff --git a/configs/README.md b/configs/README.md
new file mode 100644
index 00000000..3fd82bec
--- /dev/null
+++ b/configs/README.md
@@ -0,0 +1,294 @@
+# Configuration
+
+The configuration is defined in a `yaml` file, which you must provide.
+The configuration file consists of a few major blocks that are described below.
+You can create your own config or use/edit one of the examples.
+
+## Table Of Contents
+
+- [Top-level Options](#top-level-options)
+- [Model](#model)
+  - [Nodes](#nodes)
+  - [Attached Modules](#attached-modules)
+    - [Losses](#losses)
+    - [Metrics](#metrics)
+    - [Visualizers](#visualizers)
+- [Tracker](#tracker)
+- [Dataset](#dataset)
+- [Trainer](#trainer)
+  - [Preprocessing](#preprocessing)
+  - [Optimizer](#optimizer)
+  - [Scheduler](#scheduler)
+  - [Callbacks](#callbacks)
+- [Exporter](#exporter)
+  - [ONNX](#onnx)
+  - [Blob](#blob)
+- [Tuner](#tuner)
+  - [Storage](#storage)
+- [ENVIRON](#environ)
+
+## Top-level Options
+
+| Key           | Type                  | Default value | Description                                    |
+| ------------- | --------------------- | ------------- | ---------------------------------------------- |
+| use_rich_text | bool                  | True          | whether to use rich text for console printing  |
+| model         | [Model](#model)       |               | model section                                  |
+| dataset       | [dataset](#dataset)   |               | dataset section                                |
+| tracker       | [tracker](#tracker)   |               | tracker section                                |
+| trainer       | [trainer](#trainer)   |               | trainer section                                |
+| exporter      | [exporter](#exporter) |               | exporter section                               |
+| tuner         | [tuner](#tuner)       |               | tuner section                                  |
+
+## Model
+
+This is the most important block; it **must always be defined by the user**. There are two ways to create the model: using a predefined model (`predefined_model`), or composing your own architecture from a list of nodes (`nodes`).
+
+| Key              | Type | Default value | Description                                                |
+| ---------------- | ---- | ------------- | ---------------------------------------------------------- |
+| name             | str  |               | name of the model                                           |
+| weights          | path | None          | path to weights to load                                     |
+| predefined_model | str  | None          | name of a predefined model to use                           |
+| params           | dict | {}            | parameters for the predefined model                         |
+| nodes            | list | \[\]          | list of nodes (see [Nodes](#nodes))                         |
+| losses           | list | \[\]          | list of losses (see [Losses](#losses))                      |
+| metrics          | list | \[\]          | list of metrics (see [Metrics](#metrics))                   |
+| visualizers      | list | \[\]          | list of visualizers (see [Visualizers](#visualizers))       |
+| outputs          | list | \[\]          | list of output nodes, inferred from nodes if not provided   |
+
+### Nodes
+
+For a list of all nodes, see [nodes](src/luxonis_train/nodes/README.md).
+
+| Key           | Type | Default value | Description                                                                                            |
+| ------------- | ---- | ------------- | ------------------------------------------------------------------------------------------------------ |
+| name          | str  |               | name of the node                                                                                        |
+| override_name | str  | None          | custom name for the node                                                                                |
+| params        | dict | {}            | parameters for the node                                                                                 |
+| inputs        | list | \[\]          | list of input nodes for this node; if empty, the node is understood to be an input node of the model   |
+| frozen        | bool | False         | whether the node should be frozen (excluded from training)                                              |
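+A sketch of a simple node composition; the node names come from [`coco_model.yaml`](coco_model.yaml), while the model name is illustrative:
+
+```yaml
+model:
+  name: example_model
+  nodes:
+    - name: EfficientRep
+
+    - name: RepPANNeck
+      inputs:
+        - EfficientRep
+```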
+
+### Attached Modules
+
+Modules that are attached to a node. These include losses, metrics, and visualizers.
+
+| Key           | Type | Default value | Description                                  |
+| ------------- | ---- | ------------- | -------------------------------------------- |
+| name          | str  |               | name of the module                           |
+| attached_to   | str  |               | name of the node the module is attached to   |
+| override_name | str  | None          | custom name for the module                   |
+| params        | dict | {}            | parameters of the module                     |
+
+#### Losses
+
+At least one node must have a loss attached to it.
+You can see the list of all currently supported loss functions and their parameters [here](./src/luxonis_train/attached_modules/losses/README.md).
+
+| Key    | Type  | Default value | Description                               |
+| ------ | ----- | ------------- | ----------------------------------------- |
+| weight | float | 1.0           | weight of the loss used in the final sum  |
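+A minimal sketch of a loss definition; the loss and node names come from [`coco_model.yaml`](coco_model.yaml), and the `weight` value is illustrative:
+
+```yaml
+losses:
+  - name: BCEWithLogitsLoss
+    attached_to: SegmentationHead
+    weight: 1.0
+```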
+
+#### Metrics
+
+In this section, you configure which metrics should be used for which node.
+You can see the list of all currently supported metrics and their parameters [here](./src/luxonis_train/attached_modules/metrics/README.md).
+
+| Key            | Type | Default value | Description                                                                              |
+| -------------- | ---- | ------------- | ----------------------------------------------------------------------------------------- |
+| is_main_metric | bool | False         | Marks this metric as the main one. The main metric is used for saving checkpoints.         |
+
+#### Visualizers
+
+In this section, you configure which visualizers should be used for which node. Visualizers are responsible for creating images during training.
+You can see the list of all currently supported visualizers and their parameters [here](./src/luxonis_train/attached_modules/visualizers/README.md).
+
+Visualizers have no specific configuration.
+
+## Tracker
+
+This library uses [LuxonisTrackerPL](https://github.com/luxonis/luxonis-ml/blob/b2399335efa914ef142b1b1a5db52ad90985c539/src/luxonis_ml/ops/tracker.py#L152).
+You can configure it like this:
+
+| Key            | Type        | Default value | Description                                                 |
+| -------------- | ----------- | ------------- | ----------------------------------------------------------- |
+| project_name   | str \| None | None          | Name of the project used for logging.                       |
+| project_id     | str \| None | None          | ID of the project used for logging (relevant for MLFlow).   |
+| run_name       | str \| None | None          | Name of the run. If empty, it will be auto-generated.       |
+| run_id         | str \| None | None          | ID of an already created run (relevant for MLFlow).         |
+| save_directory | str         | "output"      | Path to the save directory.                                 |
+| is_tensorboard | bool        | True          | Whether to use TensorBoard.                                 |
+| is_wandb       | bool        | False         | Whether to use WandB.                                       |
+| wandb_entity   | str \| None | None          | Name of the WandB entity.                                   |
+| is_mlflow      | bool        | False         | Whether to use MLFlow.                                      |
+
+## Dataset
+
+To store and load the data, we use LuxonisDataset and LuxonisLoader. For specific config parameters, refer to [LuxonisML](https://github.com/luxonis/luxonis-ml).
+
+| Key            | Type                                     | Default value       | Description                                    |
+| -------------- | ---------------------------------------- | ------------------- | ---------------------------------------------- |
+| dataset_name   | str \| None                              | None                | name of the dataset                            |
+| team_id        | str \| None                              | None                | team under which you can find all datasets     |
+| dataset_id     | str \| None                              | None                | id of the dataset                              |
+| bucket_type    | Literal\["internal", "external"\]        | internal            | type of underlying storage                     |
+| bucket_storage | Literal\["local", "s3", "gcs", "azure"\] | BucketStorage.LOCAL | underlying object storage for a bucket         |
+| train_view     | str                                      | train               | view to use for training                       |
+| val_view       | str                                      | val                 | view to use for validation                     |
+| test_view      | str                                      | test                | view to use for testing                        |
+| json_mode      | bool                                     | False               | load using JSON annotations instead of MongoDB |
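+A minimal sketch of a `dataset` section, mirroring the `dataset` block of [`coco_model.yaml`](coco_model.yaml):
+
+```yaml
+dataset:
+  dataset_name: coco_test
+  train_view: train
+  val_view: val
+  test_view: test
+```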
+
+## Trainer
+
+Here you can change everything related to the actual training of the model.
+
+| Key                     | Type                                    | Default value | Description                                                                                                                                      |
+| ----------------------- | --------------------------------------- | ------------- | ------------------------------------------------------------------------------------------------------------------------------------------------ |
+| batch_size              | int                                     | 32            | batch size used for training                                                                                                                      |
+| accumulate_grad_batches | int                                     | 1             | number of batches for gradient accumulation                                                                                                       |
+| use_weighted_sampler    | bool                                    | False         | whether to use WeightedRandomSampler for training; only works with classification tasks                                                           |
+| epochs                  | int                                     | 100           | number of training epochs                                                                                                                         |
+| num_workers             | int                                     | 2             | number of workers for data loading                                                                                                                |
+| train_metrics_interval  | int                                     | -1            | frequency of computing metrics on train data; -1 to disable                                                                                       |
+| validation_interval     | int                                     | 1             | frequency of computing metrics on validation data                                                                                                 |
+| num_log_images          | int                                     | 4             | maximum number of images to visualize and log                                                                                                     |
+| skip_last_batch         | bool                                    | True          | whether to skip the last batch while training                                                                                                     |
+| accelerator             | Literal\["auto", "cpu", "gpu"\]         | "auto"        | What accelerator to use for training.                                                                                                             |
+| devices                 | int \| list\[int\] \| str               | "auto"        | Either specify how many devices to use (int), list specific devices, or use "auto" for automatic configuration based on the selected accelerator  |
+| strategy                | Literal\["auto", "ddp"\]                | "auto"        | What strategy to use for training.                                                                                                                |
+| num_sanity_val_steps    | int                                     | 2             | Number of sanity validation steps performed before training.                                                                                      |
+| profiler                | Literal\["simple", "advanced"\] \| None | None          | PL profiler for GPU/CPU/RAM utilization analysis                                                                                                  |
+| verbose                 | bool                                    | True          | Print all intermediate results to console.                                                                                                        |
+
+### Preprocessing
+
+We use the [Albumentations](https://albumentations.ai/docs/) library for `augmentations`. [Here](https://albumentations.ai/docs/api_reference/full_reference/#pixel-level-transforms) you can see a list of all supported pixel-level augmentations, and [here](https://albumentations.ai/docs/api_reference/full_reference/#spatial-level-transforms) all spatial-level transformations. In the config, you can specify any augmentation from these lists, along with its params. Additionally, we support the `Mosaic4` batch augmentation and letterbox resizing if `keep_aspect_ratio: True`.
+
+| Key               | Type                                                                                   | Default value | Description                                                                                                                                                               |
+| ----------------- | -------------------------------------------------------------------------------------- | ------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| train_image_size  | list\[int\]                                                                             | \[256, 256\]  | image size used for training \[height, width\]                                                                                                                              |
+| keep_aspect_ratio | bool                                                                                    | True          | whether to keep the aspect ratio while resizing                                                                                                                             |
+| train_rgb         | bool                                                                                    | True          | whether to train on RGB or BGR images                                                                                                                                       |
+| normalize.active  | bool                                                                                    | True          | whether to use normalization                                                                                                                                                |
+| normalize.params  | dict                                                                                    | {}            | params for normalization, see [documentation](https://albumentations.ai/docs/api_reference/augmentations/transforms/#albumentations.augmentations.transforms.Normalize)     |
+| augmentations     | list\[{"name": Name of the augmentation, "params": Parameters of the augmentation}\]   | \[\]          | list of Albumentations augmentations                                                                                                                                        |
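+A sketch of a `trainer.preprocessing` section with a few augmentations, based on [`coco_model.yaml`](coco_model.yaml):
+
+```yaml
+trainer:
+  preprocessing:
+    train_image_size: [256, 320]
+    keep_aspect_ratio: False
+    normalize:
+      active: True
+    augmentations:
+      - name: Defocus
+        params:
+          p: 0.1
+      - name: Flip
+      - name: Mosaic4
+        params:
+          out_width: 320
+          out_height: 256
+```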
+
+### Optimizer
+
+What optimizer to use for training.
+A list of all optimizers can be found [here](https://pytorch.org/docs/stable/optim.html).
+
+| Key    | Type | Default value | Description                  |
+| ------ | ---- | ------------- | ---------------------------- |
+| name   | str  |               | Name of the optimizer.       |
+| params | dict | {}            | Parameters of the optimizer. |
+
+### Scheduler
+
+What scheduler to use for training.
+A list of all schedulers can be found [here](https://pytorch.org/docs/stable/optim.html#how-to-adjust-learning-rate).
+
+| Key    | Type | Default value | Description                  |
+| ------ | ---- | ------------- | ---------------------------- |
+| name   | str  |               | Name of the scheduler.       |
+| params | dict | {}            | Parameters of the scheduler. |
+
+### Callbacks
+
+The callbacks section contains a list of callbacks.
+More information on callbacks and a list of available ones can be found [here](src/luxonis_train/callbacks/README.md).
+Each callback is a dictionary with the following fields:
+
+| Key    | Type | Default value | Description                 |
+| ------ | ---- | ------------- | --------------------------- |
+| name   | str  |               | Name of the callback.       |
+| params | dict | {}            | Parameters of the callback. |
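+A sketch of a `callbacks` list, using callbacks that appear in [`coco_model.yaml`](coco_model.yaml) (the parameter values are illustrative):
+
+```yaml
+trainer:
+  callbacks:
+    - name: EarlyStopping
+      params:
+        monitor: val/loss
+        mode: min
+        patience: 3
+    - name: ExportOnTrainEnd
+```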
+
+## Exporter
+
+Here you can define the configuration for exporting.
+
+| Key                    | Type                              | Default value   | Description                                                                                       |
+| ---------------------- | ---------------------------------- | --------------- | --------------------------------------------------------------------------------------------------- |
+| export_save_directory  | str                                | "output_export" | Where to save the exported files.                                                                    |
+| input_shape            | list\[int\] \| None                | None            | Input shape of the model. If not provided, inferred from the dataset.                                |
+| export_model_name      | str                                | "model"         | Name of the exported model.                                                                          |
+| data_type              | Literal\["INT8", "FP16", "FP32"\]  | "FP16"          | Data type of the exported model.                                                                     |
+| reverse_input_channels | bool                               | True            | Whether to reverse the image channels in the exported model. Relevant for `.blob` export.            |
+| scale_values           | list\[float\] \| None              | None            | What scale values to use for input normalization. If not provided, inferred from augmentations.      |
+| mean_values            | list\[float\] \| None              | None            | What mean values to use for input normalization. If not provided, inferred from augmentations.       |
+| upload_directory       | str \| None                        | None            | Where to upload the exported models.                                                                 |
+
+### ONNX
+
+Options specific to ONNX export.
+
+| Key           | Type                     | Default value | Description                            |
+| ------------- | ------------------------ | ------------- | --------------------------------------- |
+| opset_version | int                      | 12            | Which opset version to use.             |
+| dynamic_axes  | dict\[str, Any\] \| None | None          | Which dynamic axes to specify, if any.  |
+
+### Blob
+
+| Key    | Type | Default value | Description                          |
+| ------ | ---- | ------------- | ------------------------------------ |
+| active | bool | False         | Whether to export to `.blob` format. |
+| shaves | int  | 6             | How many shaves to use.              |
+
+## Tuner
+
+Here you can specify options for tuning.
+
+| Key        | Type              | Default value | Description                                                                                                                                                                                                                                                                                                     |
+| ---------- | ----------------- | ------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| study_name | str               | "test-study"  | Name of the study.                                                                                                                                                                                                                                                                                                  |
+| use_pruner | bool              | True          | Whether to use the MedianPruner.                                                                                                                                                                                                                                                                                    |
+| n_trials   | int \| None       | 15            | Number of trials for each process. `None` means no limit on the number of trials.                                                                                                                                                                                                                                   |
+| timeout    | int \| None       | None          | Stop the study after the given number of seconds.                                                                                                                                                                                                                                                                   |
+| params     | dict\[str, list\] | {}            | Which parameters to tune. The keys should be in the format `key1.key2.key3_<type>`, where `<type>` is one of `[categorical, float, int, longuniform, uniform]`. For more information about the types, visit the [Optuna documentation](https://optuna.readthedocs.io/en/stable/reference/generated/optuna.trial.Trial.html). |
+
+Example of params for the tuner block:
+
+```yaml
+tuner:
+  params:
+    trainer.optimizer.name_categorical: ["Adam", "SGD"]
+    trainer.optimizer.params.lr_float: [0.0001, 0.001]
+    trainer.batch_size_int: [4, 16, 4]
+```
+
+### Storage
+
+| Key          | Type                         | Default value | Description                                           |
+| ------------ | ---------------------------- | ------------- | ----------------------------------------------------- |
+| active       | bool                         | True          | Whether to use storage to make the study persistent.  |
+| storage_type | Literal\["local", "remote"\] | "local"       | Type of the storage.                                  |
+
+## ENVIRON
+
+A special section of the config file where you can specify environment variables.
+For more info on the variables, see [Credentials](../README.md#credentials).
+
+**NOTE**
+
+This is not a recommended way due to the possible leakage of secrets. This section is intended for testing purposes only.
+
+| Key                      | Type                                                        | Default value  | Description |
+| ------------------------ | ----------------------------------------------------------- | -------------- | ----------- |
+| AWS_ACCESS_KEY_ID        | str \| None                                                 | None           |             |
+| AWS_SECRET_ACCESS_KEY    | str \| None                                                 | None           |             |
+| AWS_S3_ENDPOINT_URL      | str \| None                                                 | None           |             |
+| MLFLOW_CLOUDFLARE_ID     | str \| None                                                 | None           |             |
+| MLFLOW_CLOUDFLARE_SECRET | str \| None                                                 | None           |             |
+| MLFLOW_S3_BUCKET         | str \| None                                                 | None           |             |
+| MLFLOW_S3_ENDPOINT_URL   | str \| None                                                 | None           |             |
+| MLFLOW_TRACKING_URI      | str \| None                                                 | None           |             |
+| POSTGRES_USER            | str \| None                                                 | None           |             |
+| POSTGRES_PASSWORD        | str \| None                                                 | None           |             |
+| POSTGRES_HOST            | str \| None                                                 | None           |             |
+| POSTGRES_PORT            | str \| None                                                 | None           |             |
+| POSTGRES_DB              | str \| None                                                 | None           |             |
+| LUXONISML_BUCKET         | str \| None                                                 | None           |             |
+| LUXONISML_BASE_PATH      | str                                                         | "~/luxonis_ml" |             |
+| LUXONISML_TEAM_ID        | str                                                         | "offline"      |             |
+| LUXONISML_TEAM_NAME      | str                                                         | "offline"      |             |
+| LOG_LEVEL                | Literal\["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"\]  | "INFO"         |             |
diff --git a/configs/classification_model.yaml b/configs/classification_model.yaml
new file mode 100755
index 00000000..205ee53d
--- /dev/null
+++ b/configs/classification_model.yaml
@@ -0,0 +1,44 @@
+# Example configuration for training a predefined classification model
+
+
+use_rich_text: True
+
+model:
+  name: cifar10_classification
+  predefined_model:
+    name: ClassificationModel
+    params:
+      backbone: MicroNet
+      visualizer_params:
+        font_scale: 0.5
+        color: [255, 0, 0]
+        thickness: 2
+        include_plot: True
+
+dataset:
+  dataset_name: cifar10_test
+
+trainer:
+  preprocessing:
+    train_image_size: [&height 128, &width 128]
+    keep_aspect_ratio: False
+    normalize:
+      active: True
+
+  batch_size: 4
+  epochs: &epochs 200
+  num_workers: 4
+  validation_interval: 10
+  num_log_images: 8
+
+  callbacks:
+    - name: ExportOnTrainEnd
+    - name: TestOnTrainEnd
+
+  optimizer:
+    name: SGD
+    params:
+      lr: 0.02
+
+  scheduler:
+    name: ConstantLR
diff --git a/configs/coco_model.yaml b/configs/coco_model.yaml
new file mode 100755
index 00000000..86e65611
--- /dev/null
+++ b/configs/coco_model.yaml
@@ -0,0 +1,183 @@
+# An example configuration for a more complex network.
+ + +model: + name: coco_test + nodes: + - name: EfficientRep + params: + channels_list: [64, 128, 256, 512, 1024] + num_repeats: [1, 6, 12, 18, 6] + depth_mul: 0.33 + width_mul: 0.33 + + - name: RepPANNeck + inputs: + - EfficientRep + params: + channels_list: [256, 128, 128, 256, 256, 512] + num_repeats: [12, 12, 12, 12] + depth_mul: 0.33 + width_mul: 0.33 + + - name: ImplicitKeypointBBoxHead + inputs: + - RepPANNeck + params: + conf_thres: 0.25 + iou_thres: 0.45 + + - name: SegmentationHead + inputs: + - RepPANNeck + + - name: EfficientBBoxHead + inputs: + - RepPANNeck + params: + conf_thres: 0.75 + iou_thres: 0.45 + + losses: + - name: AdaptiveDetectionLoss + attached_to: EfficientBBoxHead + - name: BCEWithLogitsLoss + attached_to: SegmentationHead + - name: ImplicitKeypointBBoxLoss + attached_to: ImplicitKeypointBBoxHead + params: + keypoint_distance_loss_weight: 0.5 + keypoint_visibility_loss_weight: 0.7 + bbox_loss_weight: 0.05 + objectness_loss_weight: 0.2 + + metrics: + - name: ObjectKeypointSimilarity + is_main_metric: true + attached_to: ImplicitKeypointBBoxHead + - name: MeanAveragePrecisionKeypoints + attached_to: ImplicitKeypointBBoxHead + - name: MeanAveragePrecision + attached_to: EfficientBBoxHead + - name: F1Score + attached_to: SegmentationHead + params: + task: binary + - name: JaccardIndex + attached_to: SegmentationHead + params: + task: binary + + visualizers: + - name: MultiVisualizer + attached_to: ImplicitKeypointBBoxHead + params: + visualizers: + - name: KeypointVisualizer + params: + nonvisible_color: blue + - name: BBoxVisualizer + params: + colors: + person: "#FF5055" + - name: SegmentationVisualizer + attached_to: SegmentationHead + params: + colors: "#FF5055" + - name: BBoxVisualizer + attached_to: EfficientBBoxHead + +tracker: + project_name: coco_test + save_directory: output + is_tensorboard: True + is_wandb: False + wandb_entity: luxonis + is_mlflow: False + +dataset: + dataset_name: coco_test + train_view: train + val_view: val + test_view: test + +trainer: + accelerator: auto + devices: auto + strategy: auto + + num_sanity_val_steps: 1 + profiler: null + verbose: True + batch_size: 4 + accumulate_grad_batches: 1 + epochs: &epochs 200 + num_workers: 8 + train_metrics_interval: -1 + validation_interval: 10 + num_log_images: 8 + skip_last_batch: True + main_head_index: 0 + log_sub_losses: True + save_top_k: 3 + + preprocessing: + train_image_size: [&height 256, &width 320] + keep_aspect_ratio: False + train_rgb: True + normalize: + active: True + augmentations: + - name: Defocus + params: + p: 0.1 + - name: Sharpen + params: + p: 0.1 + - name: Flip + - name: RandomRotate90 + - name: Mosaic4 + params: + out_width: *width + out_height: *height + + callbacks: + - name: LearningRateMonitor + params: + logging_interval: step + - name: MetadataLogger + params: + hyperparams: ["trainer.epochs", trainer.batch_size] + - name: EarlyStopping + params: + patience: 3 + monitor: val/loss + mode: min + verbose: true + - name: DeviceStatsMonitor + - name: ExportOnTrainEnd + - name: TestOnTrainEnd + + optimizer: + name: SGD + params: + lr: 0.02 + momentum: 0.937 + nesterov: True + weight_decay: 0.0005 + + scheduler: + name: CosineAnnealingLR + params: + T_max: *epochs + eta_min: 0 + +exporter: + onnx: + opset_version: 11 + +tuner: + params: + trainer.optimizer.name_categorical: ["Adam", "SGD"] + trainer.optimizer.params.lr_float: [0.0001, 0.001] + trainer.batch_size_int: [4, 16, 4] diff --git a/configs/detection_model.yaml b/configs/detection_model.yaml new file mode 
100755 index 00000000..f17567c6 --- /dev/null +++ b/configs/detection_model.yaml @@ -0,0 +1,39 @@ +# Example configuration for training a predefined detection model + + +use_rich_text: True + +model: + name: coco_detection + predefined_model: + name: DetectionModel + params: + use_neck: True + +dataset: + dataset_name: coco_test + +trainer: + preprocessing: + train_image_size: [&height 256, &width 320] + keep_aspect_ratio: False + normalize: + active: True + + batch_size: 4 + epochs: &epochs 200 + num_workers: 4 + validation_interval: 10 + num_log_images: 8 + + callbacks: + - name: ExportOnTrainEnd + - name: TestOnTrainEnd + + optimizer: + name: SGD + params: + lr: 0.02 + + scheduler: + name: ConstantLR diff --git a/configs/example_export.yaml b/configs/example_export.yaml new file mode 100755 index 00000000..a35ca148 --- /dev/null +++ b/configs/example_export.yaml @@ -0,0 +1,42 @@ +# Example configuration for exporting a predefined segmentation model + + +use_rich_text: True + +model: + name: coco_segmentation + weights: null # specify a path to the weights here + predefined_model: + name: SegmentationModel + params: + backbone: MicroNet + task: binary + +dataset: + dataset_name: coco_test + +trainer: + preprocessing: + train_image_size: [&height 256, &width 320] + keep_aspect_ratio: False + normalize: + active: True + + batch_size: 4 + epochs: &epochs 200 + num_workers: 4 + validation_interval: 10 + num_log_images: 8 + + optimizer: + name: SGD + + scheduler: + name: ConstantLR + +exporter: + onnx: + opset_version: 11 + blobconverter: + active: True + shaves: 8 diff --git a/configs/example_tuning.yaml b/configs/example_tuning.yaml new file mode 100755 index 00000000..3ef75221 --- /dev/null +++ b/configs/example_tuning.yaml @@ -0,0 +1,39 @@ +# Example configuration for tuning a predefined segmentation model + + +use_rich_text: True + +model: + name: coco_segmentation + predefined_model: + name: SegmentationModel + params: + backbone: MicroNet + task: binary + +dataset: + dataset_name: coco_test + +trainer: + preprocessing: + train_image_size: [&height 256, &width 320] + keep_aspect_ratio: False + normalize: + active: True + + batch_size: 4 + epochs: &epochs 1 + validation_interval: 1 + num_log_images: 8 + + scheduler: + name: CosineAnnealingLR + params: + T_max: *epochs + eta_min: 0 + +tuner: + params: + trainer.optimizer.name_categorical: ["Adam", "SGD"] + trainer.optimizer.params.lr_float: [0.0001, 0.001] + trainer.batch_size_int: [4, 16, 4] diff --git a/configs/keypoint_bbox_model.yaml b/configs/keypoint_bbox_model.yaml new file mode 100755 index 00000000..acf28f07 --- /dev/null +++ b/configs/keypoint_bbox_model.yaml @@ -0,0 +1,37 @@ +# Example configuration for training a predefined keypoint-detection model + + +use_rich_text: True + +model: + name: coco_keypoints + predefined_model: + name: KeypointDetectionModel + +dataset: + dataset_name: coco_test + +trainer: + preprocessing: + train_image_size: [&height 256, &width 320] + keep_aspect_ratio: False + normalize: + active: True + + batch_size: 4 + epochs: &epochs 200 + num_workers: 4 + validation_interval: 10 + num_log_images: 8 + + callbacks: + - name: ExportOnTrainEnd + - name: TestOnTrainEnd + + optimizer: + name: SGD + params: + lr: 0.02 + + scheduler: + name: ConstantLR diff --git a/configs/segmentation_model.yaml b/configs/segmentation_model.yaml new file mode 100755 index 00000000..d9d0f50b --- /dev/null +++ b/configs/segmentation_model.yaml @@ -0,0 +1,40 @@ +# Example configuration for training a predefined segmentation model 
+ + +use_rich_text: True + +model: + name: coco_segmentation + predefined_model: + name: SegmentationModel + params: + backbone: MicroNet + task: binary + +dataset: + dataset_name: coco_test + +trainer: + preprocessing: + train_image_size: [&height 256, &width 320] + keep_aspect_ratio: False + normalize: + active: True + + batch_size: 4 + epochs: &epochs 200 + num_workers: 4 + validation_interval: 10 + num_log_images: 8 + + callbacks: + - name: ExportOnTrainEnd + - name: TestOnTrainEnd + + optimizer: + name: SGD + params: + lr: 0.02 + + scheduler: + name: ConstantLR diff --git a/data/.gitkeep b/data/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/examples/CIFAR_10_dataset.ipynb b/examples/CIFAR_10_dataset.ipynb new file mode 100644 index 00000000..f5936e70 --- /dev/null +++ b/examples/CIFAR_10_dataset.ipynb @@ -0,0 +1,267 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "f9de6101", + "metadata": {}, + "source": [ + "## Example CIFAR10 classification dataset" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "4c06d8fc", + "metadata": {}, + "outputs": [], + "source": [ + "import matplotlib.pyplot as plt\n", + "import torchvision\n", + "from luxonis_ml.data import LuxonisDataset, LuxonisLoader\n", + "from luxonis_ml.enums import LabelType" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "e5a3a45c-7152-41a8-9ebf-db54cb84edcc", + "metadata": {}, + "outputs": [], + "source": [ + "# Delete dataset if exists\n", + "\n", + "dataset_name = \"cifar10_test\"\n", + "if LuxonisDataset.exists(dataset_name):\n", + " dataset = LuxonisDataset(dataset_name)\n", + " dataset.delete_dataset()" + ] + }, + { + "cell_type": "markdown", + "id": "718c2791", + "metadata": {}, + "source": [ + "### Get the data" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "5cc9ddf2", + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Files already downloaded and verified\n" + ] + } + ], + "source": [ + "cifar10_torch = torchvision.datasets.CIFAR10(root=\"../data\", train=False, download=True)" + ] + }, + { + "cell_type": "markdown", + "id": "2befa6b3", + "metadata": {}, + "source": [ + "### Convert to LuxonisDataset format\n", + "\n", + "`LuxonisDataset` will expect a generator that yields data in the following format:\n", + "```\n", + "- file [str] : path to file on local disk or object storage\n", + "- class [str]: string specifying the class name or label name\n", + "- type [str] : the type of label or annotation\n", + "- value [Union[str, list, int, float, bool]]: the actual annotation value\n", + " For here are the expected structures for `value`.\n", + " The function will check to ensure `value` matches this for each annotation type\n", + "\n", + " value (classification) [bool] : Marks whether the class is present or not\n", + " (e.g. True/False)\n", + " value (box) [List[float]] : the normalized (0-1) x, y, w, and h of a bounding box\n", + " (e.g. [0.5, 0.4, 0.1, 0.2])\n", + " value (polyline) [List[List[float]]] : an ordered list of [x, y] polyline points\n", + " (e.g. [[0.2, 0.3], [0.4, 0.5], ...])\n", + " value (keypoints) [List[List[float]]] : an ordered list of [x, y, visibility] keypoints for a keypoint skeleton instance\n", + " (e.g. 
[[0.2, 0.3, 2], [0.4, 0.5, 2], ...])\n", + "```" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "4404049f", + "metadata": {}, + "outputs": [], + "source": [ + "classes = [\n", + " \"airplane\",\n", + " \"automobile\",\n", + " \"bird\",\n", + " \"cat\",\n", + " \"deer\",\n", + " \"dog\",\n", + " \"frog\",\n", + " \"horse\",\n", + " \"ship\",\n", + " \"truck\",\n", + "]\n", + "\n", + "\n", + "def CIFAR10_subset_generator():\n", + " for i, (image, label) in enumerate(cifar10_torch):\n", + " if i == 1000:\n", + " break\n", + " path = f\"../data/cifar_{i}.png\"\n", + " image.save(path)\n", + " yield {\n", + " \"file\": path,\n", + " \"class\": classes[label],\n", + " \"type\": \"classification\",\n", + " \"value\": True,\n", + " }" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "8171a7f9", + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Generating UUIDs...\n", + "Took 0.07454681396484375 seconds\n", + "Saving annotations...\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "100%|█████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 1000/1000 [00:00<00:00, 76055.41it/s]" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Took 0.015446662902832031 seconds\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "dataset = LuxonisDataset(dataset_name)\n", + "dataset.set_classes(classes)\n", + "\n", + "dataset.add(CIFAR10_subset_generator)" + ] + }, + { + "cell_type": "markdown", + "id": "d9454797-d804-45f1-92dc-393f76be2219", + "metadata": {}, + "source": [ + "### Define Splits" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "e2094a5d-0371-48da-91f1-b9590686339d", + "metadata": {}, + "outputs": [], + "source": [ + "# without providing manual splits, this will randomly split the data\n", + "dataset.make_splits()" + ] + }, + { + "cell_type": "markdown", + "id": "828f6d36-d5f1-4c68-9f70-80d26d45690e", + "metadata": {}, + "source": [ + "### Test Loader" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "fda91cd6-9fe5-43ee-ab88-3dfc57ff89ef", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Sample classification tensor\n", + "[0. 0. 1. 0. 0. 0. 0. 0. 0. 
0.]\n",
+      "\n"
+     ]
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgo... <base64 PNG payload of the rendered sample image omitted for brevity> ...",
+      "text/plain": [
+       "<Figure size 640x480 with 1 Axes>
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "loader = LuxonisLoader(dataset, view=\"train\")\n", + "for image, ann in loader:\n", + " cls = ann[LabelType.CLASSIFICATION]\n", + "\n", + " print(\"Sample classification tensor\")\n", + " print(cls)\n", + " print()\n", + "\n", + " h, w, _ = image.shape\n", + "\n", + " plt.imshow(image)\n", + " plt.axis(\"off\") # Optional: Hide axis\n", + " plt.show()\n", + " break" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.13" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/COCO_people_dataset.ipynb b/examples/COCO_people_dataset.ipynb new file mode 100644 index 00000000..2d354363 --- /dev/null +++ b/examples/COCO_people_dataset.ipynb @@ -0,0 +1,591 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "f9de6101", + "metadata": {}, + "source": [ + "## Adding a subset of COCO people data" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "4c06d8fc", + "metadata": {}, + "outputs": [], + "source": [ + "import glob\n", + "import json\n", + "import os\n", + "import zipfile\n", + "\n", + "import cv2\n", + "import gdown\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "from luxonis_ml.data import LuxonisDataset, LuxonisLoader\n", + "from luxonis_ml.enums import LabelType\n", + "from tqdm import tqdm" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "e5a3a45c-7152-41a8-9ebf-db54cb84edcc", + "metadata": {}, + "outputs": [], + "source": [ + "# Delete dataset if exists\n", + "\n", + "dataset_name = \"coco_test\"\n", + "if LuxonisDataset.exists(dataset_name):\n", + " dataset = LuxonisDataset(dataset_name)\n", + " dataset.delete_dataset()" + ] + }, + { + "cell_type": "markdown", + "id": "718c2791", + "metadata": {}, + "source": [ + "### Download and extract data" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "5cc9ddf2", + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: gdown in /home/martin/miniconda3/lib/python3.11/site-packages (4.7.1)\n", + "Requirement already satisfied: filelock in /home/martin/miniconda3/lib/python3.11/site-packages (from gdown) (3.13.1)\n", + "Requirement already satisfied: requests[socks] in /home/martin/miniconda3/lib/python3.11/site-packages (from gdown) (2.31.0)\n", + "Requirement already satisfied: six in /home/martin/miniconda3/lib/python3.11/site-packages (from gdown) (1.16.0)\n", + "Requirement already satisfied: tqdm in /home/martin/miniconda3/lib/python3.11/site-packages (from gdown) (4.65.0)\n", + "Requirement already satisfied: beautifulsoup4 in /home/martin/miniconda3/lib/python3.11/site-packages (from gdown) (4.12.2)\n", + "Requirement already satisfied: soupsieve>1.2 in /home/martin/miniconda3/lib/python3.11/site-packages (from beautifulsoup4->gdown) (2.5)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /home/martin/miniconda3/lib/python3.11/site-packages (from requests[socks]->gdown) (2.0.4)\n", + "Requirement already satisfied: idna<4,>=2.5 in /home/martin/miniconda3/lib/python3.11/site-packages (from requests[socks]->gdown) 
(3.4)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in /home/martin/miniconda3/lib/python3.11/site-packages (from requests[socks]->gdown) (1.26.18)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /home/martin/miniconda3/lib/python3.11/site-packages (from requests[socks]->gdown) (2023.7.22)\n", + "Requirement already satisfied: PySocks!=1.5.7,>=1.5.6 in /home/martin/miniconda3/lib/python3.11/site-packages (from requests[socks]->gdown) (1.7.1)\n", + "Downloading...\n", + "From: https://drive.google.com/uc?id=1XlvFK7aRmt8op6-hHkWVKIJQeDtOwoRT\n", + "To: /home/martin/Work/luxonis-ml/data/COCO_people_subset.zip\n", + "100%|██████████████████████████████████████| 7.78M/7.78M [00:03<00:00, 2.45MB/s]\n", + "Archive: ../data/COCO_people_subset.zip\n", + " inflating: ../data/person_keypoints_val2017.json \n", + " creating: ../data/person_val2017_subset/\n", + " inflating: ../data/person_val2017_subset/000000001490.jpg \n", + " inflating: ../data/person_val2017_subset/000000003934.jpg \n", + " inflating: ../data/person_val2017_subset/000000005060.jpg \n", + " inflating: ../data/person_val2017_subset/000000003255.jpg \n", + " inflating: ../data/person_val2017_subset/000000001761.jpg \n", + " inflating: ../data/person_val2017_subset/000000001000.jpg \n", + " inflating: ../data/person_val2017_subset/000000002431.jpg \n", + " inflating: ../data/person_val2017_subset/000000002006.jpg \n", + " inflating: ../data/person_val2017_subset/000000002261.jpg \n", + " inflating: ../data/person_val2017_subset/000000004395.jpg \n", + " inflating: ../data/person_val2017_subset/000000005001.jpg \n", + " inflating: ../data/person_val2017_subset/000000000872.jpg \n", + " inflating: ../data/person_val2017_subset/000000002685.jpg \n", + " inflating: ../data/person_val2017_subset/000000001268.jpg \n", + " inflating: ../data/person_val2017_subset/000000005037.jpg \n", + " inflating: ../data/person_val2017_subset/000000002473.jpg \n", + " inflating: ../data/person_val2017_subset/000000001296.jpg \n", + " inflating: ../data/person_val2017_subset/000000002299.jpg \n", + " inflating: ../data/person_val2017_subset/000000005193.jpg \n", + " inflating: ../data/person_val2017_subset/000000003553.jpg \n", + " inflating: ../data/person_val2017_subset/000000001584.jpg \n", + " inflating: ../data/person_val2017_subset/000000002153.jpg \n", + " inflating: ../data/person_val2017_subset/000000001353.jpg \n", + " inflating: ../data/person_val2017_subset/000000004765.jpg \n", + " inflating: ../data/person_val2017_subset/000000002532.jpg \n", + " inflating: ../data/person_val2017_subset/000000000139.jpg \n", + " inflating: ../data/person_val2017_subset/000000000785.jpg \n", + " inflating: ../data/person_val2017_subset/000000000885.jpg \n", + " inflating: ../data/person_val2017_subset/000000004134.jpg \n", + " inflating: ../data/person_val2017_subset/000000003156.jpg \n" + ] + } + ], + "source": [ + "url = \"https://drive.google.com/uc?id=1XlvFK7aRmt8op6-hHkWVKIJQeDtOwoRT\"\n", + "output_zip = \"../data/COCO_people_subset.zip\"\n", + "output_folder = \"../data/\"\n", + "\n", + "# Check if the data already exists\n", + "if not os.path.exists(output_zip) and not os.path.exists(\n", + " os.path.join(output_folder, \"COCO_people_subset\")\n", + "):\n", + " # Download the file\n", + " gdown.download(url, output_zip, quiet=False)\n", + "\n", + " # Unzip the file\n", + " with zipfile.ZipFile(output_zip, \"r\") as zip_ref:\n", + " zip_ref.extractall(output_folder)\n", + "else:\n", + " print(\"Data already exists. 
Exiting.\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "2befa6b3",
+   "metadata": {},
+   "source": [
+    "### Convert the COCO people subset\n",
+    "\n",
+    "`LuxonisDataset` expects a generator that yields data in the following format:\n",
+    "```\n",
+    "- file [str] : path to file on local disk or object storage\n",
+    "- class [str]: string specifying the class name or label name\n",
+    "- type [str] : the type of label or annotation\n",
+    "- value [Union[str, list, int, float, bool]]: the actual annotation value\n",
+    "    Here are the expected structures for `value`.\n",
+    "    The function will check that `value` matches the expected structure for each annotation type\n",
+    "\n",
+    "    value (classification) [bool] : Marks whether the class is present or not\n",
+    "        (e.g. True/False)\n",
+    "    value (box) [List[float]] : the normalized (0-1) x, y, w, and h of a bounding box\n",
+    "        (e.g. [0.5, 0.4, 0.1, 0.2])\n",
+    "    value (polyline) [List[List[float]]] : an ordered list of [x, y] polyline points\n",
+    "        (e.g. [[0.2, 0.3], [0.4, 0.5], ...])\n",
+    "    value (keypoints) [List[List[float]]] : an ordered list of [x, y, visibility] keypoints for a keypoint skeleton instance\n",
+    "        (e.g. [[0.2, 0.3, 2], [0.4, 0.5, 2], ...])\n",
+    "```"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "id": "4404049f",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# # create some artificial splits\n",
+    "# splits = ['train' for _ in range(20)] + ['val' for _ in range(10)]\n",
+    "\n",
+    "\n",
+    "def COCO_people_subset_generator():\n",
+    "    # find image paths and load COCO annotations\n",
+    "    img_dir = \"../data/person_val2017_subset\"\n",
+    "    annot_file = \"../data/person_keypoints_val2017.json\"\n",
+    "    # get paths to images sorted by number\n",
+    "    im_paths = glob.glob(img_dir + \"/*.jpg\")\n",
+    "    nums = np.array([int(path.split(\"/\")[-1].split(\".\")[0]) for path in im_paths])\n",
+    "    idxs = np.argsort(nums)\n",
+    "    im_paths = list(np.array(im_paths)[idxs])\n",
+    "    # load the COCO annotations\n",
+    "    with open(annot_file) as file:\n",
+    "        data = json.load(file)\n",
+    "    imgs = data[\"images\"]\n",
+    "    anns = data[\"annotations\"]\n",
+    "\n",
+    "    for i, path in tqdm(enumerate(im_paths)):\n",
+    "        # find annotations matching the COCO image by file name\n",
+    "        gran = path.split(\"/\")[-1]\n",
+    "        img = [img for img in imgs if img[\"file_name\"] == gran][0]\n",
+    "        img_id = img[\"id\"]\n",
+    "        img_anns = [ann for ann in anns if ann[\"image_id\"] == img_id]\n",
+    "\n",
+    "        # load the image to get its dimensions for normalization\n",
+    "        im = cv2.imread(path)\n",
+    "        height, width, _ = im.shape\n",
+    "\n",
+    "        if len(img_anns):\n",
+    "            yield {\n",
+    "                \"file\": path,\n",
+    "                \"class\": \"person\",\n",
+    "                \"type\": \"classification\",\n",
+    "                \"value\": True,\n",
+    "            }\n",
+    "\n",
+    "        for ann in img_anns:\n",
+    "            # COCO-specific conversion for segmentation\n",
+    "            seg = ann[\"segmentation\"]\n",
+    "            if isinstance(seg, list):\n",
+    "                poly = []\n",
+    "                for s in seg:\n",
+    "                    poly_arr = np.array(s).reshape(-1, 2)\n",
+    "                    poly += [\n",
+    "                        (poly_arr[i, 0] / width, poly_arr[i, 1] / height)\n",
+    "                        for i in range(len(poly_arr))\n",
+    "                    ]\n",
+    "                yield {\n",
+    "                    \"file\": path,\n",
+    "                    \"class\": \"person\",\n",
+    "                    \"type\": \"polyline\",\n",
+    "                    \"value\": poly,\n",
+    "                }\n",
+    "\n",
+    "            # COCO-specific conversion for bounding boxes\n",
+    "            x, y, w, h = ann[\"bbox\"]\n",
+    "            yield {\n",
+    "                \"file\": path,\n",
+    "                \"class\": \"person\",\n",
+    "                \"type\": \"box\",\n",
+    "                \"value\": (x / width, y / height, w / width, h / height),\n",
+    "            }\n",
+    "\n",
+    "            # COCO-specific conversion 
for keypoints\n", + " kps = np.array(ann[\"keypoints\"]).reshape(-1, 3)\n", + " keypoint = []\n", + " for kp in kps:\n", + " keypoint.append(\n", + " (float(kp[0] / width), float(kp[1] / height), int(kp[2]))\n", + " )\n", + " yield {\n", + " \"file\": path,\n", + " \"class\": \"person\",\n", + " \"type\": \"keypoints\",\n", + " \"value\": keypoint,\n", + " }" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "8171a7f9", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "30it [00:00, 205.90it/s]\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Generating UUIDs...\n", + "Took 0.01261138916015625 seconds\n", + "Saving annotations...\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 469/469 [00:00<00:00, 38298.55it/s]" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Took 0.014262199401855469 seconds\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "dataset = LuxonisDataset(dataset_name)\n", + "dataset.set_classes([\"person\"])\n", + "\n", + "annot_file = \"../data/person_keypoints_val2017.json\"\n", + "with open(annot_file) as file:\n", + " data = json.load(file)\n", + "dataset.set_skeletons(\n", + " {\n", + " \"person\": {\n", + " \"labels\": data[\"categories\"][0][\"keypoints\"],\n", + " \"edges\": (np.array(data[\"categories\"][0][\"skeleton\"]) - 1).tolist(),\n", + " }\n", + " }\n", + ")\n", + "dataset.add(COCO_people_subset_generator)" + ] + }, + { + "cell_type": "markdown", + "id": "d9454797-d804-45f1-92dc-393f76be2219", + "metadata": {}, + "source": [ + "### Define Splits" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "e2094a5d-0371-48da-91f1-b9590686339d", + "metadata": {}, + "outputs": [], + "source": [ + "# without providing manual splits, this will randomly split the data\n", + "dataset.make_splits()" + ] + }, + { + "cell_type": "markdown", + "id": "828f6d36-d5f1-4c68-9f70-80d26d45690e", + "metadata": {}, + "source": [ + "### Test Loader" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "fda91cd6-9fe5-43ee-ab88-3dfc57ff89ef", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Sample classification tensor\n", + "[1.]\n", + "\n", + "Sample boxes tensor\n", + "[[0. 0.01685937 0.06797917 0.091 0.3528125 ]\n", + " [0. 0.35225 0.53258333 0.198875 0.46741667]\n", + " [0. 0.12070312 0.5095 0.17703125 0.490125 ]\n", + " [0. 0.0641875 0.19933333 0.16723437 0.78827083]\n", + " [0. 0.18629688 0.16966667 0.10821875 0.39202083]\n", + " [0. 0.07939063 0.12197917 0.1323125 0.1749375 ]\n", + " [0. 0.26748437 0.06470833 0.12559375 0.32183333]\n", + " [0. 0.46409375 0.1044375 0.09125 0.18172917]\n", + " [0. 0.33039062 0.1841875 0.19323438 0.40210417]\n", + " [0. 0.5545 0.19102083 0.25448437 0.78875 ]\n", + " [0. 0.67929688 0.08527083 0.11071875 0.24933333]\n", + " [0. 0.66404688 0.0471875 0.06909375 0.1483125 ]\n", + " [0. 0.3251875 0.12770833 0.13967188 0.30979167]\n", + " [0. 0.8171875 0.05416667 0.18125 0.6 ]]\n", + "\n", + "Sample segmentation tensor\n", + "[[[0. 0. 0. ... 0. 0. 0.]\n", + " [0. 0. 0. ... 0. 0. 0.]\n", + " [0. 0. 0. ... 0. 0. 0.]\n", + " ...\n", + " [0. 0. 0. ... 0. 0. 0.]\n", + " [0. 0. 0. ... 0. 0. 0.]\n", + " [0. 0. 0. ... 0. 0. 
0.]]]\n", + "\n", + "Sample keypoints tensor\n", + "[[0. 0.090625 0.11666667 2. 0.0953125 0.10625\n", + " 2. 0.0765625 0.10833333 2. 0. 0.\n", + " 0. 0.0609375 0.12083333 2. 0. 0.\n", + " 0. 0.040625 0.175 2. 0. 0.\n", + " 0. 0.0265625 0.26666668 2. 0. 0.\n", + " 0. 0.040625 0.33750001 2. 0. 0.\n", + " 0. 0.0640625 0.36250001 2. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. ]\n", + " [0. 0.43437499 0.62291664 2. 0.44218749 0.61041665\n", + " 2. 0. 0. 0. 0.47499999 0.61874998\n", + " 2. 0. 0. 0. 0.50937498 0.72291666\n", + " 2. 0.43593749 0.66250002 2. 0.51406252 0.80208331\n", + " 1. 0.40468749 0.69375002 2. 0.46875 0.83749998\n", + " 2. 0.37031251 0.61874998 2. 0.48750001 0.86458331\n", + " 2. 0.44374999 0.85000002 2. 0.49687499 0.95833331\n", + " 2. 0.47499999 0.94999999 2. 0. 0.\n", + " 0. 0. 0. 0. ]\n", + " [0. 0.22499999 0.58958334 2. 0.22812501 0.57916665\n", + " 2. 0.2109375 0.58749998 2. 0. 0.\n", + " 0. 0.17343751 0.58333331 2. 0.2375 0.63125002\n", + " 2. 0.18125001 0.66874999 2. 0.25468749 0.68541664\n", + " 2. 0.1671875 0.77291667 1. 0.2734375 0.72708333\n", + " 2. 0.1328125 0.71666664 2. 0.26249999 0.85000002\n", + " 2. 0.22031251 0.87916666 2. 0. 0.\n", + " 0. 0.23281249 0.98124999 2. 0. 0.\n", + " 0. 0. 0. 0. ]\n", + " [0. 0.1765625 0.3125 2. 0.1765625 0.28333333\n", + " 2. 0.15625 0.29791668 2. 0. 0.\n", + " 0. 0.109375 0.29374999 2. 0.15000001 0.33958334\n", + " 2. 0.1 0.39166668 2. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0.17031249 0.59791666\n", + " 1. 0.1296875 0.62083334 2. 0. 0.\n", + " 0. 0.15625 0.85624999 2. 0. 0.\n", + " 0. 0. 0. 0. ]\n", + " [0. 0.25937501 0.25 2. 0.265625 0.23125\n", + " 2. 0.24375001 0.2375 2. 0. 0.\n", + " 0. 0.23125 0.25 2. 0.28437501 0.28958333\n", + " 1. 0.20468751 0.30208334 2. 0. 0.\n", + " 0. 0.22812501 0.40208334 2. 0. 0.\n", + " 0. 0.27500001 0.40625 2. 0.28125 0.47916666\n", + " 2. 0.25156251 0.48958334 2. 0.27656251 0.625\n", + " 1. 0.2578125 0.61874998 1. 0. 0.\n", + " 0. 0. 0. 0. ]\n", + " [0. 0.109375 0.17708333 2. 0.1109375 0.16041666\n", + " 2. 0.0984375 0.17291667 2. 0.13437501 0.14166667\n", + " 2. 0. 0. 0. 0.20625 0.2\n", + " 2. 0.109375 0.24166666 1. 0.2421875 0.28541666\n", + " 1. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0.21250001 0.34375\n", + " 1. 0.1640625 0.36250001 1. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. ]\n", + " [0. 0.32343751 0.16875 2. 0.328125 0.14791666\n", + " 2. 0.30625001 0.16249999 2. 0.34531251 0.12291667\n", + " 2. 0. 0. 0. 0.37187499 0.15208334\n", + " 2. 0.29843751 0.20625 2. 0. 0.\n", + " 0. 0.27500001 0.32708332 1. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. ]\n", + " [0. 0.48593751 0.15625 2. 0.49531251 0.14583333\n", + " 2. 0.47812501 0.14375 2. 0.51249999 0.16458334\n", + " 2. 0. 0. 0. 0.53281248 0.22916667\n", + " 2. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. ]\n", + " [0. 0.44374999 0.29583332 2. 0.45468751 0.27916667\n", + " 2. 0.42812499 0.27916667 2. 0.48593751 0.26041666\n", + " 2. 0. 0. 0. 0.53281248 0.33958334\n", + " 1. 0.421875 0.34791666 2. 0.546875 0.50625002\n", + " 1. 0.3828125 0.42291668 2. 0.515625 0.61666667\n", + " 1. 0.31406251 0.49583334 1. 0.54843748 0.56041664\n", + " 1. 0.46562499 0.57708335 1. 0.52968752 0.73958331\n", + " 1. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. ]\n", + " [0. 0.66093749 0.32083333 2. 0.67812502 0.30833334\n", + " 2. 0.65156251 0.30416667 2. 0.71562499 0.28749999\n", + " 2. 0. 0. 0. 
0.77187502 0.37708333\n", + " 2. 0.60781252 0.375 2. 0.796875 0.51041669\n", + " 2. 0.61093748 0.52708334 2. 0.765625 0.64375001\n", + " 2. 0.67500001 0.64999998 2. 0.7265625 0.63958335\n", + " 2. 0.6171875 0.63333333 2. 0.6796875 0.81458336\n", + " 2. 0.63437498 0.80624998 1. 0. 0.\n", + " 0. 0. 0. 0. ]\n", + " [0. 0.73750001 0.15208334 2. 0.75156248 0.14375\n", + " 2. 0.734375 0.13333334 2. 0.76249999 0.15208334\n", + " 1. 0.72187501 0.12708333 1. 0.7734375 0.20833333\n", + " 2. 0.69375002 0.18958333 2. 0.76406252 0.30416667\n", + " 2. 0. 0. 0. 0.74062502 0.37708333\n", + " 1. 0. 0. 0. 0.72343749 0.35833332\n", + " 1. 0.67500001 0.34583333 1. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. ]\n", + " [0. 0.69999999 0.11875 2. 0.7109375 0.10416666\n", + " 2. 0.69375002 0.11041667 2. 0.72812498 0.1125\n", + " 2. 0. 0. 0. 0. 0.\n", + " 0. 0.67656249 0.18333334 2. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. ]\n", + " [0. 0.43906251 0.29791668 1. 0.453125 0.27708334\n", + " 1. 0.42500001 0.27708334 1. 0.48750001 0.25\n", + " 1. 0. 0. 0. 0.53281248 0.30833334\n", + " 1. 0.4140625 0.30625001 1. 0. 0.\n", + " 0. 0.3671875 0.44583333 1. 0. 0.\n", + " 0. 0. 0. 0. 0.51875001 0.56041664\n", + " 1. 0.44062501 0.56666666 1. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. ]\n", + " [0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. ]]\n", + "\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAgAAAAGFCAYAAACL7UsMAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8g+/7EAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOz9ebRnWZbfhX32Ge69v+FNMUdG5FBVWdU1drfUre5Wq9WDUAuJXiAEli0DZrQWLNvQgBYsGy8jsQyWwRgbjK0FWF4YWRYCAQJJLMk9qCcNqNXqQVVdmTXlGBnzG3/TvfcM/mOfe38vIjMrs7qypMyqOFWRL+K93/v97j3n3LP3/u7v/m7JOWeejCfjyXgynown48n4lhrm7/YFPBlPxpPxZDwZT8aT8Xd+PHEAnown48l4Mp6MJ+NbcDxxAJ6MJ+PJeDKejCfjW3A8cQCejCfjyXgynown41twPHEAnown48l4Mp6MJ+NbcDxxAJ6MJ+PJeDKejCfjW3A8cQCejCfjyXgynown41twPHEAnown48l4Mp6MJ+NbcLh3+0JB9C8ZuAgcPfpT/cEwDJDO/Wz7Drn8RxCy0ZeKGLJAU1eEtiV0fXkbYTbdp+/XhG6DILj9i0QRshgMhmwyGZBksBaEBGLICIggSTAOWC1YnxxDBj+b4C5dgmDBCTZDloSJiZgFbAWSkBAw1uGdI4ZAFBCx1NYQY491jpyFlMA5S8gBaw1VtvQ5k60gKZLaDtPUGBEsniwRKw70KsEKJidShiwGSRkjiS4ExArHr71K00yYPXWD2EZEBDFgyWQsIWfIkZwzIgaLIUtGyDr9xujPEDJJV0IEySAipJzIgDVGvwdkEZLo9RkM5IwYgZTJYoipR2zN7a/8Ou3p4tG9IsInP/kpfst3fy+r1QJrLSKi7wGI0d0gCKlcT0avT4yQMxgj4zUD5Jx1i513WYuGlRij15zzuN90LoQYe5arBauzE8J6RY6JkDKTZoJ1Qtu1dH2PQbDWYMp7xZQIXY9zFkMmpIi1DmvAWY8xkBKkco19iPjK4Zwnp0hKEXLGiCWlhHGCdY6uDRh0vyzalpgSNmXa0NGFSNXs8rdf+BInZ2dlVnTejTH8zr/nd7G3t8evffbX+N7f8l38xE/8JLdv3+FbWcvr4qXL/MAP/BDr9QpnDT/5Uz9B17b6jIjh45/6NAcXL/HCiy9wcv8+MerZ4itPVXumTU1VOaZNzcHePiknNu2mrLvupRAzXRf46PMfJcWA5EROZe+JwVcVIhBzBjH89b/xN7h4sMdu48k5YYxgyn63zhKBFAIpC13fgQhG9CkQMSCJELOuqwjWGJyfMNm7DK7i53/mLxNC94FZ9+H5ftP3yxpZXyOTKb3oOWOtYT6dMt+Z6CMuCW9rDvZ2mPiG127fpo094eyMgwuXMZVHJLPetJw8PGSz2ZBTIsVEszMDa7lycIkUIqfLUyQlECHFpOedYby+GDMpRYwM54+eJTmn8QxLKZNJ5ISeaSKEGB+9w5wgZVJOWGOJKSICOaXxdeP65Tx+FmQeWdacgXPnYDk1y+6Dt5jX7a++8/541w7AOP4MsHnTR73Fv+Xcv0X/6GwiVtQTMEA56J2xhLYjdmE0UG46JTQVYXNGymC9wzhHl4aFERwWkYTYTBALySBWL0GyLqy3lk27ISNYI9Q7l0hMMK7DZEgYQBDnSV1HI5kYAt4IbY5E4wGoyMTYgWtADF1KzJwn50hKPRIjmRq8YLpWN773ED2SwRowodXNZhI4ixGri5oN1hly6HWenMFlR+xarK3Yu3qVto9YZ7ECKfRlZjMmJ4zzhBj1e0Y3hiBghJSSGhExxPHgAquWGMnqMIlOuzoH6CVaY8qKlk1qdC3FGDIJa98aREop
bQ2q6DvIcMid2xtGTLkP0QesXFtKSR0P2T6cw8+H6zFiisOwPYzHHWekbEsh9pEHh8fMZlPq2uPIGGPYbDb0IWC8p/KenDKh70kpY0XwzYTKV3Rhw8RPda5F9JAQ1MFDnRXvHH0I9F1HChHnnM53FpyvyTmSEzjnSDmz6QMJAeMQqwek9zV1MyHlAJLI2Q4zj4xnhc6fEUHOP2LnH7UPhl34uocAZpgEEYy1PD4l3lekmNg/2KdbnmBEnxPnLFVVkch0faLrVyyWLcZaVus1Maqj5n2F9Z4YE+0LX8BZNcjDFKeggY4xhhAjIoazxYo+RNqdue5/o/u56zq6rhv3hjEGpLh5KanTnTKZSNd1pJhwziNimc0DH7p8gz5GtsHV+3NYa/WcyXqdb2X8oexlkzVo6zaIGHWMAD9tQCykhEXn6ujhIccx4+qGPmcSliyGvuvw3gFCtTPH78whQ4gBI0LoehKZ2c4O0925eu7COI0iQi5BUM56XSlnDVYZgg91AHR99IUpJ7UzYggxk3Kk+GzqgKRIOcCI5QxWB0B/N8ao71eclZQipERKgZQSOSdIkRyjOpxiMM6T2jWkyHvxoL97ByCjxv9/CazfzYu3kf/4vSxlwwtYxgVw3pO6nhQDkLHGYiYTxFbkLpBCKK+rwViIgSSZpoJ4dkLftUjOZOeo5rvEbHElwsYINiX6dgNkmvkebtoQu5bu7AiJQnNwAayj73sqC+HsmG61JDiP391DcmB9cghdy3zvgNxMSVmjQ2/h7muvQc646ZQL12/Srs44vXsP5z0Xn36Gzhr62CJiObp7i74LOO+5dONpsjPkBLbytGcnPLx7h8s3b2LFkY2a8b0rV+gQPBZjhMXD+5wePhw3oHWOpz/+SfrVqhyImZgSlXOjr6gBdCqRhq6JKchAPrdMRgajCyKZ4vSCUe8c0ViGLIoa2DdvoZwzd+/eY7VakUkgDnJxCopD8YihEopRM9u9I9vIQcq/jRhS0k0zOAP6cOpTN14rIFlIKSIIMWdcVfHarTcIfaSPSR+6wasvlxNSJEWN2HJMSHGYqsoRYyz3nDFGjT7IOH8C5aHNZMl468gxkVLCeYexhpwSYhwR8AY9AGLCCISUiDFz5dJVrKsQVmR8mY9ep2s4ULae2reKrX/LoftWQHI5nNOj0ZOezhhnqbxnNp3iK8vxyTFdF9isezKZGBJNU/PszRvs7+3SFkM97IW266ibhrZrSSkRY6KuK317C8ZunRBy5uZT19TRSxo5GmuRlPDisNUEay1WDCFE1m3LarPBeX2OxKixm+/ONQipKnISlusNxtXEflVww/frygt1M6XrOkLf8U7XqU53R+o6RARnDTFEQrumMfvEGHElgMwhcnpywsVnP4RLGddM8JMp/XpZzg+rCFyGlMGL4ICcekUhs5R51nUZAwcAzGj8cwkeRRThG84YMYaUYnlNBux4e5WFkNTA6xsacrak8ZzScyulLaoQUyKmREpZ9y6Zvo+KNpazLoZAjgGTs56dzqmd7OJ7slrv3gG4hBr+dzT+8ObTXR+M8aiUcsAbcNaq11eMPwK2rojGUFkLy834Pr6Z0KWEkKmN0J+cEjcLUobJ7h6187SbNdX+LjEkDaS9pT07JSeNXncvX2azXrC8e4+cEq6u8M7Q54z3Hps6ThdnhK6lvjChriyHr71G7luSCDN7SVGLZKhMxfLshM1mhbWWa1ee4/TebU6OD0l9YLZ/AM4iCSrjadcrVssFKWZcXUHl6EPSFEO34sGtV4kpMZnMaIvTg3cEMi4bshVy7Dh5+JDQt6Sy03PO9EE9TEEwRqOUpFg6nDP8Ysx2bYzAsF9l+3VcOmMIZd0sFhl+lrNCmlLSA48NYwyz2ZSqrui6wWBT0AC9vlQelC20pV6xZEBycWQG46xpCt0652K8nMs+eiQvsP2vGDCZ9WaNd44f/P7fCn3CeEV01EMvzlGKrLu1ph+s3lXOcPfBQ77y+ms8e+M6O3VFjglT5tY7DwjOOaqqAjJ91wEGX3mcs3RdDxmcM+q1G8uvff4FPvltH8WyvcdN6Oj7SMRw5/7dcgceiAh9uc48oikpRowpsyeCr2pC33H5yhXu379Piu/NAfF+H9bYLfJzLgVEgdOlRKNiDCnD2emS9aojZ/3dpq6Z7jdcPtjj8oU9vLHMK0dKEwCcNYQUiSlgZI6IwViLdRaDIYSgq2iHAzvqc1jSherCZ10rU/Z6yggG6z0JeO2NN2hDz3w6p+u7MYL0dqYRLJnJxJNTh8kZxd7evy7Aer3GOlci3ndwAAZjqrBfwbsyYo06dkaDjaqqySmxPF2U5xUSlpAi3td4b+njGhsyMSY2oadbnjGZz8nWsAk6d5J0TRRBK4FMTqScSVHhdkSIxUEYri0P0H8JGoafaUQvJeBIxcbpWZbKftRjU1OY6nDqs5nyEPkH9VhSIuY4OrKSsu6VknJgOiMbve73au3fvQNw+E4veBzyH0IxhSvTedc8Zyh5c1Im9b3GMhlcUyOVJ3YJ01g2cYMA1nuyd4Q2UFUeWS+J6yUYi5WMrSswHtZLnFhCToiFyhiOV0s9Tl1FWCxZHp9gq4a4XiHG0JXcv3VC6oPCbKLw3Or4hP29Cxw+vIv3NdOdXZZ9h6893gmHDx9iMtRVzfLoCENm/2CfoweHuLohpoRLCVs3rE+O1ICIYWfvQPNfBlxO3HnlVdr1mt1Ll4koBJVKdGmx+nuV5ejV23SbDVdvPsXZ0SHr5WprFJK6rsPv6Y7LGFOg0cE1LpY+xHgOpxkiWkruM4+5eNAHWX0HIaNeMAmsefMWqusa792YYzMGUkzjXhjy81vIX8btMkT8OW/z3/pMljzigAyci/of2YWyNQDGGEKXqOua2PeslyumTU1oF/QxkWJkuVjSth1IZjaZYKwjCyQB6ypC29Ou1jTOM59MIGeNSpwlp6RwMZZ1v6GuKprKE/qezWJN222YTKY0dY3RhCGrTUdoN0hO6vzGhDUlN2wMbTKYPDg0gkJlOoyxZT23jpEaGseFixdZLhY8/exz3L//4B2e1W+eIdYUxxce3Qy6R6yxeniHyGq1IvYdOzs7uNpSO48BKmdxVmg3K3qrKZq6abBGiCRs5fFSKypU3tsg5BQRMjFFrNF1st6SC7ybUyCjiEFOia7tcNaRRPdmSiDWslyt6WJEbWViPt3FeH1mDk+OcZXDuVrTDVlIWd7HKEAmp0hOhef1rn5HinGEWFBNV9dYAcTgfIVznmg6+r4nJeVLibE6HwLZWpyvNDAyYAX6s1OapiZng/eeo3v3CH2PGCHFqKm3rA7Z9kL1XBmQxiHoUPOVdc3KeTUgAxoTFZSgbMHB3j3i4JSAJach51/4V0PgM+b79TIU4R3zFOTYIPnR8/bt+BXvdnztHIC3HI/D/ef+njO55NgHg0SM2BKFxb4fJ9HVNck5+rbDuIawackplAinIaKQsxVo1yuSQDOb6kSuW5LTCCslIaHQXOjaAi/pZ52eHjE9uEDoelabFWIsbd/T95G6EvLZGTlGJZUsFkz2L2G
aGen+HYy3bDYd69hT+0y7WNN3GxBhs9ng65q9p25wcvcOOUFTN3Rdj4REZYX12Qm5GC3bTFhvWma15/6rr7BcnCLGsHvhAuvVGhCSUcfJ5ISva7rTU86Oj9i9cJHda9c4OTpSc2ld2V+6wQYHYCT7lXB2zLfLkMPMYw41w5hX1t81WBgJLpprj5o2iPr9BOPBd36EEHjw8KHC9mbIk2+5BENu//xWGbxqkHJ9ijIM9zSkAc47C4xXXt7mkRyAfp4RQ1M13Dm7TQgd169dxVrLsmtZnC1wxuKnU8X8jNDnSI76UPbrDfePD5nN53Rdy3LRF0KWJ8VM13aKtMREVVXEGIkxUtU188qza3bIORNCYN1u8NYymdTs7M45OzsFMrPZnNViibUKQeKnmhnLw3+KMyRGjUrW6H97r7pmk+mUEALT2UwdwfetgXjvhjyGIG3nbEBLFE53Rjg6OmRvZ45zwrptS3SZSwRmOFlsOD5Zau56dLAy1jqcdWqcYsQ7M+7VGMNwpuu/MzivgHVVVSPK5awbnd64WZNyIsTEZrMmpsxytQJjcAtL17XkdI8QerzztF2HtZbLV65ww3uWqzPGNNn7dn0zsXCUtuOrXe+5SLm8xCJIUp6S916fMyPkGOhDKGhksSdGnSKDZeBUkBMmZZwYQoaq8qwXZ6yXZ3DuXBmCiyE6P3+p2zPpLcZb3c6Aog57Mg98p3Oo5FeZgTf/fYsQgikcKoF3QFW+lvEeOQDnN+SQD7PFe8nKfotZ8/RoztpYQ7/ZoMxXzc3oAmcwDmMzadOqx2WEXDtCr1FTbtfKxhWDbWaYDMuTB7CE5sI+2Qs7VORuw+nRw3IuCNVsxuzSRdZtILZrckEO+pTJIlTWcrZegclY6zm4dJ3OWVbLYwBsPWEVOj1gXWZ5cjrmcPcOLuF291kulqyXa8QKqW5oNx3ZgInQrtaQM5PpDslYLJGj27c4OzlBCoxrXcO673Al2rOAOIeRxN3XXqWezJhfucKqS+oBk2kmHsRgxCKUvH5OpIG1nzPWOIU9i8FIMlAfjeYph0oBGSo4pIAFGWOLJ58tuSTHLIBknKvetBv6vicvVxohiaIHmYy16izklMdI3Yjm25ESVUGpYCg/z1sHRTd/2V7jg741/Ll478Njk3LCWSF0HQd7B9y9c4dXX3sDrCXHiLWGuq7H65aUx0MgJkWBRAxXL13l/sMjXl2vlMwjQt/3Y1qj7TqssaSo7H8xemgZc47TgGglSYxgDK+88YC2a5VMmRPeV3jrmO/ujU5iHpKRWeckp1SeHaeO27mooqpqnPNMmslgkYa74v1rKL7+MRgCgZLuOudZGrBe0ModS1NDFli3LTnAst2wXKwYgr35bEYzqaldzWQ6haT5+5QzXa955JSVrBVjImFK9JihIA39JhJjQNadprSG9AOMBisPaF3OiIW6qWnbjq7t0QBYn8M+RGLK5BzYtC3TyYyjw5MxV/3BWtt3us78iOET77DWksVqnZAIdV1r2iREnMlEUbTAABTI3rla0yOyGc8qrEFiJoT+UY7IgDo+buTf7vvvdDv58W8PCMgWlXw08nnnORneB8nKIxLIkh57hTz62jd9ztuPbxwCcO4A0r2q+RZEYbJuo6S8DBjnsFVNCFrCV5Be5QVIxjpPNpYYMt5D7DpAHQmMZR0zuxcvs3zwkLg4Q9qWs5BwXshJEQY/aZjtX2SxjFS1MraNeFw1o08wnThyu9AcHnBw5RK9cSCwWZ7hnaGqGpJ4Gp9wqWe1WmCMUE9mzA4uc9S2TE0mbFZM5jsggjOGnabh+N4bDIFZvTvDOUN3fMri+IShBGR3b5+QE85YjQhTwhhH5QxHt17HO8f+UzfpsmHHuZFl6qpGnSGrOUs9IGQk6KUCRQ4EvlzK0shgpOQTsxr9Yfvo66WUbWYEq5BoDmrYUmHAlry+LrmuubUOa50S6OxQqpnGaElZwnrtMacx36/ljVvDrnbzMSBxwMeGv8qj8O/5R0oQsEIfOm5cu8KzT10jlSjae4c1RhEMUWchpqg/R9MmOWnUlyVh5CaQWaxWI9wXAzjnCTEoeYstERDSQHUBhKqqyFkJidZZ2rYtzF4h5UjXbuj6yGS2w4MHDx85qIYIZZhrUyDJXO5XREqUpKWMfLWD65thnAMTRQaUy4xEq+3LBCvqLO3t77M8fcjDw0OWqw0Jg3eOZjpjNmnYmU65euUi3ltWyxXNdIpJELKWfz48PmI+m1JXGo32Ieg+DlEdTe9LCa1eXB96qqqhkA3IWdG20Hb6u073n4jydTZtRx8Dfejpuk4JYjHRhx5jHCKOGDpy1AqR0UH8JhwiZiyx1jSphRTZ39/nE5/8JLceHhV0xtBMaiRnYlZSr7UOCT2D0z0EDkak5Pj/Tox3G+u/+/cTsYV7loeYoJx9b973wGO8qLcf75EDAG/v2YxPKqBGu+/68TXGWnzd0AWNuPJwoOc8krSMdWQEYyxGMiEGUlaCH8bgRCC22KpiurNPGzu88XiBdq0kQl9N2aSMcwaTOmLfa/RaV1QhUMWe06NjdUiMxTQT2j7QCOSu1/ILb7FOqFLk6O49+q5HRMtSNiEya2o2R3fJGerZDlmESWVZPLjL8vRYDalYLhxcZrNZszw74uDCBe7fu4u1lno+J2TwQzRuHU6EfrHg9PSQ+e4+jRhMtyHEltB3ZMmkdsPqwR2dl0JUySlTV5UaMuOo5jOyha5ExVsvVx+QXKKRLCXqzloGM/xcRDAYkijhJY+5Al2XAXUAKVGqJcak6A8gueRqS/phICQKGt2aAvuT0yMGHXL5LP3Z+P1hT4lsfYVxv5V7EsGIRvNtu6GezdVBoqQ7GAy2km+UgV3yfzkoAzclNm1HCD1VXbMzmYxohfKWMmLqc9e/RaIH0t6QAgkpkhHIiYmvFRlJev9h4mk3PcuuL/fPOQs/PGFaKTFoN5znUNR1rY6A8+dylo8/i98k4xHnSB2eLTP7XBwpUlIr6vy+8uobOO9oJg2u0dSQBWpnaGpDiC2Cp648se8xxrAzm3Nydsa9ew9YzqaEvsNZg3MeI0LlPeJMqRhQImBISuTq+l5TQk2NALVxHMzniIG+C2xiKIRAdYpT0tx1ZS3ZGIIErDj6kOn6Vvd/TsX5+yZc12EIGO9ADNYIjkztnWqsVDXTyqueg3e4kq5JUcYzyBhD5RymPM+aJTjvGH8A585YjHVIVsI6MJ7RbzX+DjsAj8ZdOtK572TE+MIIVZJIQWvxVUXX9VouIYNTIOTYF2MgGF8REEQG5qVG6dZ5Ys5MrLA4Oqba2WMtnuCFia0Ip8eklFXMp2nYSKC2DXmt8PR8fx/nPJvVKScnJ+S+B8nU0xktgphM7FogUzUT9uczFqcnrBZLJpMpm02HGMtkNicCjTOcLJYYZ9m9eIEQIqd3XufgyhWWixNC17O7t8+0rnj95S9z/eYN1mdLhXCbCa5u6CJYKyQsXY5MvOXevdvknFkcHbM8OdHSvoTmgo1QV1Nq31A1IMZinSeJstkTmjLKCWw2o+BIKgSpQSvBiJ
CTgBGF1EqqSUQfnpxUtMd6RWLEKKyWjBBTOLcFshJ1YiKTdCOOpTGUPJmQS/STSwrorWr9z++m0dCfh77Lv8dIaMiRncPiQgyEmPnVz/46aRBlMRZbDoeu79lsNqU+X/eYMYbJpMFaS+UrvNNyn5hOaNtOdRVEaKpaEa1y2MTQ473HFQREHYyolQFG10MQHjx8gPeeC/uFCOod682a2Gfq2U6JZh9/vuQRT98Ux2sQVHLjgaeJlA/gEfcbGlJ4MkMZIGz3hAwk0WIYrly5wnK1wPmMtwYpTvZi1bJcdcTbDwfuL1mgdp6YMpu2Z7la0nWJtt0Qk3JhJOdSp68oE1mDB7EG64qGQ8p09w4RY5hOp5wsVlRVg3cWMZYQey0PLVs2pljWPo26As56xDgEq4TVb9LIfxgZ8E5Lh60RvDN4K0hKRYgpl/y+OkvkROgDMaVC31AdDw0i80jYjINOCh8cF0CvVRCnDtGAMr319Z+7M/cNdQDeKb9Qcp/Dw1fg0EIHL5GwlAeglH8Zq7AzYIwjtmpUxFlMVet9aXKHPEyAGGpnaY+PwNeYakJKPS5nXO5Zd2vICeMbpKqxIWIks1qt9DJjz/r+bVzl2T844PDOHYyBajKh7zONr+gXSy3v6XtOb99m3a65duMZ7t16DSHhqgZbTwhRH9gQOsjCw9dfp+82XLz+NOBo254swmJxwguf/VUuXL2Jn8y5/drr5AzVdEpIucDIyrifVp6jO29gnOP5z/wm7t+6xdnhAwiJWGr0p5MZs6tPsemj5rwySMwqFhRVrSp7jXqtgKXUshYBHY3oSyRV1qtnWB81pxHNOw0kFCmoTs7qGIk8DkMVo37ueyOMDWPKowD/DExWTRVt+c3bUsC8Nf4Me+pc1D84ASNhsPygGIQYAlevXePo+FhP96RO5qZtMa4imUifDbVXQxpD4vDwWJ0ma0dBEUSw1tL3AWMtTV2p1kHWFIrqI+RCmExjBFrVNTFGDJoCmM7mrLqOO/dfIRYuQUiRGALXbtzAer/1ccab2Ua0A/xrhnSJ6HWqUuE2JfOtMKQgOdZY5Z7kR7KPqOoo9N2GCxd3OTp+SIyGdhNp2zMkFxLlZIr3nhQSvvJgha5LRQeiZn9/j77raJqqoFtFebMY7jo3SpjtAqeLBTFHJvUEX1VMpl6fo5A4PV0hslHjRBr5XE1VQUr0fdCjLmt9+ODEXH/qBnXT0PVKPPzAWLDfwBhVE43grcXkhJSbTjmRQiECDj9DheSMsaTUY4wl9AHE4Oua1OfiWA3CRO/v8WYHRR0AMYYU4qOpn7fxZsS+mZz9VuPrQADe7pAxo6eiR7ka8sFLH0cWjFWhGrFWczQMoi9CLt6acZ5YDIye6RnrK2Lfk/uOcHZG33VML1wmZoVakyhK0HUbcoZmMgOxSE6UtD4icHR8xHzvgPnuAYd33yABzleYplEJYYqKHplus8E0DXtP3WS1XrNer8k500znZGPIuRh/1MD0oefiUzeQyQ7Lo/tIVmGZqqqZ7x4w2dvn7OyUvu9w1jLb2UcwGKNkuT5G6gTLxYKrN26waiMXrt1gZ2+P06Nj1qtTYheY7x+w6nsVvBiZz1YNaZbCws9j1JwkIdZghrkuBiSh4hNkLX2CQb4UfU3JP+l8CJINpKAowuMeadbXrVcbdmdzQkxAomALJcd+Lg1R/jcc5gqxl5+VaFvLAbcn+wD8n0PIh48eX6HKh5GqclTWcu3CczR1U6Da4jA5x+df+AKTyYwbVy7Qdx1GDHXl8U7zxwYhJEVbYiEsUtIX4/NXkvJGTMnRp9FIG6uizTGoXOgb9x9grOOp69dVthbIGELoWYXAnbv3hiTfo9FeHhCQIXVx7mk0pigvyrn0yTf3EKEc/AXeTeeq40WjcecrYhcA4d69+7Rtr4Ix3tI0E3bmM4TI888+w7SqiL0al2QEK1ZJn2QtxTNmrLCQ8pkiglghhEBG98fDoxP6FNjf2aWuKqwRzVMPToPVygCxjtWm4+jomOtXrjCpK/oQ6PrCLyhKgL7yWqoc+sKuf7+bsK9vVN5TWYdNgh/OBF1QYgla+hDxxpAKmZcS+QuQyKWSS39PjCrxfWDnrSgAFjiLARHUmOqR00+/D+/6DPgNOgCPH7vn8F3kXB40s60qT+XgYjzMrZcCm6kEpORM9h6JeVRUquuavniExhhCTphmSmONSjzmnsneAckYLQXJBueEvlvhJzMqa6nmM/qsuvp9SNjpjGndYJtG6/O7QD3dw/gJxnuMaO495ozf22GnrrDOQ12xDj0e4cK1p/Th95623SDOkcRx6cazilJYQ2ccptvgmymXrj+FsZ6qmdBloU/qVKQQqSczmumMTd8XPYgMBroQuf7McwoxZyHGiFQ1e1evspsva1RprZrrLEVlb6gNP6+iJwXyLwI3SUugGLMu50ImAZP0dxN5Wx1AHjX8h+XXDfi2Qp+6hmSGMGmQ7R309s9voQEJkCFFkPXzh48zGSh5vPFAKHvMGEsq6NE2BVBQCgRjHAbN9drc020i1jtSSlR1RYqR2WzK8uyMdtNSNxXdptwzUPkaW/uCUCVS2M6rRYgp4etK4eigs2GtpmG6PmKi0Lctm/WKXNCxttuUnhAJYsaYiG8suaXkrId53lYSjAIkJTI05w+EgWEp7w76+2YZtqRntpgQY+QPZX+KVvWsV2su7O9TeUddV3jvmEwalotTlmenXP/Qh7QXSQj0SUmhRlQmuqoqdWxzcdfKZzjvCqE1lzRR5mB3V7UiREY0T7UIVDSo7VtC17Hp1xwvFtR1Q+UqYuw1xRAjKVqapiETWW86rf3PPSLpLefhm2YIVE3NrG4IfSxznkeRs5QyYUgvFqOfSt8TZyBkwWRRGXNrMVm7n7jRIL7/EwCPX50UzokVISIqUkImG6dnwlayYAilyjnzzuPrQADedJlb8pFsXzNMt4jR3H/KOKvSqv26BSANiTevNbdptVaNfjFQNO51/SJWlPiH8zQ7FSEGWoM+5MZoZCqCm8ypJzMSEIyQoyBEEMHPd7BiWMdAGxMeA/WEbmgAktWRCVm5C0y8CjtEofaNMuFdTbaGPiWstbhiDHJlyWgpXIWWkQWT8bNdwNCjmtGN9RyfnSJkJju7Y+mXJIW+nfXFwCt875wtBD9t0tP1ipxkQfNdSaHfGLUhUYrFIJaE/ljvP3jKIqWmf2DzbxmzYkoUC2U7wVB2pBGm7kEx+qAZEc7rzm21+bfIwFASZ8xAYDu3QYvRR0raovATzLlceAbVxCaX/PfQQ+DxjZ7L/xU1sFGlpNc5cufl+6zXK6xYdnd3S1mN4e6Dh2A9TWXxzrNcbJTvUPgLJrdYA6nraeqKyWym9ysK3YaYefjgiJgi3nrqpiLHJaHIyYoR6tmETRc4PV0QMJwuF7Shxxkt3dw/2MNVFXdvP8B4N873UDkzekwFXRki/aHo0VpT9OV1bj7A8c67HyV1tW109dgPB6TJGo5PDrl46QDJWXPK3qlanYBYzxdefYOzdaRutKpjvV4rubKqqFyFs56u78bUlHcao
YeYKJL+YxmrSk97urbDO4ctXBMpyKaxQg6JPkbuPHzIdDojJti0G+qmoqoqRDJHi0MtFRXBVxMuXL5RGnd9E69splTUDATOQZ9kKI1NOG/pOxQpTiV1mHNp0hSLQmpCxJYeLJYchhPq/e8AnB9qO5UI75wl+nMO4BZ+fPR7SCkRf+fxHjoAbHO+w1WfY/+TNNIBS8xg62as/8YI2ZSueiKEqDK4VTMlilFW+rgJFOSxZMgdNidsEshBa3OL5G/MmZBBXCkrjOogJHTDJARXFNiMMrTwvhCpsnpdQ6Y6i0LRA8zoi8635ExlDApoRLIx2ALdGu+JxUutjGCs14PKaIQduw2r5QIQdg8O6EKPlcKgF23EI1KELXIuUJcrOcNM5QwieZQizSXiNkWGFCOjEd/mpSlpFrYlS0Pd/zky3ZjTliL1K4NO/7notMDQRlQYJ5Y1A8ZIdb3ZFGOknz0gFKaQPYdofTjE85jzlGLrtrl/QffJkLPVaynXy1ZZi4JKIFpiWPuKqvJsuoRzFU2jWuOLxQbrVLXtmZtP0/eB09MlBj2sh32SRTA503btmNevfDXOjzMWZwx9VA1vI4Jznr7vS2mSJioq61WYKmgDkOl0yv27h0XVLPPq7Tdo1x3Xr9+k8nV5ju0W4suKoqXSOGQsv9wCIQzd1UYE7ltgDA11OJctEQy5uKS2IHlN0xC6NWeLpQqJddpwJ5ZGQRcuXKILERMcbRswpibEwHITebA6KqkkP2TDxkBnqOrIGUzZM5FMHwPe1zgRBF1jUyBsRZUcdVVx/dpTxBh5eHhECAm7XKuWhBTNDPRMu3J1hlhHF765EQARtKTVavplaLw0VrekPJ4VZtjvlFRh2fjawyMVlC/TNJ6+78onfPCeDGedloYPlQzngb/02H4Y0dlvuAPw+Kc+lhYYIrcSTedY6sytBWeIVlvxUrwzSQlrBdOnkveF6c6UTUrYnEltT993WCmayyIkK9rtyzgoOdCY9eExiMqEDnKPpS3OCBUX+DT12n0whJVGmHkgJeq1DaUkrqqQoleAEcSZUYBCoT8IUTvgCcr4tmynxZD0CrJhUk84uX+XlDKTyYRmOmXZdSUaHKJwncuEkv0sloH8pgYZzUeX1EjOeimmnE6CEom889sOVqNlUHh5bExRbmNACQa4bSD6DTXMIgN0v43inZWx3e/jdckpBlarlRpJa9mWxZ3fwZTrHZAGnbuBYDUg4XnUCzhn8M/lubYkw22CIGWISej7iHeOT37kI+O8hBBGwZ4+9ITSe0G5A9qcZ9O1iu5Yq+SbIukqzqgmd/k8Z7WqIIz5SBl6BaPiJBYxbuzKOOQmYYuykCNt2+HqOXfu3h2vH2x5aXGWzmmBD/MwsMIGf+BbiQRoRPPrIqaUPg07aVhLU/af4+6DI9q2o/IVs52Gvb0dZtMpuztzvDVcvniRxnkV5el7MAbjHa+/cYdJ5dnf3R3ffegtkCmoVVkwZx1tjHz55Ze4cvkqT129TO56Nt2GEKP27MiJ6XRCSnEUbtr0Pd26LdUoRRjLGGIMpS14Azki5bz5Zh0ZSkVNaZRlNcAaNDsETQeewytJWRvrjHEnqgswlMJZMSox/AEdYqUYdaAEHtvAf7sZxtNctAHWuxnvsRDQ4982nC9UlAI5C5BDRipPtuVgiyrsEDbr8e0Wx0cQIZpMPZvTNDuEkMnOkrPWeqaUiEYPvZRBXAWlLldPScFXmiqIKY3iOCKi/keBV+us0YK2uc3YBKHr6MKazXLF+uQMSHrwG8H7Bu8rmvkO1WRC09RQSEJ96BSOyuCMFGdAIGuKwgKLo4dAZmf/AjGpPG5B50fkN6ak11QMYSqGI5X2kCqQMdx7HKNnNTRmNKijUShGa9wyRaVMjC2a1IwiM4PRGVdPzhsuU0iNUjifj67/4FydnJ7w4Q9/mNSrPsGgnz8UHCCD6M+5TSyFA1HSFMMm37Z9LUZ+RDDYOpuydQoGNbUsQt1MWC6OaUPL/t6eKiCOJTWwWZd2O4VU1HctFtip68K6FyjRRO2rcwdMMTDWKkEyxtLgyulcy7m8fdmPY9MhDN66QkoDMRlnIaZeD7NxrssjmrVETLRHysilGB4WM6TRihdwblq+qYcYJR2bwWml9AKnEDIRYh8gZ+Y7U65fv8psNsWSqZzHOYf3numkQTCcLhYYa5jMJsSUWK5X5BSYNHOmTVWaRGX6qCS0gzv3uPTil9mevbqnP7Vp8Xfu4V/wJW3zmE5BeU7t6LQ8Ruc6t3jDXjbW8sx6w+Miu9vXwX8KrN7LCf47PXIRCiuk2qG6xohgMqV3g2NTnjtN46UiBzwgiOoQOKfNgjQIfCsH4IPxlAzCahq75XP38rjd3e6tb7AOwPmJM+e+pvGiRgqaWHJW+dM8kr/K1z7gnBoRJBK7npyCkji8w02nxDbgqobkGloM2UZijuQciEmjVCsWSXkkxA0F7Fo2Zcd6WutK7W+mdJ2DPDB7RYhZI9uhxtpOpjR2yu7FK6QQtU9z7Ngsl7TLNeuuZXlyREgqUTyZzaimU+qmwdWNOjhikKREMXLGOIX/16sl1jl2Dg7IIVMZCwPBp0yvN6XdpOQx4rRlgyczNPrV6H90EmIc4Xw9jFTDX0rfviFnnnIuObYts98Uj2Fo3iQUBnse6mcVVpfSXTDmSIjo2p434sODWKLchJKmBkb/lli4PTAfeQ5lEO4YouMtFDBWC5xzOnTN9PWJYmjLNcSsmusX9i/wy3/7c6xaVYZ0TiN7GDonbrkHoAIb1mgtv5Hh52mEGMtk6f2qlCIpRV0fq3wNZXJrLwtbUlHOClVdk2Medc6N0/TX6dkZpm6gtDRVB3rLcB6cIlMQl2GGM4zaBGoIz/Xe+IAccr/RYY0KTOWQR3XJIegQI6XUOLBeL3nq+lV11EIgGIPNGWctq7bl9du3OVssaNsexFBVfkxx7e/vcefBMfceHhfVOZUGnncdn3nt1luGPxOAGGHTfg1344AD4P7bvqL5Kr+dgX8C+GNfwye+H4ct4mEqV67yt0Nglgu/IqHVTMO5JzC2PtcgJlIVnQ7nHJvHoXLgg/NclNQwsaSih1T721y/2c7hO42vwQGQc3+GDx4OGPPo3w0MqnAYo/i4MYh14ITsDCZFCD3SdfR9q5rZsjUWbjoliaOPAfoOomoFDPCnGIPkXhmhqXTiKgp4zrsx8h2iIbLm8o3o9YkMOdRC3iuby4uSqRL6oKeQiX1SbX+x2GrOtJozv6AiL6HviF1Lu1yxWa9Znpzo5zhP3TTU0ylNM6GezLDe0zQVt1+9RQYm0xnWVcSQMFjyuVr8YXoFrcNXDoBeKZmRC4AMnfb09zTq3WrpD3LBCCX/WErTRhhfxo5puswyxPgMaYDB+KbijQ/RipZupke8zSH6Hz5TSWklGtYF2KYeRsPG6MxuuQa5CHdIqRqQ8TXbHbm9hy0xagjF1DhsNhuWmzXXr1ziuZvPkMWQY2TTtohREZ84QPdk1us1MYRSuijba0AQ43DO8frd21wsIj5lynDOMWnmStw0QgwqFmPt
hBAjSHG0xHBycoq3HkEIaUE2WY12ggv701GlMI+4X7kzY8vWUCfWFZhPYFxTrZY5Px8flEPuax9j+qTs6ZwCgkUkjZyIbHLp32DYrHtWqzXtpqUtkrt6Xlgq7zk4uMAFaxXNKzwVZz2TpmG1XnO2WBR0zYKxSiIs1/L60zdIVUUq6pFjSmwQZxJF7kJImKLEOaQbySD2OyA/jaRnEP+i3k/8WTLQ9R3NdEpdTXn9tVdYLM7eNBfPAR/ibbHYD9TQFs8DqsV4U5mhE6dDRBG0EYEco3/lYOSiupmTphFC/3a4yft7KPfJlnPVFAdgsG1mDM4e/SWLee91AAZDP2yxVP7kc38vB5bqr24hMQHykCeLeGNJfU/qNvRZjXkzm5FipF2vwDqoakKfMFE0CPJ2ZDYqAVq1nXOMY/SWjCEnCH2nKIMU9vuAJMvWmAyytNqzOhVlOEtAiW9SosKhttqgMG4fYtEl0OYM4mq8r6mmO8xzhK5js1qxOjtjdXbG8vRU88iFP+CtZbk4xYhhsjMnoDB8zAlSifItW4WzErsXF4UQs/aNJmFLXplyLaVH5lhCOUav56Ch0bfIypId0gXDz4elNkYKI18eIU8K5YErfIOcZVRDy+VQG95qEFzKlDI+CnFwqKE+3xWw+Awp5+0mFz0IrLXbXMZj0OjIBRj9CCn3qdrgg3FYrRZcOdjHOhXZMdaSYqDtOi27ygmRDPs7GKDvevoCHc/mc1btmhgSk8mcPrRcv3qdvflsTEM5a1W/YSALGhlvi6yIgmSYTqa8+MUv0NQNly9exHlX0ASLt557R8ea5x2ft0fTaKbsWWvMOBki2pf+5OSE9XpNVVes1x9oIPhdDxHdizLEgBkGVNIYTb1NJ1PW6w13795DRKgqR91UzKb7GLHs78/ZmTbMmokiBFmNhnNuRHd2Dva4sr+r3QLL51ZnC3jpFQDu3HyKWHoFeOsIURsGdV2rPJys5M+UEn3s8cbqcx8iiU8R8++mqS7jspD734qJkZR/G9lbuvwX8bMVk9kev3j4kHuLs+KUbx8GQR2A9/uYA/+jd3hNc+cO1eHhOSdWtQEAPtz1IKL6KXfvFGdXz5VUSBlZMv1mg1ltIEN19JCubUvq5IPmEGfM4hTXaiO5EMJ4nkvu38S7AqDvsQ/uvat3/xocgMHQK2y6hf6HvJYoZDnkJQcIN5ScPwNckwmrJfTaCa2aTum7RDKO2HUYK9reNnvERKItbWxLFGlKDp485KoHo3UO9s7DcVAof8MBKkZla4uRGEraJKP1lWivaG8dsVcijvTqVc4nE5x1hFLWZYumvTLrKfX7gqkbdua77F6+Qrdes1qcslmuiKGlPduMEbL3FZPpXCsfbCmRJI+NdhKZyEAALFMuQpQ01tF7a/HO0fddiTw1d2ZKvb0zVmV8cxwjoiFKVK+owEkF1paymQayYkKlOMeeAIVwhYj2IKBEYI+QzspqG6v910MsyYfiGw7IBMWRK1toYPaGlEpEa5QzMDgEMhRZPbolhVLlUa5R1ySPDsm0nnD96lX61RK/V43OSCqiPNY6Vqs1MUYWyyXz2QyyRmmxwPCrTUtIGedUe/zS7i5hdUa9MyP1Hc4I/aYlF+Eg67zuh5zxBV0Z1ARTSBjn6GJP223oOqhrjxPL7TdeZ+fCJWaTmu1N+YI05NKmWHOAiaG0UnPJY6eznEaE8FtheOfLnAgxJMCRCWVjKVGv6ztmswk7O1OOjg5JKdL2C003iiX0FU29gxGtL7fWYL1KO7vzGhMCxuSiMgdVtUW+Jt5y0rUcnZ6oc2vtyB/JsSPnWBxSMHhSCGAuYd0/x2oTSKvA5uEX6TcBrGPHNZysA22/5ODq70Wem4L8LCldwsi9wvl5exT4/Tr+58D+O71os9Y/7zje4TVFTI7ua0nDvA9H6PXP4+PtFj8naN/dPX+NDgAoIF16E4s2h9nW3wwOAGA0sskFNRDnVGQnJmLbgQh+PidmCxKQlEkxkGNGnNbXD7XmAxFEhnzfkKe3RoUGHyORwePksgwMte15W/OeGUlxpdIQEdk2bWEQ/CiEPO+1rKcgCRplD3WoyjYdo1gxuJ05lw/2IUXCas3h4UPa5ZI+Bqr5Lr0Y+vVadclFCskPjBRWs+IOoxqdsQNhSEff99jSGla19cHkAtALIylOCj9D+8oP0ORYQV7uXz2ZIc2Q8rlcXNoKAhlRtvuQHrAF4twOfdcbN27SNE2B5hgh/0dIifncvBfHoHJOSYlFMhrO5/vlUV9jeJ/h/ct6DV0XlRORcaZilZYcLs7Ymc9wqFdlxND3kfneLl3XYWun0XqZh6ZpiCmxXq20bbARkjMcb1Y0zrMJnXZ4C2CcZV1axs7qGgv4oZoBgxOtxjDOI96zXi4ISZGYB/cfkjL6nlFlmhnSL2iaxcDYy4BsRsRh6NpY1zWXLl5kd3fvbQhP35xj3HuipXcJo03CcIVRD4v1kp29Gevlks16Q11VbNaRaFv6PrLetBydnDKbTPFWS8g2XYsRTQ2kFOlTGks8cyHMHsTEp8p1vPjSq6xrbfDUdp0KFBWkkJS0J0WpNMl5F2M+jpHvJ+fXEAyXqhlXd6/hn9/jxnPPUM3mdGdLbr/2Ki996Uv0bwiz5p8mp28D+dNI3pDzK8C7i/TeL8OXry/x1kwHEeHgwgWcq8bo3ojQNDUpZRbLJWIs6/Wa2Wxa+mbksbx2UBM9OzliOpvRdh07OzucHqqmwgdtiAj1bK6quTkRNhtC15agS3grb/8ZhGvvEun4GkmAefyT81Bjfu5UzwlyHHDicrJnBl05ciTFnkzG+RpxNbEL+HlD2qxIMWFtRb27wzoFbSlb2h0OhnpwPGQwHqOxZ0vpH7+UyKlY6JzSyKkacsojz71E5jkVuLbUzcecNNrPlAe4uAbJYkXIKFyYciaQcaLxrjGWECLd4KlPpzTGMN3d14PcV/QxqWxvAozC/XptZoSvc87EQmZ0I5FlW9aSY1DoGpX+HZCQwRFJUtIBQw6+zNswgYPef3lbjf5LOeN2Wh8t8Ttfcjd0bHy8P/l8PqcPYczZZZI6GEb7q+sbbed/2M+M/9Z7GRjvW+N/Lg8gWxQql+05XmWBZ0TAe8vJ2Rl/85f/FnVVqSix0X2lrVk1H19VKsAzNPIZmMgpRUIIWO/IMdO2LdYYvvLKrbIfTOkbr3l/M6QkhpSTyIgAOGtYbTY0vuL+gxOyGHoSIWRi23Lj5tPqGBT43xiFoQcHZ5i7wQndlmuW9R5SIt8iw4yOLAUCcjifsD6BZFaLM47uP2SxPNTcf6upHWcbrPPs7FXM51rFU1cNBmHTteSsyJyIoZPiGKdM7TwZJXhW56Ks2WSKrevxWclkKq9CQwPapJ0wBeEjWPdPqDMeMrWrefpDH2J3fw8x0JPo4gYzq3jukx/nqeee4+WXXkIQbt68Se3+Ue4+uE+MryHyp8n55O/S7P/Gx2eBX3qL71tr+cTNp5nMdrFW19Y5x4X9ffoQeO3WLbxvuH3/Hk/duDFqK3SbjhCUAB1j4NZmw7XrNzg+Oub
DH/4QL65+jbOu5ZHz4wMwjLXsX7iIryaE2LM+OmLVt4hxZCzEzaO/IIbfhXDtrbgBbzG+Ding7WnrgOZcsP3oFJc+6RjEdsgAR4cec7bAA5Wp6DdraqCeNLTrNTs5g3UMUrMSw9jMRkTlHiOZPpdaddi2ryOPyni5XJQUY00GGfN4hmWpT7d5QA1L9Fzy76r5DX1Ufe5BonbstBeTQspoFJ2LAa1E1EGICmur4JAlSiYahXNtUWwTk7e9ELLG+HHgGFB4AefgbUNiaNmXSzlDLqUvOQ8lUfpeKhoDo7ZA1tcOztP5tRrK1qSU14QQHjEmRjQ9ceP4kO965aXB7JJiLCDpdtX9i58n50z16succ9O25KjB9L+lrfLlvcJb/fCdx+CLlL9nMjdC4LeGsN0rwz2f/3txOrbvM+yebVrh1e//rbTzOc47rLN0m40aFK8CTirIFMfmNJPJFOsMfa+6A6TEbDrTqCVF+hjpOq05T30gi+X+4QOEpKkbHAPJdmi6pLX/ghH7yB0M6ZKUvjUQAK2IcFsnNQngSanFG8umbXnuuWd4eP8+9++9zuVLl7iwt0ddeeraU3uHd5baeyaTmklV412FcSqx2rcdMSUenC64fecNbly9zLSqVP43Z5rVGr6k13Lj0j7ryuv8U5rWREUNrGm0VFQGMa4ZkmqyMUzchMlkh9n+LiEHbBJMyuTacfLl26wenHLpo8/x/Cc+QW4DN29eB2955Suv8lf/+l/l4eEMkT8OeQN0X2W23v9jcGptaXyj3zxPDFZ3e9ATKb20IatsM+XZDlHF0cR7cpHsDH3/qF36gPgBMghdofcfw3AmytucnTCQtt/N+Lp1AAzwg8Bvf9tXpO3X805JSpBKDqc/l8tZnL7123wD0JtOhP/v3gVe93VJuW6V8zIFXswKgceUaPsOayzWSOmoZ8BazdUnzbnnrLKNgW2kNpD8FAYv5Yki2mHPFKJeUjRgYLUbGNngRsN58iA5KkYT2ZnRhBsxZDtEQzLejzWOnMMYIQ1961PK2weLbZSv6QQhZBVbGjry2fJzGwO/7Usvvu3eG8ewUdfvJpcHcAm4Uv7+IXSzfAX4wrv8/a8+3tVGz+fTRm/984/9zV/ihR/9Efp+w8nxAiOKBIWk6JC1mjtuo6IBh4uz8XmUgg6tT0/porYhrppa0ypdR07QzHfo2xW6gJmcnaaCchzlnUH3phlEh0YejB4Yj6ZkvolHSQOOpaFiAIfzDc61mAx/61d/mQeH93j66WvYlJjWM32mTEIkIRGSCO0qk/qeGCLrtqXvevZ399j0PQ8XS23fvLePLehaFsF3Wwe18RWmmSifqJBAt+mu8p/iwCFTLDtaZRQNZtKACA7lG2QRfLTETY+dOtrVmte/8CX2bl7l/ue/wuWPPsusmfPRj36Mk186IYZ/GY2p/3s+4CoApXLGaUntUOUyNLgqZ1+MQdsu56KzEYujnhIJ0a6sMeHrRsmDOZ9zivMjX97vw9iiTouexUNLY+2s+xY3MVS3vcvxdTsAFWr831+gYwV8GvhbX/VVdc78ttWC/2K/ZujBvo2UB5qjjHnzPgSiqKbAUImA0Y5zzlhiiZgDxfiiG3obR2oOX0l9eUQjckEA8tBnvPyeFH5FJpNGBTlIJfofnINc0h1S3neQh0xJsLYo/InWmmPA5ELINFIYpUNEPjgamYF7IKV8Mg3/PmdcvnDlGmd1w3p5xtnhwzdF1vP5nE9+4pOamRlcE7N1NNruIzj3fOlsd52q+ght21JVqsY2mTja7q8hvIj3r2u5nhlVhEr+Sx6Zp8eHiBBD4P7Dh7x265b+Oyqk77zDFoEVyOP6pcyoKa5VIJanY+KjvSoGfvbzX6APHc2kpvIVTVVT1zUZlQxOKbFaLclFNrquVN+9D12JYtYIKlC12ARC6EcH8OThIc7XDCRaTXOUCEByqQIoxv8c6jYKJaVhPb81xrYEVOhjAqkwPhLTilkz47nnPgwxkvulKspZV8rwMkasNmzKibbrMTFydnpG22kkfevuA+pmQnIWsvDFl2+RQo+r1DDN+56Plut4+e5dll6dtcprL/sYtyWqGUaWekg1hlfpu56PfezjHFw8IBjUoBkLScjecvH5m4oKTmqmuxMtn376GU7u3GPWHHB653Crhc+n0fPlv+LRSOuDMxTdLCjAmGZkDEJSHOaxEKZlm/7ZorWlBLD0TMiUVsAf0GfCOBW6EiPjskpGg8/Hg2ooCLZ511vgPVIC1PFfMVBSHnMHxAIGsWib35xBbKnzPO+dmS16UZS9sgBWD2ljDPQ9YXMOzhGAHwPztFYPiEV8RWKXbPaAX+ERinjZNL9jveRjfcsgpzK02LAiRYdfP0AMJR+v2vsjV0DUWAoZixp53ZYaXduSmxpY+TpKwyJ0Ew9qfWrPzuUygZwy2rGgzEMuTPgMOWolQCpQmPbBVq4CaLMgGGrDSwdGSi49M85pHEoAi/EcHjpzbp5GdOEtEKc3Llzi7nzOyQPL7aOjkWuhtyNcmkx46ts+iTN6IBr7aeD72Ww27Mz3WCxhvnOVxWKBd47OOk7Pzjg42OXw8JhLly5ycvoRrN2Q0wkh/HF251P61GGNFMJjHi9sVAEsRjCj5MVu3bK6ekp/+TJt2yEIfTHmy8VC8+oCQiImUbInUmyvoiGzxQIePCDlzNm6xXtH38NitSCls3E/tu2GzbrFOdX+1zXQngMxKRPce08o8sTeOXUsUyTHyIc+/BEm87Sl1cjW0AvFCcwZZ1xxLrcHm1CipXfZCeyDPnTvm3FfpqRlo82kpl0V5C4EnPPcP1xp99AHp4TU03Var29Qsaj9/T2uXLpCsjXRaCOaZrfCGct0OqWqK1abNa6u1MCkpAp0ZYQstCETYk/etPR9GAWkBrGilLKmBVxPzh37Owc8+5GPcnbvIbc+90XMOiCNJ6HCL77xTPd2cPu7THcm1BPP5FPPcNBdZvWg5XJ7wpfvvDLC3/BxtGbog+kAAIjV8kgrZhQqMyXgArRqIyeM9Uoyz4NTUEpAs6Y+NX1amo/F8CaO0gdlGFFVW+MpNkwVX7MUu/D46wcE8F3KHrynDsAhcBcYI0oZbL0F8QgJ4x0xDAB5SZ7LNueThzK9wRlI15BeDa96us8g8sOM8phAzuodSirv0RvMxAO/F+x9srtf2LhanpaBdTvUqG8hoaGunGLwclbjbgfBiYFgVfobIIzKYEoE09p9g3ppA2wzmMU8NodRODhlVYNL6LVps4s0HuxD5tzIVgebco3bRjil/n5IEcAoV2oLdIQxo3Sw+i2lEkJABvh0WMSCbIgMDoNefSoP0PkNo/nuXKajXG0+/7Ma4t+Hdd8DQN8Hqroh5wXGXiCEh6WZkRD6oCVdach1ZWLSz3V2nximOPe/pU+e09P/hosXzyC+hoyiR1v3xBRRn6F6oUsBa+HZp66ojrjRGu8QQintDAVSt9x5eMxqvea561dVFjgl2r7lwpdfggcPqLzjt3zHJ4kxEXutSR7g+IE4aQauSE50XUfoe5xzpeYb7j+4z6c/8Q
nqwmfR9AFMqoZNH/mVL35B7ytnwDGQHVMpkQQQsQqD6k/KV00JiHywDrl3M96q1wSw7VGRI7E8/0aU++KnHmcsoQ8cPjjCe4tzFXu7eyCZSgwHu3t0sePpZ25CjKxWG5x1YyntINqTUubC/gzB4MVCTtjTrSDP01cv005UPjilRNu12tk0pSJEBKAlxs4+i+XDXLt2gy/+1F/j9KXXqQoyGHIkokFRyIk+R4LVZmLVpX32r1zh4OZldi9e5ru+77uxuw1/5a/8AtK1HzTb9uYhKsw1oJ+DOPJQ/RRzqWISrYAZ0ETVK0GDyaxlsiL6vFhny/cexyff75NVUrvDfDAgHYPFErbp9e0wxp4LON95vKcOwKPj3AObM9ZC6CNpkJU1hnyu1WkedU1kC5ulBPF/ClINFVFYa9id77A736FvN1Re2wWvNmsl54TEumtZr5dqqO3zSPM6eK+bRz+CrcEoUW9BHgbYaOgqmAaiydAET0r/+fMkPluiaK2QPAdb5dJ9T2vsR5iWrRHGbOWH1VuS7fyU1w5VDoMPu+2KWMoUi/IYDAdluZ9ihEZyW/k9VQFUotoghLS9ptJDYMgvDddRPM7zKQBTcpre+dLA41yOWjw5/yg5/2b6EKnrhs1iTTOZ0oeokRKq7GWdpes7bLVthOIqr4d4VZFToqlr2tBT+Qbrfg85eWL8ebz/MuTbY+XDOVemRAMZZw1922GbijaouE+/XBL6XmH6qgKB5WbNvftH+MoT00U27YqqqqmMlvQNw5c19rKNwlNKWIS6rkoZo85FbS00EwSwztPlzBe//CUq53AxE0IHkrBGWJ4dk4wrTaYoSM4Q4RakLOvfY+k8+MgTJ4yG5ptpvF1OU5CCqCnjf/jqTFGqdMrib3zF0zeuc3Cwh8FQVWrAa+fx3vPll1/i8y+8wI3r1+m6bnSiK69qnllUFEvE0MeOTejJOVFtluO1rDcLepNGombtB2dQU07DeRFTJIcNknte/tm/yvrePYw3xAx9jrQmY1MmmkQoBF+bhZRalndOOHnjZV7+rMfs7DK/eJGnv+15PvOpT2F+5Zff/zbtq41ik81Y6s2YihVbAjL07/2mHaVdBk6T8mX0ezlGXOXVGSvG/wP7RGSVpc9ZG5FRGuGJNRDP35VO4FAq/27HN8wBGOwZCDkGwtBOl61Ii2DU8CuIw9YbACRB+h0IftzXdVXxkQ8/x9UrV3DWsV4uaJcrJGaaSQNOo/s+RV5+9XXu3rtPDt8BnSDyc9pEKKXzF8dg8otiEMYP6nh2+7KUSlc4hfXVTm9hfYphFBR2t6IZ+WyUkGLIeOOIKYx16mo4EinFMZIzRf0PUeb/UNYoMDoHUlCFISc/3IEM74ecMwwFJUgDAx+MKQQbMSWw3ILIqRBtctq2DybB0IGOlLaOAWW5SuS8RQ30B8b8A3z8E/8ok+mE5XpJTdEDKHoF+quqKeDcUH6nfRu6rmO9XlHVWg0QQqCZVLSbM3bncyQLxsxZtN+Psd+OM3+SGI8KKY4xLWBc0U8Qg/cVv/Jrn+N0uWBQzms3WkJjnV7/at1ijGW+u8OD+4dkSdTek7PwsZMTbqAdH198+dYYjaujuD2IhkZGoXQYdM6Pao1iDJu242yx5mf/+i8yaxp1UCqvnR1ToplMWXddseQqQ3zOLyxGvpyW43OukX9Gm0p9LRHAB3mMzZZS1nLJnJBkaVdr+i5QVw3WGJq6prl0EQx4V42IDLZi2QUenJyxWK7YtImu7+iCphiddVS+GuOsGFR5LZXOcztd4HvLz1748mssvNO2tWjaUNtbwyDe5IxFkufy5B9gcvZFzOkZ2SUlsWVF6iRFUkHpQPTcRKuADBHnMiEn0skh944PeeW1l7l87RrXmwms3iwR/IEZQ5rLGJXrzqW5G4OOyVDmCikpf2eICgcoPOdzImjOs9lsoKRPH+UAfHA8pYHUq2j40ChNG7jlt3jMjbWYrwEB/IY5ANucC2j4qtC+yrsWuVqrizuQ73I51wbNAPK1Ej/DzmTCxz/6UXYvXYDKk51lWjXUs5YUA2JK1JgS9abn4899iP3JlJffeINN/E2QIoa/TkGKHgmShjyTlu0ljLNbQ1dyUbbAUtaYsZWsMXofw+bDCLZ0vQNGKd9RCc4WR2FAE8797zz7PA0ea0a77o35XspmLryFqMbHFu33LJxzrgCktOEFRIgpEkuJjCltkjVS1QNUUirMWh5Zu5zA2KEcZxvlZ9iiJIOBMg1N/Xv5wd/+L3BwcJFUyJPGOQYRpqEdr3IX1EPftB2r5YrVZs0eO+zs7KgaoDXKfDVDOqR4ugh9H1ivd5lNL2HdKaaUO+ZzeghJUFU25/i25z9KyOomhUH+F2G1XrFerRBgOpkwnc9p27akRjQVccEaODrCiDCfTyDlsi8cMfZ0vf7RSg3LpGkK4qKKhl3fEUPkwv4Bs488z/HZKX0IVN6TY8J74ejkhItPXWD54KFGQ1rwef6p0v8nFQRSpTn9fiockUfRrW/+MTRFskYgB1JKHD58QE4bhjbZXd+zOD2hCx1dH4kpEUNks1ljjOH6tat8Ym8PZy2L0zP2Di7QxZ4UtETzrFSy7M/nOudOy3MnyzX8rV8G4MPPPkuYzxExxBA0JRQTH/3s59g7OV/Z1GAkQr6jMVv36Gq9+6NbtQVy3yFf+YpWJ6C1NP/rr2tG3/24D/wp3ru6A0Fw3ml0y1CFlbfbOg8NxEpjt8JZSsXwx1jO4qL/v16tqLMtPR8G2Xr9pA+CE2DKuQVsIwC26PCjaQ2NEqy1RUr83Y1vkAMwZCze/L1BGZAhBQBoXlv0ADMaCRG3CzZpGj703E2me3Oy1w57NkbEebybFFik3FDOpABpveDKwS4AL91+g667AG5Gdiv9/DR8Mgx80eEqc4wqQhQDrkgPqycuY4mVFSWhhbxtvKMM+i3UnvtQiHmQTbHKZdFC1jSByar8l4bINW9L9YbIPsm2eU4eZmUk3BlSSaWoHpB6N0lkrFQwotF9TqpWqBLIxXs+t0xDZD48UBSSYMyJpFAN1eNwbKbUTRuM1Fy+9o/z8Y/9Izz/4Y9y7959XfWsXIGYUS11MQXeFrquYzKt1DG0sLs3V6fPCt1mTbO7p6kAb7Fi6PseX1lVW7NaPnl29nvY3/+PyKMIld6IwvJWnaUcqSaeqVhWywXeCmI1zeCaipmzpEJIrVEy13K9Yn9nh7r2+AFyNMKNi/s467UiYehDUTZPihHvq4KYDOWjGjk6EWLKrDYbruzOqLxjMml0vyOcHezSp44cW90v5dAaOA4j36KQO7f0twGV2lYMfCsMMVvprAzEDBntGyElfRJLRH3v8OFY978zn1A7w3RyBe89OUV2pp5J3XD9wi5nizOkb7HeEesJtrLsNBMuX9jHF0c25US12Jq+Z65dpW1qQghkgRB6LnzxK1x5xPiX8Uhw9B6Mc4bA8NU7Br7zEOB54Ivv+Mqngd8G/MTX9XkjeE1mIEkLpuifi5ReIOgqK0qYmBiHZDNOpWQlCKacFakRQ+5aZDonledvO0vvf
+M/XGNKGVue9TzSzB8l+apibKmI8jXSb978dm8zvkEOwGPR/zgKxDz4B4NaYNYoTQ2boJq4nwAuY4zh2tWrXDi4RDaOECImRzWGLm+jZ1ECRAw9kYgTwya0TJzh2qUDXr/3EVL3K+CW+jsyXpFGxAM/oBy4qXRzYyDxjZrBGev9WJpijEaozliMBV+keUHIRSHwfOtboQj/sRXhiSkXwQe2hj6X15SUQyypEyWdyZivH0iH+rAUD2F8Z00VVHbbsKbYj60jIcMqDQpyg1RyuT8RkpgxBfHIamatGPDWYa3l4v4Nfui3/yFyIb+N7XDZoiw5JS3D6yPOe42ac9KuiiHhxBD7nkkzoW+7Uhdsy/0Z1gXWW6/XTKdTFoslJ6crJtPvoqp+sVynrpUt158zhJT50he/xLWrVxUNMob1mXb+I6t4SBcD1hjWqw0JhRGPz5b0fUddGhvFlLh9/xBrLDElvHVUldd9XeDhgc9RV/XoWiobXOcjA6TIpg0cnZxqq2hRJzj0PaRUov8BWdFFGtudlh4AMihfsn3NNyMHAB6PdoYxcGGG1xQECCBrt0RFEzuuXd7HSi79JaCyhr5vWa2XhJDYdIH5LNFtNizXS5qmJqaW7Dx9VD5SiJGcAr6qiDFRr1bcKJ/92t27rKualCObTUvXtTxz/x5PAWsR/tqs4cLODv40QeuJojJXke1XyGXrKuqUs5CN3sNQOkuKOFO6aySgyH3fTJlnMpwCP8m7MXH/AG99/Btgl3dyAH4EuPCOn/HuxnDuIiDGjs9PRjk8UtY5D2deCLgBFmdIcWrVjiuRoBVDH/qisfIBrIooz7GxRnliKZFiLEgz43zA+SSuOksmvvsqoG8gCRAe3YZDdJLHiEesflUFs9K2llTKBa9CnuOd48LBnh6Omw4jHVmEzhpMt/UOMYZEX9j3mWS1E1jqWhyR/emEw8Xvppr8GaJZnoNUVE1s+69SX39OkUpEqIzQ9T0MplG0dnyQAYaMd5WyTkNgrCig5KVKDj1SWiplA4XcMhh+Nf5SGu6Ugz4W1T/RaoQco8L2I4qlD0fKmaEPg5Y2bVMCYgSH5tRIGWdVorTtS3lMeU0qOUtdE5UDzqkY1OJ8pHMH8ZiDJWPMlB/8wf94ZNb3QRumGGtwviLEyHq9ZjKpmU4mxJjJObJcLNnbmyikbRQ6j4Ug2XYdfYycnJ1hrdVcvTE45+m6DccnJ8QM0/keDw+/kxvXIedfHPu4D+Q8Yyx913Pl8mVu3Xqdtu0wxlLVGrGt1mstpbJC6rSpzmQ2pdtsdH1T4sJiAWht9xdffYOcQwnQU3HSMlVdjVUNXdczKdoAQ/pGuQ5wtlpR156J1ZKvkCJCJiW4efNpmum07ImhtGnrsI6lnLpzz5n6LQnqbThzH+jxVlUAGkPIkIvSeRngz+GZyoI1HnDcv/+QTdex6fvC9Dfa96M4bXVd09QVvvLsDA3F2oBxnjsPDjlbLHRPF43/3dDz3eVafv3FL3FWuCwC1HXNM1mP1w7hV2zNThe5kYTGec6ILFMiJogY1gKbFEmowmnMGaxgcywEYl1r5ywmZyyC95ZaoMqJvdLUawP8Gg74PcBHHp2voRwxJsi7bJ3Gx+f2tXdcj+/lvXMAytUV9HNb958LAjBs5xgTQ5lQLpA3OSPWYIEU0mgcva/o+57Ke+KmexsH8v06hrkQzekjYDIxhhL42bJmQ3t2/R1B+Uyme/cHwDcwBZDf4lsqhVq2HXnoY3+u/l/QhhshlkYQVrA503drclRWva88sVcjEZ3Wgw79j5UsEehDX4xaxAN70wmLzQ65+wOY2X+6bRCI2reUC+xfGPWS0+iyxBAxzuBLPt0Zo7WlKWFK9De0Fg6l5lQFa7bkO0p0gmjETClxG7ASKV7eGO0WB+K8hsAQXarWgymQ//aPwkUFDxsi0lw85NE6lAijMPYHPQFK5DGkM0zZhI+UBBb0gfGK9Ppj3OX69X+E3Z2PcXR8TFPXCAbvPCEllssVzlr293ZV/0AMi8WCyxcvMptNMGKY1A0pqjb/6ekZu3t7HJ+esndwoMhFQR/Wmw3z2ZTFcsGFC3vcPzzi4f0HXLt8CWOeJabPqm4AkLPWE8dNx8HOnGeuXeFD16+OWv0Z7U0QYiCmQDai7VgpVSBZqxT62LP30ivwuRew1vCp5z/MtYv7Jfp32sgqZ+qqYpCQ7kLAl1bBOsdJa9Krml/9wot86LkPcWk6Y71ekK2hKoz0Vdvzt7/wJTU+UvZz4SIoAVQRp0Ed7TF87YOBbr5XQwpDWleMEOJoGHIWrLM4Y1is1rz82hsYa9jb2eNCVVHXFZOm0uclRW4+dV2rYnImpUDXB7yvijK4gZvPELJqdxgpRN+TU/iZnwPgd/7276dtFHyPMdBtSuno8TFN7fkdv+U7aGzNF/+HX+OkjTyMmUXOdEA2iVk1YbepcM7R9T2nyyUYy2K9LrnvUtPel9SogI2JiQhzZ86VfVfADwO/GUWJTGkaRXGKhJyDnr1lvJ+Mo3FOr9EoL8kb5V2pLdCqjmxMabaUKSLzgCBOz28ojlLJn/f9b1BS/O/2KOe8kcL3GpBkVwiQb+IAmJHQ/G7HNxgBeHwUyEKK4lUhzgHkQj7LCDldIOcP6/dTZrE4Y+IrrLFgDamNOLEEoyptzlcKhZcDUYzDVTWmqpn1Hcv1grjumdYViz6Q4seBv6KXpOG8MolFmfs5b2sSjAjYYhQLaz2P31dIV1nmrhhSNZypQHgMhrxoCwBFmWdbvT7K8BoZ0xlbWKyo9mmMOApAMDD7i1ORsxp/YTDWjKGgIgG26BgouQyTEbH0MahXLdt4ckyHZEpnw1KeM+YNyhDBmD2M+Xv58Ie+n8OHD5nNZ1ruFIbyvp66auhL1JVipJnM2PiN5vCjerEhRq37L+kQX1VMm4bYK1t/YSyrzRprDHVdcXR4xMWLB/R9h/eOSxcvkdNNMn8N0q0xAswoYS6nxOLkBGeg6zr6EMEY+r4jhL7Iixq8UWRH51UXIsVA6rtxP8ZuQ2WE1LesN6vRG48bZaIP7av7TR7XI6RADJHZdAZdx2a9JE0anCgC0G0ioWuVz1EQjJRVHXLYNmMnxvKcjM95zjCUkso3HwLwdhoAMqBk6LYcKjC0tLWk70Sjyo9/9MPkEJjUatTrSY13Dmcsy7MzvEBTV8TYY6iYNrXql+Qi5oW+Zx8D3qn8symKgACEHknKN3HWMJnPmE8mgF7jbNpg+swyRE5D4DCo03jjwiVuXL3CUwcXmO/OsbUn5sSdO3c5uvE6/8Nv/29JIeP+9d9F3AzpSK0S6sn0ZFYxsRwPlz0YcAlR1KmqKiUlOkfoe9qNpjHbtuM8cvR3e4gxVHU9BlNjeliUIB5LGjMWp5qSJh2I2KkEbyllqrrBtRsN4EYhoA/KKGGhKNvfGkWVY3FwZWiCZij9LwoGYIcGYe9+fAMcgPMfnx/73hjvvuVr8vidHeAmoAze+4eHXN7ZZT6b4cQqBOJsaa3qEGdxxuJElL1tDNV8RpKOmZ9RbRxndxecdh0SJjT+
9yH5PvCr5KzQyhDxmgLBqXqXVgBI1mierBB4SFGV4gCTZfRMU4ETyXnbcW7ICSOlnA5lK9kS8Q+OkGrz6sOYimNUIHdvrBp8tfulCyF66FtDShnv3NaZYIDRtuVEIyqRGbUBYtK2vqNDUTxMJI+1pIOMsDFCDmkkNerLHIZ/hNpfYjqZsjOfQ1FOzGT1WssKxxiZTmYsV62q41lDnwLrTUsfU2mMo566KboK3jgswqRp2NtVQmeI2vFrb28PX8R8bEEX2s0KMT8G/HGEoexU95ZxnsbVOCMwmbBpWxDDJE/p1iti1+G8V0gfjUSyEd1XzjM/PV9iZZhM58SgKSHvvQqYiCEHJSKGoCmFbtMRQ8/udI+YE7Wv2d+/wCu3boMYvBiV+0RL2LoeTD3R8kqJxKHtJUogjFElio21Y8pncNYU/vzmIwK+3eE9lMQK6sCnopMuziJB58EYy6Sp2ZlWkCLTeoK3liBCCIGHx8ccHh3x4HTJ3r4STnNU8Z6YI1L2soiSz7KAlGdptulGDsAXXnmds6IcmVOmqms+dnTEAdB2Pb/42RfwXeThZs1RVKzt+auX+d7PfJpqNtVSYOvI3lE5y7XLkV/4I/8L0s1XIUPziY4P/as/zr0HxxyfLWg32hxFshDjwGkW4PdhjGU2a2gmDTGmsWxRu1pG/d6IABTEcJziAPx37/USvsPYnjXOq6YIpYJDz0Yzwt0qrBTG0umhWCumWOKYQS8/k2OkrirOPlDGfzucczjvig2BFLcKuFu07xzabgzG2Tfpg3zVz3ivL3o7xsss/xy8GvVs1H077wyUTWjMIz0OUoKjsyW1NVSVH8lydVXT/ubXMAb8yzdJZxP6rifFHuMc7WzBiz/+/+DOD/0ssy89zcf+V/8y+1/e4Wj1AGt2sOYZtIFGiSKMMufHqNwI3mkJVyoQbxfCGNHl4mmmnDAhcy4ldY6MN6hY6fvFFJXYI4yOwVCznpNyF5RzoCSfIbJMpcpBjDlHMlNo0iLad758xlAcoJDxVkRm+Hyylpf0MRCSGtPzEL9ii8O2yqO0vER9QDszEGp2If9ThHyFKwe7TJqKelZzcnKCd54UM3XTcHq2Zjadsl4tNR3jKlJKbDYbDvb3cd6UhkWZrmvZ3ZnRthu890TRlI8VSwyJ6aQmhMj+dAaX4cUvfpF79+5x/doVpFRS2PyXEFPwk6ykqsp5SJlf+fXPcvfBPUxmLEMkK1kQsipWi9BMJqQQ6buOpqpIOfGR4xO+G03VnKwW/IWf/mmcc/jKaQOYpPvblv3ddSp0ZK2DEr0IWlK5d7DPbDrl8y98SfsPGKENPTkL890ddvYOMKISxMgw3xmIRVpYoGvZPzvjJmByZm9xhl+vcffvcDUGJud+653GuSPkXY+3+p3zRaIDl2ZQ1BzGPd61Sun2sBsIYI99pnGu9OlIQEHgSGNE6JxFshJAX3njDjEEUoisNxtWmw3OeqrKc+3KFc7Ozlh1ioYNKUGsQcQTYsAVgq81DjHCarOBdtt9Lxghl94SOWeOV8uxfDCjrcJrV7FEtUQuTBue+pHM0UdfY/7gAjuHT+s8lXTUf/Nv/WOc3nh1nIizH/ubtOEv8O3/p3+SF7/yFUQst+7fUySLgUQowCUyifV6Q992eO8RI1jn6bue0LfaNOdtjWJG5PAbjgo8bqL06DF6vd6RQx4DETEgqZQhZ620saUOPqZUuJADcqsOXMiZPkbquuKDmRcbDApEk4m9HsQ5Z4yrkNCreqwWIgHgjFfH6O+uA/B41P/okGJs1Xkp3vv2SdefxQH+0fdKKWrL2dAjdY11lru/7y/x+X/pPwCTefrP/30c/O/+APk44r1n56rnhX/2j3H3d/4MAItPvMTn/43/gOv/2j/NzmnDug+I+RHg5xFalEJS8uGD6I9RvkKMUXOuYvDO0ke9R91sQ4tWCKO4TtHNN8Ns5PE+rXVQ8sspKyHQlNSFBtoyRuAZhoZ/5AE5KGpzg3FW5EFVoRTmLkgCg7+lbzA4JVYsYocDU7sXpkGbeMijSWlWRCltM0oeVPWBwVgCfBc5X2N3OmV/Zw/nHG3b453HOc9ys2A+nzNp6rFMzlpL225wRTCl63q6tuN0sdDmQJsNG+84Pj5hMpmyODujKjmtlAJ+UrM5XbDabEp+Vnj48CE5Br7r05+mQojmk6T0KkkUzRFrWC7OWJwtuHn9Gs8/+4zCaTnTFjidpFUlKSTazWasauhjUPEY67n48qtweIw1lo986MM8e+NpvHPEQkp6cHTEdDqhLj3grXWFuKVQdIpJUzhkrNNUyFNXL2vkmoqCXExEkzk8Wbz1I1RIQaSM/7mf5zO/+qt8ZvjZL/+yfn3hBf7HX/X5vIQ+9ne+6qu+/pEf+TKMvwb8/9787a/2Dm/R3rg46lIgz1yIxCVdNbghA78lxki7XuNEaOqaSxcOGGrJDeCM8My156ic1bLA4gfnBCk7XnvjFjeuX2XqPYPwkDUGv1jCy68A8MlnnybOZiVlkFhuWiaffQEOj6m95zs/9hGkF944XLA+g/5HXuYX/t3/gv7GQ66/8F380H/4h9m9dZPFyTGvvPYqq+7s0fUXOF4ccfnkjO/5+Kcw1vMzl/40d5/7dcxr+7j//DMjv8qIakQYNGq04ui7tsyJJUp69Jh+ZDHeOt3yXo9HMokDp8EIta8UhZXS+6Sci0jp1yKiJGZ9ss6lSsvXpFybrt3Qb3pigtB9AKsAQB1Qo2FcEC3jpqi3GiNaOn5urbIzhXv2fkQAxqUatOeHfNaQ45FR23lbhlbCz2yoqobJdIZzljv/0F/ky//cf6YWEnjtx/57bj+4h/mDvxcrwjM7E+7+2M88cjXrH/gidy98kf36N3M69iL43SD/bRFv2ba+VadECDlj+iJg5IrATon+YzGi2qJRiSjG27E96RiKl8jFOlPqt2Uk5bUpaKVCUpnQYX6MMapZj0EkFfjeoioKMpIHNd5JOg1ZiWZpaPADY914iJF6KJsp7qKzrpCmYERmspafDQ9dKlyFFOOYD9VfuIZ2H8s8feUaQ0/BylccLxZkXxyPUrYSU0/fd2SBs8UCV1ecnp4xmUxoJhO8c+zMZpycnLC3t0ff9TRVzdUrVzDW0DQNsevJUffO0ekpRgy//Ku/TNv3HOztcHJ4SLtYcuHmdxPkJ/BEbWkMOOe18Y711NYQjbDYrMltSxcDIjD1HuMsk+mEHBPOO1w90QgzJaZNpTMlMHVCFNU+aGNkNpny4ME9aplQl/RK364IaEpqtV5T1TU7O3Nq55EciKkndJGQEuIsfYzaQMZZUgrbMtFzoh45gS8EIP+rv3ruqXq34x8CLqOx+gnaueOnvqZ3+HqGAL8FLVP7Wo7kLVH10bsdNC6ksMK1V3pC7PY5FtFn7/rlS2zOTpjMKiqXmU1qcoY+BNp2w8RfIIVeHTdf4USFxaL3mNuZ3LUYX3pqiCAkjDl3F6HX8uSsRsjp4gF6SPerU5ypuXxhj4cffYPTf/+/hBtHANz++C/xkz/+r/LdP/5vcfeVI+7df0j8F34
U/tz/h8HKTW5f4zt+9h/jmeefp0uBe5/8ZU7+xT8F1+6RD6ckyeQ/+Sk9N3PWXhUlUDGlCVEu1/KIgS+2s6prUkrvKWHu7bgbb/FC5vsX6NZrRFR5U1usJ2TohjfYjxyoJ42qmVo7poCGtGvft3TthvXZGf1mw9nZgq7v3uEC3n9DkEIA1H4uibw92wViUhxNTCHTIxiv59z7xAE4P845A4MU79AOV0zBDpMSGkrkrZr7phgh/SNGCBmOnv4KcXauz7xA+L6Xkajet6nrN12BOZmQloaJc7gOUlwwHH7OWNV6z5lYYBeLGj5SxnqnnmVOWDdUClAczoQrRt8YbeyTzx1WA5dAexvoPHjZ8gv6PighEgoR0BTRB2U22/IzM+TqBI3ks5bRUcQvyFGlM81ATNLXpgLzp2zH5kAZve6BNDM4Wvr30jMY3YQ5l/K1okNvcgamwEV11lKiajybtsUaJf3t7O6y2WywzrI53rB3sMe9u/cw8pBN27Jer5hMJqVcx/Pqq69y9do17t9/wO7uDCjyv82Es8UZvqp55Y032N/do/IVL7/yKnfu3FYeSIqcrlfcevlV7HqDF2H/6X+Kvv+TiFmNecOq8qoBT8Ymw8QqTGMLaW+nniBVEUnKCjf2XYcIuLqiKgxvQZhM51ivksqu7ambhlXbcXWqEWAfAskqwXATE9PdXe4/fMCqbdmZ77B3cICxDu8TlVPN8rxaYazj+GxBEltEn9TJHAiyQ/R5XunzvwZe9Z7v/p7vYbVa8+3f/h38hT//53j48EF5hcOYPXL+UeCT55C1a+T8PPBd5XuHwJ8pv3PMbxQ21QhUVTVTzsUIZZ4H/uHf0DuWUt/CfYDt+SZSVDMTI6dG9/+2ZlwQ1psNl+YzJEWmu/PSorlUqqSE7wPZ1/QRbr1xG+s9lbFMpxOWoefo7IyLe3tsNoG2bUmosme9WnN9mLH1il6gaRqquqbrehzbHhuLdU/TeJ6+fp0v7K9IHzl65B4fPP8Cv7j5JfY3z/GRZz7MtRevc+8f2+fhv/tnMNnyvf/sv8szzacwLtPtHfKTf/jHCZOSYriw4uf+4z/Ls6/sMPmFZxFjmE8m7M/3qJzjdHHG2XpFG/qREzQYZ91neUQ+Q5D3DP5/N8b/4OCA5z/6cfxsTtismO/u0HiPz1oFMARMALNJg3PK6fBWFWW7PpCzkogX7Ybj+3fpuw2nD1QJcr1Z0ncfQAdAKARAg8mJsdx4EJaLaVTTHX/HObxxf7dTAOfHo0DP8K1Ba55cDrlUctrlcLJiuHjpCiff9zdpDw7hC5fIX7lI3/dgEvUvP4f9+6fEvdX4npOf+gxSeeZNRfw9Lzx6k0f7XPvX/xnsC9+O39vl0szT3P7PYXMfY+Z4V2ON0PUbIuCtLUQShelzDArjlyYvKlAxAJH6+aZ4a9aq+tiQPDeidf1DBJNFCOX+5Rw5MMVU8jd6YFhTgP6Sx06iJZNDswyTgaQHXCy55SFiHIQzBj4DaWhRrBerjPtUWKTpnNb0UNin5Kk01NwWIymSC7p6BtxhNv1u1ps1Z6sz6rrGGMNkMtEcnXecnJ0RU6ZrW1JM7O3sUPmKvf09zk4Xo5jPZDLhYH9ff8+6YvgX7OzusNxsuH33Hjs7c7oQ+NXPf46u61itFswnDcddS9tueOPBXb77U5+iO13RL29g5r+f3P9ZRM6ovGfd93zpy1/h4eFDXbIEbVQS4qbdEgCHBU2ll/sg9POJxYLvQUmpP/83/iaCloPGlOm6nmpa87df+AKpC+RCCtWHNtE0NdPZlJOzBfcPF4RXb+u8xsim7dSIlcqRG889w3Q+G6WPx1wgmvssBaLj3u5QKdbeOXrvaY1ljYrPiDhy/m1I/h2MrZqHr4A6etOyf2+S8o+Xj/v3yPktFOzeYQxsbZNhYgy1d6QQaPuer/kIHs6w4oxFyRoEwFAoMx50iSKSUvRFhn4aMSi60603vHD7DZy1dK/fpo/FmS1EVyGzv7tDXVWk0NPFOBLQprMd6ukOn3vpZSRlwhAh58xeCHxHucyXX7vDWYFnq6rCWccnSzItpczRqicsDqn9DjP3FOvPP0X4xBuP3O/yv/tjfPqP/FHSTxiefuopnrn9+7D/s3+YSTOhmc/0SvtAH7rR+A8jTAPJJeqq4ns+/Wmef+ZZ5ru7ilCGwOHhEa/dep2j01NOVysOT46VCJs1uAhBmfKa0vwo8OLXumJf8xARrl27xnd+57ez7npMTvRl7o00RZkzaldPMcznDTsyQZI6Kau2Y7np6GLPYr3kwd07eKMN41KIpBhol2djA7oPyhiKfrek7SEkQzVrBtTrEUMvYxfLr6UVyHvsALwZ8n/0+8UkCQpVFdYsDHyHDGJw1lH/gZfIf+TPkg8eEl+4TPvjO4QvfYrZzg7P/uzvYe//coXP/ev/BzCZZ/6zf5D8R3+Yy89e5uh/8tN88Q/9iUcuxb58jeov/z3ki44NYN3rGHsL0Am1kxndeo33EyT25BipnCWiRL+QEs7pYgw5GVB1sKHET8/W4ZBlm1sb5FtLtJ1BD7OyqRHt7AZSWvnohec0gCHF8KOlP5Ly6PnlQiI0Y6ssRVQGR4ohf0Yualh2hPwTbCm0+dwfUUEea5R4d742Y7ui94EXSfH3cO3KFe7eu6NKaUV7YblesVyt8ba0wI2Zq1eu4r3n6OgYV1m8s3hrmdQ1ValgsNay2fQsVivaruXeS/d5+dVX2dnZ5dYbr9HUExaLM67s7nNjPmfVbTg+O0Zy5vjsjAenJ1ypZnQPT5jWHybLdXI+U3g4RT75bZ+k71rVcSjCGkQ4Xiy4e3zEwXSiUVFM9F2Hr1Tu1zrH1dffgMNjnHN8+8e/TVn+JW0lKeNqS9+2WBSy1JSIo64qjWAlY8SSjeVLr73CfDZnZzZF0Pc35SC2tefVN+6WtTn/3ABZyZxv5eAPa649DkDk08CnQD7zmA76o1GZNYaqqmimExaLBX3Xk/MPAn/+zR8yXs3WCTHCyEuZWMH5irbtVBGvU4dq6it8COccrOGe3iZCLFUiIKSQ6PoeMYa6mZBT5tlnnuPunTuaejMWySroM5TeGmPJKarjkFQ45eDCRWLfs+drxFiq2uKd0wAqZS4c7DGbKio1cIJi6tURJuOfvamGsuhYCODOFnD3ZwD4zZ/6BO2kKek3NVD7X/gS3LqF956Pf/gj5Jxwfkb16vdy+If3ePXf+w9Z37w93vZmfsQv/aH/Pd/Pv8b0b11gOp1ibUOuLCJWo74scL/iuf/m7+fl3/fnxt/9tj/7bVx64RL1pOHTn/4UrmlIVuhzwibPhcqze7BHSpHYBx7ce8i9B/c5Pjvj6OyENme6rqNtI/ADfKMdgGELW+fYdC2hD/R9r2qc1iExqxNcqgBi7BWZNYL3KqiVCn9ms1yxOD4iLRdscixorR5oxw8ePJJG+yCModRvqOnPoqWMKUXEVRjQChXOZ8VEG1GNqex3N95jB+CtHuhzEMXQujEl8KrqNhiqDEoIwNH+lpd5/d/4v5P2td1m/vh97v6J/4T57/
rXqOOUZjrnqZ/7EXZ+/3OkdoN5ucHuz9mZzrnz275Inj4ab8S+5+T2HeYX9qjrU8T+KUzWSNCLsGMNayPYdcfOzpxEUO2AnFSVuDD9M4JJouVnUZm0iSLFGzNVNFSmoi/lSLnkg1VON2sXu0JoUUS/sPvT4OlqVGmGKomsin7WGGxpKSxp0OlHNQui/jsYQzBmFIsYmhNlSheyEVkwSE5YjOrNl6BQiYtScs+DsqBA1AjLiikowLCef53MLV56+SLXnrrGZr3W5irOcuvOHZ66+hQP797n0pXLzKYzFqenrJdr9nb3qOtaWfZB4XNXVaw2G27deoNlv2K1WPHw3n260DPZnXN6esLRwwdc2N9n3zouNxN2JhMOFzC3FW0bsBPHF/7A/4uf/rGf4tKtj/Jj/9H/DVP9GCm9RJI1IUSF05yl6zZqOPpAjJF55VlXFZUzzJrJtg9FyS8676m8PipGhJ1pg2TlUQz6Da4qzk4hrMUYSTDKL4tRzQOPobGO2tVcmO3Qp8jy5BTxjkkzZd122gxFyrOT8vigD53h3ur57ttAFzKv3z1hsbwO/F5ymgJbHQEpJEIrMJvUuJjY25szne1wZxUIhXwofIIsfwny2/D1hVIGqymrGcrUtsDMGYJr2PSRFAKblNmkHl+c5OFE2Jr/oVpgSJk5fN3oOREDwZSgwDumdc10PmfZtUwPDjg7O+Fv/OL/wHd/53eVJlOCMQHx2szLOVucpszVq5c4fnCfpvGkFJnUjtlkgi2lntNmWvgEQszattyVCgPpI0gkZA3FfF2rFO05GQApLZ1Tito/hDy2cjIC06kFY0kJPvLsTcyXhCv/wh/lr/znf5BUbed5cf0Wq297wO6rV7j92utcvHSBOtekXhGOtt0QbM/u//mHqTav0/3uz8MvPcVT/8zvY+ewIRwYbF0paTfqmuQYCDliqwpywvqK609VXL92ha7veHh0xFdee42X37j1SOzyDR2i0t7T2Y4K/VgQHG5QTXVKestJzyhjPTAEiSUYspb1esHq+AFn9+9DDgUZMlihkI+/+p2MQNPXfNNf/yy9ubpl61qLRkQIgww8upedKS1zhGQ8IlG1QoxoIPH+IQEOYztJuUAxIgIhqgGy5YJTQlLSlr51S95/tKFBvHLGqlsybVdUzYSYIs29PT52e8n1w1vj68JPV9z7QUeYKFQnUfjuv3KZv/fwVTi8Bfy/4RwgeePsiH/q139x+0F33/sZ+DsxTuqGn/nw8yxKDfmWY7EN7kutDQOBoWQD9TVFCtiU3O2g/msH3flc3nJczjV990UuXf4dOOuYTmZ87tc/R9M0WGtJMXDhwj6zaaOHaeOo6wntutU0ihMeHh5z++EDPvvZX9MWwKsV7XpFBrxX0l23WlEby7WdPS40My5fuMB80uCsJcSOp69c5d7ZQ17+Z/5r8h/8yyCweOoN/iL/Cj/8p/8d6uVzeD5H6AK37t/jwb07TCc1u7MdXOUx3hJj5v7hIU9duaz59uIwDVLHKZ+TnAac9cQUaIPuI4djtViBaPdFZ1VC2pc+EV3qabuOnBWe7DM8ODri7OQYMZlZPSG0LQ+OjvF1Q0wRazS5g7HkVJgbxUl70zCGo9MTlqsdXvz8J+jb74RCHRKTmdeey/sHTCdTYkrMJxXf/vxHcGT63PPCl1/i8P7tc4s7h/wPAv/lmz7KiTKNTYw0vsLkxBRBvNZqb/oNYmt1GJ1jmkH67tGyM8O5ttLqWJOF2azB1xVdFxER5vN9ptMply5e5MaVqxydHfPSyy9zdHTEpg9MJhX3793jZ3/u5/ie7/0+rZDIWuYmhaQZc6DvIy9/5dVSXuqYNDVn0lJVK3bmM5x13HlwwqWLFzEocuMrT5+S9gRxjh7BOC3hazvtFFi3W8O92gQ6ulJ9o+im6XouoKVqdx6cELpA6B12skDEMb13gasvfobbn/lb4/tMHl5iduci63ZNv97w8he+yO7+HvP5jJTh6Nu+xE/92/8S3c4Z9f/zt2M//Ie4Uj3FDW+AuyXFqi2Gs5YxFJRQSxtzERtT1VI1HNPphEF5lJzJNMBF4OFXP3S+7qHpEu3amDFDHwAREgJW0bgsgsmpdO/UiqQ+RF75yku8+Llfow8BazPzyYybly5xYX+XLrTcunOPu4enmvI5Z4cGw1s3jZbgblqcNdv0zrsa596Pr90VeETLfzikpfAvytnjvFf3OGugl7LqUSCqfikGhuzGwCl7H1UBPD4yyv0tENlAQMuQg5IcMgIhwEsOftrA79hCN+5PfYbVHVi6BU3dkCrLpbbjqcPjRz7lB/6dH4AMP/lv/yQIfN+//3386L/yowgvAf8tPJKN/B6Ev/ENv/O/E2O/3fD8w4f87ZvPjmT97T7QnFIuegJGjEaTpRxTRsdAj+OQkyoMZHUQtMVkhvSoATLuZ7j/4PfibE2IEesc+/v77M7m7Mzn9H2vZ2GROV6vV6zbji9+9tfYbDacnJywXC5Yr1cjyZCSvYhdx8Q4LuzscjCbs7+7q3yBusI5S+U9uzs7TGYzLjVTXvs3/vIjiPmXv+cv8Zlf/se5+Sv/sApk2J+hjT2bVv8cHS1GLYQQExcPLvDGvQe8tHldeyc4S1M3YwrgqcMj9tCKis996UtkY8hEcihQc0y4SnsBdG1LZihZslryWIygJLDeMZtOOes6Ysqcnq5JRuc2L5Y001lBy86pMgJDRcmjZXHCzt4eZ8sd7t/5rfSbBiQjOZJFuL6/y3d9/GPsTXfoUubWndvs7++xd+miOuSh59lrKz778uuc9Oc1B66iYlyvP7LPGqvVKDjliKScaAVqFE1LYlUvQ5z2dMgKy3fnGhubNx2Z2kehqmsMcDCfqJhNs8PewT5PXb/Kpd09jBXu1Le53x2SYmK97JnNJyxOT/m1z/5t2o0aSZNzcbacQvgxsVqsWLYt2RjcyUp7Ngjs7e0BmaZpWHSdCs30gZyUzBlDwJb24CJaaTOMna7jE+Xvn/3ClzjzVeGOqCP9yeMTbqK8kc998ctYa+lawTWv4J3nGX+BH/o//pv8/D//b3Lr+/4qctxw9V/8/az+yi5fqV/kqSvXmcwa7t+/z+LkhPYH3uBX/jf/V7pdFaVq/+DP43PPj/7Uf8LVF78Ed+/qrKZMjir3m0NPRsXD0rnnfxDLSSnSdy1d22laC9BKkd+E1mt8A4dkndvifOScCmqqios5leCkELChCJMZaNuOk4cPiLHHW+Hm1ct89OlnmExqTOnpMRGLyfDG4QmqfF4MbjG8s/kc6z0hHDKbTTk5OuI3Ys6HM9Q67SC7dW4fu90h4heDsaqWaoxBSstzI6ZUlSmSQc5kW8iapaJq20K99AEo92Kspm21NPZ96QAMo3iaxqrHUyRZxRTFIwy8apj98z9M9ydfovv0q1R/7lM0f+jHcJsJZ3ZJfJg5mM8hbZXQfmJ3j8V0H6mnhP/6o8y+9PewaTd85Rf3+RP7O+T8ZdJpB2mK9Z/i+7vfxEe5xKt8Jz8hDQZD9LZoi4Ota7J8BVP9dRqbyakfg2fNCijjW
Bd0YNUmnHXn73SEdAap4oGFrU16zEhmGhdSBllg3QwGKQRAgyvNbZMUeD8pUeC3vvwVdttNeYjQfLMZiICDIIGWBnnviIPbWDaqCv6IQpdZN9NQLqTdtNJY//9I9zUyy82SSwc71CJcvnyZ48Mj6qrGO89itcRmz6rdcLI45d7de9y9c4c7tzXaHDolgs6pKXM78xVT69mbz9mZz5jXEz5x9w2mfT/e14hUiLA5WPMTwZD89nCWKDS/8FfZ/4lTcv4C8/4XEAMfL9UPOW0V0kCo7t4nJu0nPwSrY4GEQFWYxD4EvuvXXyyOikCOowNlrCGEUIyA/qo1Wluu+77A3+f0zctybSF/NH3w/GqlUXN8EXILZGZfeJH5rVv80NnpSAoSsdjqn+Tu7UxoZ2jLUJ3Ip3Ym/Oj3fhdXLl9js+k4Wy2ZTael1K35/1P353GWJmd9J/qNiHc7+8k9a+9ae+9Wq9USkhBIIAFCmEWAFzCYzTbcGXuM8cIdfMEwc2c8eBlfG+MFg8dcDBhhzGaQAQNCQhJC6n2vrr0yK/c827u/EXH/iPeczOpuSS2BdMfx6erKPJV5zvvGG8sTz/NbnJKhhNXFJU4tLfHk9fXZfHJ6AUc4CAAEnqw3V2EIhCTVThvfM1CWFjyJVB6myKjxS2Rak6nb3L3rfhCzYK/VjGg2Q8q8JGq3saZCeSHduR6rS8usLC4yGY/Z2d1lfzikrEq0dhQxo0t8X7K5cQvBScBpXVBJhHRiWIvz85w8usyt7S2CMCKQnkPcWIuuNM1mg16n656X56RmMZZA+ZRlgRTQabdqRo3Lihmj8eMENrcAeODcWfIwctkj30kizb90BYZDQt/nkXsu4CmFkg1SfZbNvTG9bg9zM+f83/sOFvtfQiecY+s3JVWv4vSxk/hRE2EFaVmSjIbsnH+e8cnbAzL+0lMsPjmHedFilMH4Gm1LbKVRmno8O1tqrGMiGVmbyxqN1Zp0HDOJJzUW6XD73BYDBJIwDA9OK8IJm7n4UlHVmCip3DpmjAZt8T2fnc1NNjfXsQKOLvc5f/y4w3DULp1lZfGk4MTSHGmRsztKatGog/vJ0gyy3IluVdPT/8uT8Z+sL165yXpKuWDlZSRXrzaJcowq5T7LpWRr/xZ3YjN1SdjtFziQpqpdSKfmavW6P8ObcbBYeVONgFe5tk/WPg8BgDj0xz10C46+oOqVwVPujwWs4zxOLje485v+PmfPnmB/b5/nBhexwsmrWixZUTA4lIK7rBVVex4rJWkcYz96Ch/DQGn2zROUye9hvRMI7y/jySaZHUOZkvtn2WguossKgcXoytXmY8A7guTtRMEv4vvbWLlf0zCc4tvMjneK6Lc1IEq5CK8oCzfpp6wC64aUmtryTt37hKSq01uitvg0WAJPOdxUDYjyTC1NLO0MAIiFqn6fA0of9fu6FVaK6Wbj1AyVkm6BrqWNHWjaMQmkkA5tXm9qpq57uqyCvQ1hqjyJFT/DlWvfiCcED9x/P/25PoPJiNxq4jxn7aWXyMucW+vrjIbDGsnuovB6DiAs+Agavk8nCOmEEVEQEjUbREHI+Z1tlvb3PumwDgeWb/nyb+EXf/YXiVdj/Njn7X//7Vz42QWk2QTa+CwAL37Wo3jWp9YyF8c4y9Q/jwPLWVy69E+PbtSZfmHjgxfjBOKEhUM/Z+172dvuoadzQTha62q3ybve9EaWjhzFSIWnLcpCv9NmbzikyFIC3/lp+EHEhVOneHF9k6w6nAYV9X8uNamEpBUGjLMMISEQgkQbpFBUxhIXJYHvUuhWGzJtqYTEav2yfaSWdrPQ77bod9vs7w2xEm5tbuIHAfNLIf2lOZYW5imTnJcuXeLm1iaD0bg+vTuWReh7+EKALcnyDOGFWCERqjbPqeVhG6FHpCz9dogyFuX5jpFTanRVEXk4lcAkQxvt5q3bk5AImrUtK9QZGOXVvhGuNZseQcvDGGpGj8CvB6wQgm6r4XAcVUHb+09cHb2J1pEV4mxMtRbSvXk3y0uLnHyo4ea4NqT7E0ZVTpqmBFFIc7KIP25Tdiazzw0vraJzzbi/wy984N+x/vAtzj/5Id7yb/8eYjvA6hJniVwHmljKvMAYi7ZO5CxJYiyu1HWwgrSBECHy10Tn+6yaAD8InA8LU/+VGtgsBKGvagCfy4JVlTtAZXnB888/S1kWdFstzh45QtSIwAsw2omsKaUIwggDHFtZIU5ukFTV7AAGkMQx4A4RRZHPbn6WET24TOquO/R6Xbuv58dUfKkobutEgEPBBXWWgzoD49o0G+P+vf51bUh2dpnYHXcorAxWGIp4jKf8+rwwvRcBUs0YYp/B/v+5DgDqGTQNAOzhdCbMUMFFCZWT3rR1/dVKj0vXdzGl+z43BosmDBRFXkBlyA4HWr05xnnqKHNao4sSqUDbR7H5b6GaXwTBG9FZF1Fr1INDlQZ+hSlTJ5oTTEEZAptntIzlC1/3I+zcepys+k9sppdIhOOmurSN2/i1sQhVUzfq2526cBljkMoZOqhaUEfUJj/1GuuUB6fHQeFAVdrl69yJxa3AlEbXp3EnCqHkATBP1GlkByG0swBxyvc1CKypsNJz6eX6w/X0FF5fw+xEWteijDUOTCOmAq/1IzQGKycY/yZZfpTd/YFLkUrBxrWrpGnCpYsXKfKMqqxmE+bweiKso4x1fJ9OEBE1G7SjBr4fEIYhjSDErzMlOghJjhyp5UHr8hEuY7F8/Tz3f3+LJx95knuT87zuV99Ffm+IoMDYX6csBNjzDIdD55+gDWWdHgVqkORBenDq4leWJdpolo1lNcspheBi8wTafjWlPoLn/Q2sAKUeJc8uY6pH8YPA1Tbrk0We584l0himEpFKOmVJUf9dlRW+74FwqPzBaERVaKxYBjsEUpaWFjmyusL6+i3O7O7WWQCJyQ80wqWQHFte5G0P3s/CkVVKz6+FYDSNZoPSalpFwdq1G5w9f8aJEklYXpxnqdflxu7eofXrDuBprE0RQnL8+HF6UrF39TINz8M3kooSgaUpJKGSTIoCqxzAK8fJsWJvX5OsdQJUy3Nd7rnrTp58+jmSvKCsmQ9lUTHZH3HjmRdpnwPhKTbWN9ge7Doq8LRMpC2+ClAKIk+xY8aUmDozWDoeuYQ4TdFVQVYKtvcnSBxMJvAjjDEkSUycl+R5PnPxVJ50G0ip8XyP3BZYXcsE15OtkeacqO9pc29IMo7BihqIBSaOmceVjV66uc5M0c50GaY5L167DllGww8wumJtZ49JklPEMT6CvTRhfmGOCyeOY62m+XtfDO2Kx/+nf46OclaffCPht34jz939KC/+8//Ajfucje/zb/zPVInm/h/9f9C1c7U5ktuo9DTzYbRjBBhNEEa1Sunhfet19bN/ic9Jq8tZvufVtifC+YzUlGMzlc6mxuHgxlAYhly+folkMsYiOLo0T+iHFEWBL5XDnlhLoTVZWVEZSSMMaDcCknHF7Wi/20sCB5fmDjqBp4iiBkI4F9KydHa8qmYv1QXU+jDj5tgrVStfftufPpia5mWp
3KF0RvezFpullOQILMLzZrcjPTVLIn4mAdvnpwQwS6VOHf+oxXEECMeVpjKgBVZoROCDEGhteWntFtgKW1YYDM2mc0iL0wLjRbOPSCsosgyh64dsLJV4FDyFiL4JY07glZZes0Vnrkt3S8N2ii4yZAaR71FWhjxOXEBlBY2wwfk7jtGNFMO8ze7XdBm/axN93af1v60gjUUFfo1UdeqCpTVI7RD/zt9HAm5AG2trIR3qSelO3RrtThq1BwA4ogRy6gNeKwAKVz/2pGSK7p/WfcBlFJx3QW3yU0eyU6MwKeWBkRC1vK+UKFtrn9t6wsGsTEFNixNTENyh+lZVaapqQDO6TinP8tGPfrjWxw/Y3d1xgVhV1mFynX47NDalcKfIjlT0/YhW1CRoNmiEIWEQopCEnodXf3YZRexduIsgcsAh55kuiZRzUJt/YZ7lXznNwqlzJG98ANluosSYUv8+e3v34yvF1atXKY3BGjdZq6py4lG4zJOwUJSa0hQ16FHgS0E1HLoAQH4dn1h+A4ZVfOmRa0NWVEjv3chewdnTlv7cPLn8DbQZUVYlRVawubXBrbWbDPb23OS1lkBJLpw5zUKv45AxSlFqi1U+jz7+OHGZY+QdYK4gyHjw6DG6b36EZz/8EU7v3g7QssIFdEdWVvjiL3orC50OqtXGr0qyyQSBJQqcLLMSgsFwTJHm+I2AMjdIY1jo9bi1P5hZqsI9CPsBIAFrOHnqJHNInr16xTmT1ch/ay2ZqZBiKkKiKA2U+tUx2FYKGr7H6++5wM7+iOFkUgtwuTEbeoqHzpzhvvvvI+y0UVpQjmK2nhxgq9JhUdwgpdKGfreNqSRFlbKbjBEqcvNPuCBVFyVpVdQSsoogCEjzBOUJdAVShVjh0Z/rYI2lEQVUVencGJUlSRJuJfsIa/E8D6Fc3TqMk9k97QxGxJ7CausMbZRkrpiygSAvK4QVtWsoHFlZJU8Ter0+0mhMnlOmFWma4VnBSq/P+WMn6cz3iVoBo8mILC84/Ztfgb81RzQXMHftbtYp8O+PuXrfB27r45fe/qv0/ud3cod/L57vYbST+JaBwpPewXokJUEUclAMm07RZ4FbfK6aO0yLujzmPtkdXMQMgDzLaApbs27c+pakeV1ek7SiiLwoyYsSOxox15/HaMPO3h5FWTlFTyWIoggxTg7G4/Q0Mvv2IOHfDn2W5nrMzc3R7/cBQZymjEZjxnFMp92h02m58kmSEGcpaZoxjOPbevCzbi5FOquMzEzappGvqD1ldDX7KCEVylO1pt5rpz1+7oWAamQpdZ18Zv87/REhsdIw5aKJqQsUU21vgRA+VlnKqmA4zogCp9Vva5qSBao0cbQRT4GtQBfI8F6EDRBpxvxcxPzqKqX0UdWEInOp1X6nxevP3MFwOGZzOESbgEI7elheZtzYWKNhC9Kve4wrP/irmGYBlTtht/6Po5SjMV7gwE/S8wmkBE9RIpyJiLUYnLSrqlXKhHAIXAlIUWMhrHHReX0qd5l3Bw4pEagaACJMTT2sSyFFOVX3cqlHlx49tInXKoTKHfURfl3Tr90LTeXST9P3m6a6JJKprbDDBLgJZ8VB2mV+bg7Zn0eIa5TVGi8+PaYqqxqoYg7S/PVRfTbdpERZSyQkbd+n12jQjCKCsEEjjAgCH09IoiDEj4KZtLEQIHyXDcjKHC0EtgIVWjzls9yb54bfIOh2aMz1yYoBhn+PpxSNRoNu20nxylq9cRKPXKDh+wRBiBEVlCFpMcFQoKzEU4rcVPjPXIbtL0aI19Hq3EOv08aPIkZJQhg6MZ07Tp3CWst4MqEsTuKwGB+l2XmO441V+v0Vnn78I0zGY4y1pGnG7u4+5+64A1PktKII5Slu7Q8xuqrnhGZKv3CKgBJPhfXK8DCIc7OaugWiszu871+/HYRl8fv+Eu8afhv9MAJhsD5E+OhFSXQCrg4usxAfp9NsIkKPVuTjSzmjG7pOn01foijk7PETPDIc8OSzz1FoTUt4GCy5NSTGkGlDXmNTnInmlMt9kPrxhODskVVQgmdfuuIyXfVICZTiC+65l0de/zCm4cy/tLScPXuaQhd89NmnGKbpbLuKxxNOrSyQZCXNRsioSOtslcIIl/Gam+sSeoIsS2m0mrRb7ZlGRVnkrK4s4ylFEsc1e0AgFZhaoEx57kQvRa2RIZzjnNnbh5cuA/C6u+7EdNp1ZklT6IoFY+DWLXzP454zp0E4jAjiCLDsvB90RZZlDJKEE8tLnFpapSV9kvGIQTzCTixatDFS4Ic+BsPpZ95O5EfIIKB5Mkd7a/hJm7J5UBqQey3mekuYoWE42mevd50nf/HvY5Xl3n/81zn78S8n9BtUZXGb2p77fwWsA4fKT5+DJpXE93yoAxHqgw7iYP0R01LnoQWk124irGFprk+r3aTRbBJYS5Ek+EFIkSREvsfc0iKeEIxGQzrtNo3hiLQGuk73nzOnTrC3P2AwGoMQdJsBF06dpN2M8H0fL/AQQtGIIhbm+pQ16BdhMVVFuxERTSZMPEVe5GS5PjjsfKatLieEgc9Cu0u70UBj2R0OGU0ms0ztbJE2hxdVh0Fy7ravXfjo85ABqBeTOg5gKvdbe9C7HDm4p14Xm00Fdg3LEKn6jgblRWhdkVaOm9+Q5nAAR5U7gRdrjKvjC4uqDIGn6R09SdRtOYOX0S7XrrzAvZmL3j1P0W636XS7LB9ZQRjLjfV11rZ3qbAMBnsUd/g8+r//o4PO9qD4zgG9j/4l5K89x3j7g2RZCcptlqEX4EUNmstLlKXLBjh9GFn7AJgD3ACH1sZpCcAe/JvLEDgqTCBqoIm1tcDQVO709jY7aUuHQrcSpBuX7vRbD/9mo0EUuTToOJ5Q1c5aU/SbFLVKYb0hHESiro3HIwa1XHNZ/TsspzDmLgea6nbJ0hRpnEpbZQ1O20PgWUtP+XSCBoHv0W41aQQBfq3H7wlZG/J4M3ELIw3PvvcxBneN6H7iPsR6F7TjXvvzcyihaDWb4Ploz0cqDymeQIrjaN3AmufI8pysKmlEIV6zwVx3CeXdiZCurLR94xpCP0joX0GLW6T6CmkeY2ixWH45EOH7Ia9/+E3EWcnWYJut9Q0mw3UevO8BtncHJFlGYSqkhKooybJ7SLIztJstOtElhJxg+ShTSuzaxib/9fc+wMrSEkudNmHglPysEM4EapYTcrLOMxCmBfBBePXaKDBvvMGlX/g56LmN4Ma/+qc8+f0N3nTpvfhBQJXn7Leu8jvf/SOsP/jHhC8e48Ef/AHmnj1D6EkCb1pSkrPy3LTE6FLjEqMEj7zhIZKy5PHnnqOqszih75FWzlSpstT6BS6jpQ+drgDm2k0unDvB81dvkFelYzzUKn/nT57i/vvuR/t1OQqJMiU6Utx913mk1fzRiy+wN0kQQFoUaCEQyunz91pNYuPS7i51LJlMUrx2g1ajRZ6kjIuSZtSm0YgotGZzfZ3lpUWKNHMlNukhPDf2Iz9ASg+Ly7pVlcYIJzYUBv7snrSuKPNsBuLy5O11XmsMRhikJ6kqW0u
iS0TgEXpNxDBgkI4xWcGjN28xmiTM9zucbUco5QzGiryiykvavQXac/NURUE2GtH5xFne9aN/hw98z/9BvBLTvnmcu//+9zFZjzh6bpXtxn/jiX/wv5Av7gHw6I/8MNf/6jrnPvpOus3WQUp7mj4Wfwj2Q3yu29RN0xh3khVCztbYmSDaNGMqRI1hcOVRpGJpaYm1rW32dgb4QcDd5+/A9zxMGEFZ8omnniEvKpqh4sTKMkeWl7iytjFLYkopyZOYNE2IQh9jLEcXe/TaTVe+FQ6YJ6k3fK2dW+PBQ3Wicb5PqA5s0j5rzISFKPA5trRMp+GuoShKvJ4kUB7bw8ErXRzrNIHy6q3cUHsDvLb2eWYBOJ15UVM+pmnlg5KAcXQbA8insepNGDuHow8qpPKxNictNZWwjA4h0oWv0Hla15UitFB05xeIWh3wQ7CGvVvX2d24TlUUs4flUuUKqRQN38PzPM41zyKCgGtr63TDiLnVpVe9m3vuvYcj8XcyGH2Ine3fZ+PWh9ndGVBVBdkgxvclrbklYq1n94yYIlrNDCnNFIhX6/9M0/Ruw61P99bRi6bvMWMUHGri0OsHOt/TIOJAlthVBpyhUZwkNW1ZOLGhmqM9cyisN30BSGsOuQE6TEFlnQkHUiLEbyKIqfQbKMuS+YUF4sGAXrfLZDSmNBoPQSQlrSCk3Wq5On/oOwGk0K9rgS4AqXRFaJxE7wd/4IP8/g/9PlZZ5j/0eu783r+JP5YszS84JUHP9en9d92JjkKnPRB8EQKB8ndZbF7HIuj069OmBWwTy10AhJHk+AVBniRsrq+xduNZSrOBletU5ksY32hxkgnaGC6u3WD9xhr7u5vE4wnWGB5/9I+IooiyrCikA4jF+9tUZY62hmazha8U49GXI+QqVr+E4AmX6fEDMm1Z3x3S7zbpLSzgK5/UVvWg8ABHD7LWOIGbg+xo/aWF7/wEHDk4BaIMl//qz/PQ33gPjXabsdjgt7/7B7n14GMA5BfWuPGjP8GFH//H3Pq9FCpLMwyZVIcEgOr3N9qwsb5OEMcszPc5tjjPtUbEIM8oLETCQ9vq4KA2rYsyHabuKgVw7OgKhS3Y3N5F18kBX0pW+vO86cEHaHaamBrAZ4oSW1ZYrZECLtxxina7wQcfe5Jb8RhhBJNJSuQLlABJReQHVNZz2BGp2B+N2RvuobXG9xRhEJAka/hBgOf5VGXBapI7alZZYjUUVeHS/F5YBz/GyUKHgRN3koowSZmuDJdurDOq56k2BrTh3O4ei0BZljz9wkWEEC7ThI8xN7EIh2kAjh0/yXhnG2VLlhYXWFk9wly/S7fdcDSxoiDOMqyGJEvpIfCspLOyxODWFsf/zSpf/YmvZnROEy1+PWr3Ah8bPcb2Y59g+4d+jfz4oZKRhPTvvZ/ove/hxo0bHFk9Sq/XQ2xuYKkI/I9irEdVHoDTPhdNKoXneZiyNmebTc2a8+5ASDPwIvW42t7YoBEEnFhZRsllspUMP/TwVYBAEHg+8802b7zrbuKiIGo3aQU+8tYGNza2KaZZTyFIsxwhJXeeOUmZF8x1Gm4zNVCVBSbL8TzfXZM5KOk4rRRDmRcU2lCVBRhdB7yfZX8AjcAZUYGlMpoiz9CFdv4xYmoQduiZTAOmGp8lgEq/dj2Dz2MAMN1GpjdgsVX9eo1cFLVohcClpBqd36GszlDlbiGU0snzIqC0gvxQRwitwRT4QUjlCaK5RXTYJMPSEwX7164yHOxgrCH0FJ61joEgBNJ3wi34kkqANIo7T96Bp91EbqTzvOVf/TAf/cv/K8YrwUhO//aX0/vYHFbv0e++meWFN3P/vQVZ/C+pTMyly1e5fu0q8SRBhA08oW9zc6IeRNP0l5C1FHAtfympnQPrxIhzh6p5pNb9rPOzP9iQp9Q9d/Kp9f+pJYiFO1VSuc/WGGxR1t0v8ZXCmLKundZ60tY6WdbpB9SeBYc/TwonACOVQilNwW8hUKTJ6yjLEmMN870eq4sL5HEKWJQSBF5AFAWEQUDgeY4PLCUS52ImrEFiyKqCD/zdD/Lkd34Qq9zz3nvro7zwY/+Qd//Aj9HttJ2FqzWYqmSh3yOjRp57tRGTWcSyxJTuSM3TNkJTBROstQRlG2klURhy8sJ5lo8fY+PGceJ8zPqGJs2eBaAscj7+wT+gLDOnWVEP4cFuclC/lKoOZmuDBiFIRkP3g0IADwLnQHwh2AxrfpVmo0lepFgvcJ4TNRfY1BkhrAOaYuXtNJ86q4IAT0fkRjB1ycSCTEO8MMDvtsEabj382G2z8tbJx7iZPc7RuUcY6oJ+p8n2ZHIIqPkNCPvjWODFl17iri/+Yq5cvsrqQp93vuH1fOgTj7ORJBRlwaGCz6xfDtbuqYqh4NTqMuvr6+SF6z8lBceWVvjCh17P4tISpa9QlQVdISuDrbQTQhEgIp8jS4u87e67+eili9zc22c0imnON2mEEUaUjurmhi5SFyz22xw7tsr29haRHxBIn8oUlNqgrfN7kBIaraZ7PMbNL4OlLAWR79OK/BngdgpADdVBdqbb63Lu9Gm0Lt1z0pbOc8/B1jZBGPD6hx6o7bwleOcozTkEHr7ysCi8qEG6MMfTj36cY0dX8IRCG8swiamKkjhLnVCX8ul5Plo7nQejLYv9BRrdHkd//U6SZsijD8G4fJp3vuPt3Lyxzs2hrjOvB+Mi37esbWzSXohorHpMro6xtkLwi1j7p+cI+KmanJaFEbO1a5oFPVwGFvVaZLQGH4oyp9lqEAY+YRASeRFpGmPyglQ7q+8iSYgCn5UjS1TGQlUSeNNnqJ2WjLUIpeh02pw6ukI8jhE1RdaWBdZAmeWk1WQWaWs9LZNaqsqNNQ21eVlIXExdKV17LY6I09mspHTXqBTaOt8JXU9/bexBHx1udQCNEDMgoPm/Twbg5clp97DrBPNscZTKAdcwplYGdBPLDxKySYaQjkcdhBF+FDAaDbAve2+TOR36UlvCxT5R5DTmI89y4/nHKYscaQWhFCy2GjQLDWnqeLueBxinvTxN9xQFp0+dwGpQwuf47341b1Cal17/K/Q3z/G6f/nXMOoGubgJYo0ijRCyTRB+LWH0G7z+oT6ve+gBnnrhIltbu3h+QFGV6HpjFlLWroJTLGm939Y8TlVHfEZolIDA8zEG7FQ7QDqKoLUHQKupqMS0JixrdT+lRH36n8YLoj7WHQQG2jiterBOwEU6Co41LkCx1Kkl/fIBWBsfqakGdY7lV8BCWbwO5XvERcbKyhHyIERXFVJYIqkIlAP4eb6P77ksTFnkVNalS4tcU+UJW72dmbLjdBgVJ3acBHJVOdU86SyUtbD4UmHyEuk3av2FOpMhHK1xKkTz9Jv+A7/1jd+LAL7uJ3+WM89/GV59n6WWXN3MuHz9Fvu7Yx45FLwWWVKfbad94P7yw8CBzsrq4EWXyqnRunUdzAJEWOtArJPx/8iN8inmFz/BOE7wotAptuGkPa3xoS4Z+Z6PsRUQAPOze1qY6/POX/sHfOT8j3DtS5wIVu+pc3zx3/iHdO
88gpYSL1Ms3DjH7okDZLe6tsj+lQxvfZ1ep81yv8uVzR3nUGgt0JtRTg2CpeNHCT2P+fl5PGO4a3ef/KWLjLSpc/+Hx8bBlwe6BeALy2gUo40z3rrrzDlU4LMVjwkGLcJmg0gohDCQ5UyxLEr56MqghM/y0iJvafj87mNPMMkTKhshpSVUgp3BCItfj2Hh0OGmwqfC5CWq2aIZelghXYpVCpaXlyjL0rEWhJPL1sC+LgkVzLdCh6+p70EKgXfIiU2XBcoayizDYGvFjuniZCmT1LFCrEWIZ7HyD5D2LaTG4DXbKBvhBSEnL9zFi08/RT6Z1Ej9El95eI0GQdSkNzdHVhYIXaHz0okcZTmqKGhiefEb/4jf+an/GYDe/+df0HjpHu7813+FyZ0dLn/Zr7lyzuNH8N/5nWwu3eTSP/1ZHnvHZfjICfjmNvLai5TlQbD22WazX0uTNXWtsk6fwDEk9IwefbhwJKVE1geATrePzTPSNMGUmmQ8YjQYsL4/JMlzpJC0goDlXou+NWhjqXTJ3v5OTUm1tNstRuMJldacWF1CSIVE0Gi3kX5AVXg1u0eSJolT/ixztwZrQ5ZlFEVW13sUQRgSRSFenJPXS5XvebTabQaDwafsh+ldCimJi4xiXxNKD2kgLjLKWrRt5vFyuNUD0vM8599itPM4eY3t86QDUP89Nc0BpwPg+67+pyuEqZUArdvUUJI0HoP+GIK30plr02012b615lJ71mUMDu5EgvDxu3MYL8II8Msxe7duURW52++AfiNkea6PvzeEFKzR6DxDKqcB7vkhQnkIz3PpQGmxukSnklO//G4Wf+YLaTbbWFvWmtUxgtBpmcuCvFCo6mGUeRLppwS+j+cLMBol1az2a+sUlxBToR6DxOkIKCln6S+FqsWF6nTY4Z6tPQFmtSfq0m39wtTYBumSQ84+2AUFaGb1fV3XH9S0X4V7XdafqetNU0wdAWefV4ef8tD1QF22+A0sDyAs9Pt9Kino9PqUcQx1Ktuq2ujCaIR2blbK90njmDTPqaqKndGAxf+4zOBL20yOTmY3euznvpQiLWhHTcDdk67NQcIAyiyj2W6icWUPd5pglrF48s0/zW9//d8C6QKoX/+Wv8y7f+HHOP3xr+LGrXVevLbOxecvko/Hbtya27m8r9am9sHThatO9NS/dNt3t80PawVZej8bawVRNGRr51GyNMNa5zmAVc4q21qQ08T6IvBIzUOWLC/Mc+L4SVo/+b/z1NY5rBCc/+2vpPfwEn/4Zf8UU1a88X3fzFf9+D/hv/7VH2Djjqfgcp/oe76B+Mk+p892aDca5HmKLyWlNrePNeDOc+doNdr4SwLlK/RoRBBIlnsd8t0B+aH7O0j4c+hE525hd2eXUZpTltqxaAKfy9euQlUx3N1nnKa0wgbz3TYr7Q69ZhPPkxgyhFcHx76iGfp0Ao/J2Fm/Yio8JWhEAUnmTmyFFexOEpIioRUFRL7vsokqQHqSsMbA5GXBuAZnep4HVYnyAyQuAK6qAim9WbZJa0N5aKEdDAc0BwMEpvb9OBCPsliyMne9IkEYibZP44vzaHp0wgDl++xs3KLZbHPmrnu4+MLzbN5aB2sJfcN8q4PXbNFstpnvz6HLijLPScZjxoMRZjjk6rc/zq/9q1+bnfR/77u/j7s3/xrDnzzBhR/6Dppry3TnuvDT95DfBdd+9N9i3nrJ3cBbr8O/gcb3NPB2A+LxqwkD/ek2b+Zd7w6B0xyxqEF2U/wJOIVAJZ3w04ljJ7lZZGxvbtOMGugsod9uoYKIySRG+B6dMETokv29Pco6RZ9mGd12E+n7jGOX+fN9xVy3hdVO4nmytY2VivnFBRqdFvnYlWTzsnBBuZB4QhL4AUUeUlYlKOnGX1Xh1an4g3kPyvdBm5k1+zSDQH17tk75l7qiSCogRQlXysir8iAb8mqdWP+bqD0/tK5mZaXX9Aw+04f22tvhbQmcmszU6d7UKX/cgy5Lap1ZpruJsBK0YeXITXq9szSaTQb7O0RRiPU8KDLC0h5ERMKimh1ks4vF4uucjRtXKEpnmyktdHzFQrtNZQVxmfG+n38fg+MZ/Pw7aP7S61hZWWF5ZRXh+eB7eDrEVC6NLfIciyD0AqS2XLuxxvzJCkGIL0qEZ7EmQsoCa5YxvBfBz5FnOcbW+IaaumdsTZEStQ2wcGlQTymwxp3i60jJk85rzRhX8BVCzdKqU8ezgx6fbvS2PhFP+6femOuUIdbetjnZaeG23vxn+gzClSmsqB+L5TZzjalpkDtle0w1H9xQrYD/iq7eTX9+3lHOohBlDFmeYsrSDW7pMg5ogSkUKElVFOxsbzM/P0e/2+Ou37ubh9/75/i/fv//wviWM//wz3Pvf/kL9DodPN/HConE4glNYUCXFSZNyJMGQcsFa1OhpjrHyNOP/CxWHSzgRTTmhXt/Df9X3sDvffAPSMuKfDICq2cT8DWNeGNv+35ajvnkRynLND1u7EOkaYUlBftxnBNTCpSz65ZCoVRw2zt4SnF0dRUvarIUneIdH/pBEJIqivm57/6zrJ/9BFjYWn2Cr/1n/5yv+6l/RtLdYnC1gMkSSw/3aTYDqiJjMKqfnzhghAgkUehx1513obXbIE1RUBQlnV6f5TxDW0GxOyCrnS1nxYBZDddJvVhrWbu1ySBJnfiVNly/cZ0sSdjZ3mKQJGR5jlQ+zUbEyfkl3vL6hyjGYzZGQ3JrUEKwMNfHF6q2sjVkeUkr8rC6pBmFlLpW7rSGIi+IJxlD39HfPOXhewqMs/wuK0NRlPQ6LTrNBoHvEwUBsoJSKLZu3ELhsmJSCCcU5PmEScLx+hm0evNcvbnp1Nykox6eGQxZBsqq4sXrayjhlEZ9T2DFJoF4EM8/z8Kxk/hSEk8SCguFLllYWabRaVHmJZ6vmO/1ac/PoSvNeDImaLQxVUUax9zcWCeMxzzxLU9ggoPgtGyk3HrnB1n+me9k88Yu/M9voGi3WVhewDu1xs5b/+j2ofhlcO7NZ5i7vkCapLx46RLD0WAGAn4t6ezPpAWBk36e6aEIW+PA63zT4QOvlPXhRtHvddlvtTDjnDibMBoOuTGacHz1BHecPcbm/i5r21sUacJit40XeFjrUVlLu9shCAK29/YJgpClublZZi7JUvxmiyIrefSxJ1mYn2e+18VaQ1lVFKXGc37DhI0mjVYbKxyV0RjNZBIT5wWVhCLXlFXFcDSk1euRxSkoRaAUVVWilKpVQ/WhmzzQH7RCIH0fqqreL1/l9F//DhYkxh0wdfl/Fxrgyy+2RhVPN5hqRjrjoJDhfk6II8DrWVn9MzQbLXZ3d6nW15hMRiwtLpHEYzws7UAxzbcEUYuy1aMyho7SDG/ecHVaCyBQwtJrBAghuV7e4Nl//wvor3/WrUuPXMXb/HpaT76HZYTjNwsP42uE1UgEWruarhSwu72FlJIsidi8mXL6dIPIF06l0LhNUIgjCL6DPP97CCvR1tT1LHdEV2pKi5Tu1rVGGFOfwh0SwgUFTmde1CkgpzE/rfvWrIK652rFf
4SorW6nDlo1nY96M5qibU198pfuh7D1mc35hdSATVkLGt9GEzrURO2YxoGyoYtqLcJ+DJDE6Xnuv/f1vPjkU/SbTbAFoa+YdDZ5+if/EfnpLeZ/4S10fvDdjPcEjSBgtT/H2dNnkELQL59n8Y+O89dO/QjX772fjurRmOvgex5iaphR+4gr5WxTq7xg7ep1Ttx5fuaNNB1iQkB/7w7W7/jjevUBUXl4Gx2ilo8C0sG+G4ufZDTP7vPQ37PFsX7hQMDj0A+9ajsoF1g84Mtwm/6zWLOPEBnTyg1CUlYHJzMHclQcXV52gVgQYAMfWxa877v/EutnPjG7ppce+j1+8zt+gK/8mf+T5s45jiIQ9znXvqrIqdKSJHEqccwyJwHYP0+n81u0m03S4YRka5N2KwJP0evNzzJWG5MJWVrTTex008CNo1nqD3aTlFy7DFNVlozHFaHv0cpy2mHItSwl1yWjUcGzcYzXb9E38OTVqwTtOZqNiHJjgwDDbpZRac1gPCEK+0gJzdAjyR011FQZp08cpddpOmOmsnTZvsqS1UpwpTFEjRZHl5dcqUjOpLQQUrHU6aBNRVFLuEZRhBDC6QBcvAI4alp3uY1SB3N9wVjY2ER5PufPnqcV+FSVQUmD8kOK/EOU5gK+HyBrxPco2+Xc+XOEYUA4CVDSIwxCijxjd3sbKRXdZtMpl0qB3wxRjQCbKPpX+ogvEjOsDKVErs1hjGGlv0A7jNgfDti4toZRA8KtefLlvdlYCnZ8gtInEJLe8jLHjh/nt/7bbxPH8avXn/+ELQyDWQbUjRDHUBJyutdNJ5L7o4QLIn0PFleW2YpHtANFXhREUZejx4+RFinLC/OURcG18ZBup+UEv6SPTjyurm/UVGqXcjdlSbvVwvd9mt0ejz/9HBZJXhVs7u4RBQHNRkBLSbqtJslkgvR8UB5RFFGUJckkxlhLnBVondHvddjdnzgTIiGpSoP0QmTgUcUJUirHuDp0WAiDkCgMSNOUUmsaQYDAgHRmY0662bySBTANsnF7QloUtykPfrr2eTYDMkxFYZz88VS33CKYR3AegU+z+V4683MoL2AyTOi2O9hGSFlmxHFCWWqiKCQSzAIA7UVY5dEwGcO1G9gyr9OjbmN02XLBMJlQfNMT6G989uDSAo3+mV9i+Su+FazBZgXW87Da1uPP/b4VUOY5/V6HnugQ52Mmw208bwVBjtFuA5YyAOthzSpZ+hUI+RtIs1uf3t3C4gm3XZvaIll6Xj3q5czP2Z38D9EdD+1GU5DTjPMPbrWdneQdw0LWBibCHlgCi9qudHrwd6n7aY271jqfbpYIpJ2KAr3yibqBp5C1Zess+WVxxCnxEYb7R1ld/XI2bs4TKIFQluHKNS79v/416b1OvWzn236PnXEMf/vLUalipduj3+s5RcBGA4GgWbRYiI64z9QGIzUyzzHaUJQFWZqSW00rbDmqFoaXXrzIvQ/eT5VnjpNeB0Bf+XP/ErA8+/D7wMI9v/NN3PETp/nEtV8gThxv/tCe9UlHNIcC82mNblolOTxZxad5r9ubAv4M0AL2we4AEqMr5FRtcnoNxp2UQlH3ic6RaUk2GFAyPhgz9UUk5ZBsb0hrZXUG5zC6gFJTFgWTJEF/62PQzLEfOAnPrCDwGQ4GfPgPPohMS1738AOUVU4/6OB7ilajCdJjnOXTi2Ka7pbCmQctznVgd4gFRmmFFg7uGfge87052qHPuX4fP9f4Rcm4KBhLsM0mw/0RT9y4jrYGMR7VpTQ7S1ELBHFSkBUlvZYPFbRbDadUqRyQ0vM8PAlFPEEpReRLOp0GSnlE7RaNZtOVIU3N1BFyNj9ohyDcpi9qlTprwTtUAugFHjpwwbyu3du8uu+lAF9aKDOCGs9U5Tl5VtHq/yaIBx3YNvApBwkbGxu0oojQ8xmOx2xtb6G1RhkotebKYJ+3PfwIoa9QUnB0dYWTQcgD3/U1WGl54tueAOD4L3wF0d96N2m2z1CCv7BE1IjwfI9qs83d3/s3eP5//XGy0xs01iPu/gfnWdpeRAaKJE7wq4o7L9zJY48/9qe++UPt+ClwrrBi6j3iOk3WxxlqNhJ26obnfrfZbDPJNb1WwNLiMtvrW9y6fBnlS/LRmGwcc3b1GMZYJllGVpZkeY4xUyS94cTxE3RDp9NvrKHh+7SDkPXBsH7wljTPyfMc2g2kL1nfHlDpw+l4ge8rGo2AJCuY7zbxVEBRTHFAlqpInURx7oTgTH2gOnwmqKqS5vw8nuczSSacOXKcvKq4srFGN2qiJaRpRl7kr+hHay1FmjnBp8rZRr/W9nkOAMBNV2abs8Ug7FfRaJ2m0biTxaVldFmRJCl5Gbt6H4I0L5BSsryyQpbn7G+vI5sHSoCeHyFNST7cxJQ5NZ+tFhKyBFJSpDnNVoR3CL17uBVF7lL50mJL7dLxQmAq7Ta2GmlZVRUWS6QanFxeIvAqrNIE0iBVhBCtmePeeDxHq/c1IH4OyJxXAM4i1OGMncKfQ767NJio/RCcHLAbRKJ255si7qeuUtro2eR0J/p6Cza2ZglYZ7Usnboh1iKMqV21DnaHw4xCt0lSMw7qZ1aXEg7vJ9RXJjBgIAiaWMAPG/hRm3S0g8USj38baX+bpZV72b61hjKWjcUnGD/y7O1v9j98DO8Hv5Sw8mu7aKcWOZUplUIShQ20LqGsyCYpEktpNFlRgBRkVYkuxkjl0VtZ5Nrly5w+d44o8LB1DQ4s0ije+Uv/kHPPfiVYwcpH99kPNtHVXZTlrde0Ybs14kBD7BX0HPuKLz+DJoAvBXIQvwxcJMsLykrTatWBAU+AeJ0zcLIWmySI1FDFGQLD2//r3+Y/nvt2x1wBgmGHe3/5G6DSmPEYEXoIqcBAmiVMkhFb3/Xb6L/xa9Ao4dEj8HV/Hq5DXpY8e+MKDT9kYXubRqmJToeEOFOlorKHqx8OL4KhqQSnT53ggcVF+Ehtu20PaIzdRoMve/sX8eKLF9nOKy6cOcO9c3Po3SGP7tzi3MMPcfLIMf7DL667eiuCylRMzYSoS155pcnyin7k4ddzv9IahUeeV1y+cZNG6IMxBEoRSZ921CSOY4Zxwl133enmprRUpbP1taZGMiiXqVO1jsc0sFPFgfeDqIGyxliqqsLzfDf/6nFRFAXKU06fQElKI9BSETUKLE9geQNRo0GWZcSTmOdffIGO9Bnu7ZJbjcHiGWh5PmeWlomUqiVVBCurq0S5Y/C88+98LfbiV7KzvU30/vtd2UL5xFlOvraOL1yGImg06Dx6gXPf/wbUXS8R7ITMPzNPhUbhgtk0y5ifm2dpYYnN7a3PahR/qqaUAxpLa9zGPBWJq/EykjrrUOOQps9aCEFRWbYmYyLfsjQ3R7/bZjQegikJPMXcfIcg9BmmMcNJwmA8IS0qlPKRQlCWObc2biGW5lmY7zjsh5TMNxtsjga1LLhzuOx3WiwvzLM3HFDog33MjXN3Ms/SglYzpNNpMBw5xoYQgvbcPNlwTNjwKWqrc+lHUFWYspxl9rQ1bG07YylPKXYH+1hr
CT2PQTxGSIGuXrmxT4GayWTigqTPgAEA/39yA4QWgiU878+xvLxEt3sMTwXsD4csLS/x/NPPoI2mKEqqStNqd5hMhuiqIJ6MGAxHmMqQ1gYoAggCKLMx8WRS461MHelZggb4yxmdVovOuI38jTdjf+oq8bc+ivUsatLg3r/9d5lrLFAKHOdSKmxloTIIYw5qorVtrdMkl3SjAJspJrpNnIypyoxGO+TYqqUsC8aDIWHjGJX4FgLvp8FmjudfZyQEFqUkGDcAfaUwwjiEv5kJ8073rQPLyLpMcHjPkULM3AQNLljwlKI0VV0qsof8ouv0dh1hz6yZa5T69HtZSxZri3PlMgchwIyQJlzNTvluwQvDiLDRJh3uI4Sufdo/xsJCwc3rc/QbTZaiJTbSEN04iGjVoMXxZo9+OEfTDxjt77Px7o/wsX/xo3iJ5Vvf/bfYWF/n8uXLM2tj7QkqbQh9n5WVFboLc2SjBKs17bJiZX6Rm9dvcP7CWXcylbKu0wsa6Rx3P/5VSPl+NibPkExOsXFjhC6L17bU1f0deJ5zUhSCstJoY6heJoP7mQcB0yzQEthvwvITZAns7Q7r8kwKbNSVHUcjrSYjF3ANR3SOHOX05XfxjT/+C/zqX/wuAL71n72PuckdFGFMMR7iVyEaCYWrJV/5yt/hyvf+J2xUg4hefws+8FNw9utcn2kobcH2/j7vevgNVFVBnGakRU6RxI4DLZwWvo9ldb7Pw6+7n9PHjhJcX5vdWSNSlIUbk2HU5EhvjvkHH+DS5Rt89MknyGwF2nDPvfdx7733YDf3mA98AqXwKk2OJNUV+1ZQWmcvpo1hMJyw0ovqQ4Njz+jaOGh3d7/2gKhX9jorsLS0xP7uHldvfcjZTIee02AXCqk8wlBRlSVSOVVIqTwXCFhLI815S31Pz1+6Qqw8lOcCTWMNd2wO6dGmqgKu3BgirGVxYYl+p+M2OWvwwibIAZW1zM3PMRoOOX32DDuDAZ6F0ydWQacE8gNE8mvotXzKNKvLgB7d3jxeI3LKkYAcNzj1395Ga3+Pp7Yug0zpN5vM9/qkuwMmRUwQBhjZwPrPcXw9wds/hS4LqIODNEkQnufKEnnOsaNH2dvfqwOwP73m+07sS6gQ3wuo6vr11FlVCoHRlWMGiBpoV2PFBuMhWZZz8dqIRqPJ0vIyy3U5tcgLsjRmOB5za2+X3VHsgMxSYn2PdrdDPBgyGo3pNkNMqTFSIzzJqTOn8NpNxuMR1kK33abbbROFIVJK9kcJRaVngmnToK/Xb3J0eRmsZj3ZB9wwi+MJYRhQGTeGENKZqGl3+HU/SM0K0wgrKLXhVrFb077rA5l+uTth/av1BqDLirwoPuNn9HkMACRwN9y3hb3rfprFec5cfgMrK6s899zzLo1bZNjSLaD9+Xk63TbJOCaIWlTlHFubm/hByPLKMlu3blGUB7VQo2MmwzpKte7Uby0IZbDf+1H2/pffYg940w/9IEd/+2He9Te/iccyyeB4SesDX83xp78Qo7QDOBnnT2B1hdalCwSsQSiXcrVVRVbkGOvMidJRgyeef4mN3f26knWZdus5FhbmSZOULIlpdI9SlN+IlL8CauhsVOsChQCEclaYuqpo2YqFOEYKb0bzk57jBStZW+EaVzYwxhBoA5yinY45truDwOlBi9qUxU6T9eKAbysEqDoNMz3bT32kjTU1qrrWZKCWZEagDiGDl0YDwtrnXkpFN0voA4z2saMBR3Dytc08x3/pBRqtkHvv/Qs899Q1Tj/+NvjRMU//zZ+k6iS0Lh3lrr/+1+h17iAvS7SQvPim3+Djf+dHZ2nsn/7dn+A9//A+TuiTpEnGbjxiksYIBA3f1c/6dr4uQ0jiOObI8jLXNrbQNX5jerK3uOjdiscYjP4Q5c2B/CrK6r/x6bZqAYTSmYwcXVzi5PIROmEDazTjOGZ/OGR9b5vdNCUx1Z+ASmWxTAAF4juxlBT5iNFgfAg2YImiAN9TxPt7ZFmGVRBmOXvPX6X14nH+wvvfjzElJgio5ipEKBjuD4iy0HnDVxWi0hRhgoletoAsJVjRAJaBrfr0a5GewvcCBvGI8WhMksV4QmGwNH2PB8+f4sH77qbT7TohH2FmfbfUiygHGXFmCMMIoRSTwQ6LLY+7Vpa5tLbO0qmT3P/6BwniAi0USytH0HnG8XYPXwjWtreoxkP2y9Jx3I0lywsmSUa70axR1Iq8qpjrNDn3pjeQpTFSSJQSVFqQZBmlrpjvncToiqpygl2B5yH9kP3RhLTIOH7kJEHgI4Q7YamachWlGTzzAgAnT9xB1Ww5ooYSQIMFzsEm+H6bu8+/x2EOrSuFCiuQHvjeESqtQWqazSYC8JXH3RfO8aEP/ztOP+TR4AVC8QMIXaD3N9i5tU3/wj2UhcZvhM5JtU7h+cpzMrbM0wub4IEMBKYq2S9L8tKp1lWyQHs3kb7DYQiclGyWZuRZhvQ9pPKptKuRLy0usr7xJ/MGeHn2cH3tJh/7448TttsMB0POnT+LraY+JgZPBc6x0gh29wcMxhMaUYTVFc898TiBVAyLgqcuXeHBex7gjtOnqIxG5AXJ/j7DYUqiFZVVLiiTAi/wmeQ5zWaTIs8dGK8siELH/kL5nDp2HG0tWpe16JOHNZZOlHBqaZFJnhIXJWVVIYWg0YhYWujhS8lgGKONcwbUWmOKHNNwoAZdaaytXrVGP0vbC5cJUfXz1MbUOK9PvohMswDj0Yii+MwcST9/ZkAihAtvh5/6z/DI75Lv/zE7fy+i/eh7SGod/6oq3GlRCDY2brF2s6Lb6RFEBWWeEkURC0vLnDt/jg//4YdI19ypwgLZzja2qqbJ2Bn43f7dPyT/4d+djb5H/84/5kH/r3H20jGaW01K39KsWoRKgXJpVFtzgI12bmbGiFr206KrkqKsqCpDWRR4NqTXCXj4/nv446efY3t3F2GFcxiLXVpmuLdH2Gwj5R1Y+9UU5Ta+76Ht1E2wrgdJiW8Nb7x5nWPj0WfQweeAFY7v/xTH9/c+7U+/ttYFjuFMQV69Xbh2+VP8vj34O0ngN38Df+4jiHc+xH0PfgGDGzc4+ivvRGYB1dyY/rPnWdy4lyLMUdoyKkue/Os/eduqMTh6jRe++L9w16N/CaVC2v05BJbHLj5P4PsEgY8pnH+7MQ79XZY5oedRFAVh4N9mZzxF906SDCGabO3tkeflp9ywBY5NcnZ5meNHj9Dvz9Nqth3WoqyYm+uxsNAjijzM+jpFnFDxmaXlbu8/M/t6POkwGD+E4Zn6365g7Tbz/VMEvkcuoJKCrY1t/viZK1RC0Om2aTSbSCUI84rwkmHp2ApoQ5wNkZ7CGImSgpWnHqD/4hkGFw4913/yZjDLwL0gtrBAu9/Db7XYW19jc/MWyWjCbpZhrKHfirj/wjne8OA9CKDKcsIwICsLoroD+50Oe0nFJMtRymLylGySs3H1CmvjEfefu5uVs3ew+9IVJoM9qiKnKQSP3dpgt5OifFcSzKxFHOKllNoyTguafkCgAozRWCSVsYziGE9KirI
iQCGFohmGaC0Jg4CFhQVM5fwXrHCOosIYRtrn+MqSMwjSFRhmgbIcHdgBz7WbVC2HE9DGIOQxlHwQeNJhVqylqgzaVPjBzM8T51ng0tyB3yDyPaQpObq8yFKzIt3pMH/kXZg0Qe/vMrl5k1K2CVptsnQPjXVMoRpMp40hjp1lstcMmCb8xuMYicDzFGVZkGRjRDuhLEqU7yGlR1bkbs4Ygy0LZL0Ze55leWmJrZ1tJ0D0WUa0L/+tne1tJnHCwpGjpPGE02dPz7ItLvNvEMb5n4wnE4bDIWmScOPSC0jp0ep2GU3GLCwtc/LsGafgZyEIG7R6HY6ePMF4Mub5557lpZcug+cjcP4DRZYAFm0NxlRUVeEOZNZQZgVxnIBQ9FeWAUtlSsKoQa/Xoy96qMCjrCqMdqytsnI0w73JmOXVFfZ290jimKjVQpfa4aSiBmWez6TwX60XXXZP0Wg2a6fK5FV+6uW/4za7eLBXu8i99vZ5ygAIROursL/+s3DWpUf03JjNH/m3HPkHp1EX/Vnqoqzy2s3O1dv29/fww4giS9zEE4KzZ88yvzDH+MaN2SeYMsXUaO9psxb4hmdAHbxYtmO23/Q4v3/iIs9+0wcxgSHceZojf/GvIh47irL1iRxDM2rQbnbwlecGQOA7Bzzr3OSKPEOXOa1mg0anzVve8Do+8onH2drZuw0RnmUpt65dYW5hkYWVe1yd0BqksEjrBH+mVrFC5xwdt1wfCXGAJIfDMIrbmjIGwQ2MEBjx8jj7Zc0ewdqvOfQet9f2p/RDRABEQI4QVw89SZDGlQ9e7frMq3DkBW6g+fv7iL3/iD//eo7eeYEbz73A8d/5UnzlIl7rO8/6NEvYGexDqg6QhgBGkIxKRmlG0w9odJpsT0ZObGg4IhKSIspdXVtBlmbO3107Va0wdEHctJyC9Ckzje97jCbvYXtz0+mSv8r1T5sUgjMLc5w9eoT5lWUHUgx8lwr0a3ErZVmenyOtNMO1NeLiM/NUP/x504AW69TAbtzykeIvgvgw2C1gj93BgKQsaLabeFGLG2ubSOUjjGW+O49Csru7w6WdTeKiQFy/whvvvpNjnTZ5mTqRFS9g7soZ7v8f/yf+6N/9MMXcGPFP3or9376QGQrV7VNcvHgJHefsb2/ihT6ddpsSRavR4L47jnHmxBE8JUnznCgMyNOEJI7p4kba4lyP3XHMYJKQZhlFUdBsNVk9dZIrzzzLtZ1tpBGUScJgNCBsNzgy12d/u0V/rkd3aYHB7i57oyFX9vaoDgUBWVZStjVeQ2GMpTSWK9dvzMTDjDa02k3m+j08IWk3G8x3O6RZhtYlQrk5PoljxkVO1GyyF4+ZUx13GvQkReXU5myeuYwXsBuPGecJZVUSeCFl/l7OmgNsTl7WBklWgFFgNVEUOEMw4+SOVVaw0vUw8TZZDF94z1fj5SX5xGJHm+iNXUbDguxEG+H7WAllXuBLnzrBglKSfn/OKRLiSpXxaMRkNHJgNjRBpWlm0DXvBvkRhLqKtZosTpjEKcJzrqFGuqVTVxW9Xp9G1GA8Gb/mcfxaWp4mbF6/SrfXc5TAeoxYXAmg0hqLKytJW3H5+RfI0pTl5SNYJFJ6rCyvEvg+Fg1W1uVRJwzX63e4/4EHuHFzjUp6jkYLJEmKA4+mTtQnz1BCUmnDYBIjkFy48y5Kz2HwpTE0W02G8YjBcMiR1VU6jdBho4xlnBo2B0MmcQJ+gywr8Hyfdr/P/s6uWzOdXztBs0mZJvAqpj2y1mBJkgnG1pnc29aCT9Ysezs7iE+3/r+sfV4CACEWWVy5h+3Tv3jb63YhJg32mZ+/m83NDaIo4sUXXyDwfc6eOQ8YktTpr5d5xXg8oCoLBoN9Ou0eLx+Kogbt3NZRT6zCgxsHUmSpx7VTfwzv3JsJZuSLe1z7z/+I8/f9GPt7KZU2TJIYY/aYazQ5e/wY0jqAmfIUykKpSzJboawkr0pCbZBpxjve+Ai7wxE31tdZ39piFMdgLVVRsLWxzt7uFlIoomYTIRSBH+AHPtZaVBgcyMsC779wnv1GCyn9g0yBcGYYRhtsDUD7iuefYT6Fi0eO8dTxkw4DUKNMPT/A6JJStzBVm3j41cSTHCsh8B31KAwi2q0WuiyZTMYkacr80hJBFJFXu7RbT9BqNZCArzVf8Ud/CMAf3fsAu/2+2/gNpJMRF599Cmuq20Zr4Pn8zaokAKQMCcIAIX1Wz5/hxjPPY8rKlX+wFJljenjS44v+yj/jYz/1/YzuuAoGHvjP7+Tef/9d7CdD2v2QpEi4dOMaX3j369jaWEfjQH6yVlnUVUVVVkwmk7q/bB2wSKpSM5ik7O5vk6ab7I9zbty4VlMwb6/ZHx5PnlKcOXachfkFlPLwhDrwSxAOIOYpSSsKWeh1CDYVk+IzOzG98qfd1VRVya2NW1grsfZvA88gVYc0LcnihHarRZ6NeeD++/BshOx2iXptqLEZ8foWlx5/mlE6oo2i3e1STYw7+RioygqzvcTKI3+fYZ4zHsQ1ezcDhu7arGV/OODR4QBw6cowDKiKkpWFPp4fMkkzFmrL5aos2NnehPjAWa7daNBrNgnkiNHYnex6rSZVmfGFD76B3cmIpTvPcvOFK0zSMePxCFnllE2frY01qnRMIwqJQh9w4lS2Lv0ZrVG+otVyznzKGk4cO8rCffewvrbmcC2ApmI4njAcDbm1uYUQtk4Ha6q8oNluc+TEcXwjWVvb5LlnXyTLC3fCt85oqFuZmQ7AY088w0g6MSvJce66cASpDrJxFousCppUBGikLJE2J9lKqHQKZUGpKxqiwmYVpsrJq4zSeJAbqp19BnHJRRT3nD+P1I4yXCQpaWlnokPAzCJ8PBwyGA4Ybu9gqgqrBMtL85Rrt+hJMKMhsvfllP5vUeVPUdjcBYSVoNFq41uB8NQM/3DHHad56uknP6Ox/OmatU461/c8Bye2OLVR4QR4PKXY3Nri8sUX2N3awlfw+nvvptFsIYSkGyr6nRZSTn/N4as8Kyi0Ic1ysNBqd9gbj8Fo4rioT+CWSZwwSXKSyZhxkpNpx/A4ffwEo0lCqw3SCIQVFEaztLzK9a1d4ms3WOm1nL2zsQzHEwajCWVl2Nvdc6p8WpNOYqzWztjM9yiKjKoobuPqT/UVpBS0Gg2s1g7wJyzWl5RV9QoB1ldr5WeY/ofPUwCwtPS1LHXvZvxLj5B9w8cO/uFjR9n+oOXc/DzWGoIgIIwCsjRja3uzdmkK8IMIISTLy6tIKUmT3NVrpBPzEEBncYXQVdEpKu1IAGVB9t1fi9ECvvMxt7L+yNvxdlpUP/Ert12jlRZfCd54113sxxMev3iRJDfspgnBzjanVlYRdTQ8rYvnZUGn0TwQ17GWajJiud9mdeFeNrZW+ONnnmEwnMxO1rosqWxJkWez9znc2oe+3rx6nW3PwwtCgkZE1Gg6+mOziQp8Ku2cDw9vVEIpRwXBTSxdlAyHkjz/AtJ0EZ1v10pm1QxLkAchWRyjdUVRFGhdsbu1xdETJ+n3v540bzBOPupqq4cuVy
h5YF+MRSh/yuRFCIEfeHQ6HcbDaTnjCFL9OYxsI6yh2WrRW1lm59oNB4gsNWlaoitLFDUorsIbvu8HGL3tl2kPYr7gx76Sx47tMh6MaLQChvGE0/NLeAK6S/PoskR5HqXVyMI9k6rSpEU+o6YL6UCDkzhhY/MqludR3oMYI10J6ZOIbdT4ZIQQNJotvCDCUzVGQ9ce3HX0bitNEPh02k0i30PV4KrX2sSh/7uerVkZRlPmOd3+NQSPARZjYkp9nI2dfXqBcs5unTZlVjC8dpl9DSoMZ/LHp+44it84RZbFFGVWl0QEZZZT6YpBXrCxNaQy2o0rAYJNrH10Nsam0GWHtbVkWU4UhWwPRiz0eiyuLFCWuTNdkdKBrabUVgHNKMIPwxn3fX1nh07zOO1Oh7DZRT81oRplLBxbobnY5dbaOk8++wwr8wusnjhNlo3QwrIZJ5TGzLJ+EvA9QRgFBH6IElDqgshXNAPFfKuBJ10aPAxD8qUK4XmEYYNG1HAiQb5CCVWPbYtnBOKU2+armmJVVSVVWRJMYqjLkG986D5Mq4OQHlp8Fy9+w39hd+MGD//TebprTXwsnVAhqgTpFbgju8JUEmk1zkImd4BdrTFlRSDbmGRCuTsi2U94oTQEZ85x6sQdlJPJjDK3sbtNNRjQAXSl2draZDwZMUommLJCWYMfBnSX5vHyDKk1oa8QZYre3cIsvRstJdL+IXiSsNFEeYFTAzUG6XvkecHy0hKNRoM0S/9UCQEO4+QhhEQKSegHFFXOzbU1bly7wq31NcrSHYwW5hZ54L7zTEYjpBAcXWqhNQy3b9VsIYERBgrwewsEkWI82Ge+32d/uI/WxgEe61ZWhuF4TFmWDNMCDZw7soIvLY89+QTNZou5RovKWiZZSmk0w8kEU5aMR0N8z5VfiqKiMhbHXQBdZ7TT4RhtNBkFeeZuVmfpbd03XXNsHcB0arvqoihI84yq+tyZMn3OAwDfD1hdWSUdW/zv+2rAkr3343B5Dr71vYyvNtg/tcvuzi7GGjzPYxJP8JRPnmdMPA9T4wOUdAtcp7PN6x5+COUpKFwapTu3SKE8hNHkxm1wgRB4YpWbf+c9mP94H0IImh86g1qeEP/OWfSXXpod9c78o29DVh6PPfcsmTFu0wBAsDkYYgycPXYUdOV076XnlL0QB/QL5Tb0LE3xvYpeq1kPvPFMyOnw/vKpgB0AeZ45KfR4AgO3qToLzZBGu03UaTM3tzDjhTsv6AolYTIcsLOxgS7eQ6UXEbQwelJrATi+s64vKs8S8jytNf/da1Wec+PyJXrz8xw5/i7ySpHEf0jUahy+Aaaq3RXW0RONrVPFknarxek7TvHM089C2QLeixArBwA2Y1g+coTttVsUSYG0roatAp+0qiiKkvGHBPe+/x2cSyaMooLh9i7NhT4TaYjjlDtOHkErS8uLMIGPtbWLl3XAJm20k2atT7lYSOIEECgxoj+/g+d/C888dQOB4TD/b6pbKbGziSIELlVYFvi+whhT67tT08gkwvfJTUmgJHNhxEjGxOaVrIDpV1N3jJlfwLRzBS7FKAAKlHgf+dgj0UMsDpAlwlW0fTu3toecXp5HCZcWDULJ3Pw8+3t7TAY7KCGIem2U10ALhzeJhwnalCC8WkPBMhiOb6MSeUrSarZJEuUASfU4m8KSXGC+yH0P3MdH/uDDtJsh+WRE2W2xMxiyvLrEcLDPctCcvafyFMqr+84Ybm5tc+7YUaJWC+Ur5uZ6pNc38LsdyjTGrm/zwMoRVk8eYzDaJzeCQVKwP45rDEBdkxeWyPdoN9tI5blsgOexsbPJ/ngIZcnK/AKeChBYAk8yiSf0Gk1kWVCVJWnmENXWGqJGAy0lpa6QOOnaoizxfQ9PWrxDbOJIKbSnyKsH+eh3/DgvfNmvgrRcfvMqf+E938P2cI+9vR3OnV2itGCVRAmwusJWJVh3chV5TpWVBO0eVVai45hynLBZaNYbHd5y1z1IrZ0mgnB6HdL3HcIcyIucixcvEvqKShowljBq0Znr05nrsvPCJj3hTrSyAm8So9Q+uv9OkmIbaR7HU76bi9bgecFMGMoay9HVI1y68qmwP59Fs057JAp8JqMhOxsbXLt2hb3dXcpZ+cyxlVrNBmWRUZU5ClEfZixSusB/mr7LRjlznT5Ii/QErVaTKs9fkSIXQJqX6EoTKY+kKhnkMWEjpNeI2BwO2drZd+ZQNT5kOk2TCqgDE1EbrUg/wAuDWS1eG2eV7fAl2pW6tXWnkZet/9ZaJklCVuQo6QTgtNY1BfJz0z7nZkCNxiMM9k+xtNTh5s0Q8+1/huM/9O2EXsiVF9cImj7Xr12l1eoyHo1pNBoo4TTzo0bEubPnQDqksjtEaYoyJwwCpyVdpz2c4I1T/4qUplSSQPkUVcm5ow9z48M9QiHoh4JqP+LLvv6b+fgv/zS7R8d4//ZL2fwXd4C+RaYrlFQEXkilNUZrNILt0ZAg9Di2MI/GUpSls2f1FbmpyHKnUx4FTbdxVA5V7HkKmNL17Mt655UZgFer4LgB536uLAuqqiBNYtpJwv76LfLUAUWchr9mb2uXzZsxgi/D2HNOVlkUrswwLYDXiHhZq54ZY7FGH+w/yiGKR3s7jEdDTt6xhBcE6MMqdNbW0sYaKR3FUHkKXZaz9/T9Hu9650/jvf9pqORsuxM4ZllZlhw9dZLLTz1LJgzNXhttDYP9AWkkabX7eLlLH5uq5NRwl/ObNx2jwVjUyxcjexBYTTnD91uLeubJ2eQP65tctutI8TzwIb65Mq9y+j/4fjpRVFFyxx9+mKn61ieBZWCxLFs4Vat3ffIpbA/exYJTABwCXbDrwM/Vr1ugcB4OYycVBPD12SaafdRLBeqK03U/vMhFh6POmcCUe7dFO/1sF/RZ4EFrboMsCp3D5CfAuozAAPgZIYg5ANrOz88RBQFR6BM2fEZxTLi9S1WWZIMRnvSIJ+4ZWus0BeYX5vDDNfIs48bODrvJmOOdHlop/H6bm1cvE7/0HKW1qG6TpVNHicuUTOfsZCUXN3ddsMWU5SLxlKEVRXTaXbKipvwJQZGXKNVge3fIS1du0mw0aHWaeFLQbjVZWFphb7BHkqe0Wh1ajSbWGpIioapKAj/AaMN4N3aCPMqj2WzQOWR+lJYV41HG09/yPl5811Oz8uLGQxv85Ad+kjd88+uZF5KtTzzL4kPnmfRG/Obf+iF2T17m3B98EW/56e8gGAbosiTqdjAV2L099DhllJS8aATlykmCZtuBG62jGhqcqNjsEACkpmI0SWkGAatLS3T7fbqdLoOtLew4puX7WK2xEoyo8IsdukmD54fvwNhtosYuxlg833cCX0pRaUOe5Swvr3D95g34DPTmP10TWAb7e/zWb/46g70BxliM1RzEw/WKYQ3dThtROXXWqT0GwmCsk2p3JAtLYC3D3R02drfZ3tslTVM3xuv3m+qceJ5kaWWZFy5dwwP6zZAqN1y+cQvf92kEAdYzJIWmqGV7p1LZM/C2OLQOKIEfOlviqjSUWUqoAvLcgUobf
kCqPzmwz4LLnNnKzdfp1H2VveJPo/0JA4DDS+Arm5QRYXQPu/tD5heW8QOffJyxfWnEgw8+yGZrH7C0Oh26nS7tThtPeTRbTWe6gSWOE0bjMWVZIayh0+lw5OgR/MAniJoOYQ40orBGgQpH9TbWRYZ+gPQsZ86f58all6i0JvA8wrHPt33Jt/Ei8PNCYGyFEOB7PnfffReXL19msdlF5zmDyYTCwI2tXZK8YHV+DlFphPDASowV7vpygy99VOghPYXne4ShP6Mkvry9Kq/zU3x/OFWEdBvi+bvO0XjxEkwccnm8s8f22jGs+ZI6XStm5QchJUHkxEaEE8PEDwKiKCTLcvI8m2U9jDFU2lHYyizj2iWPuYWHaKw8dvv1W3e61trREpV0ExNhiaJF7rnz+1novwkhLuI2N2b3IKWgKEpkI+Do2dNcuXgRqUIazSb+/pAXr16i32hxqsZF9KuK/sspNK8ijvHypuA2dOx0e5QEwFFAE37adzn4XXEIvPPJIDcHn/Fa2gC4BNwFFMCHgTcBzwH3vuxn14DN2Xc+Z/BxqOJX64tPd32fvlmwB1oNq8A7rOXXa+tmgLm5Li9dvMjK0gplWlLlJbt7u/Tn58iLgqOnTpA8+fzsPW5sbIPw6baaTOKccRLz1IuXWZ5bpBGGBAs97vqiL2B/Z5vxcABSkmQp4yzn5jDh8sYOzWaLs2fPcvHiReLE4WwiX9HtNJEqRKiS0BfkpeXMyeOUWUIvOoZBUOYlcTwmznLGccoHP/Zxep02rTBisL+BlXWpqKqw1AJA1hD4gUPcK8MgSWnnJafre7qydovEv4exPoNVT9zW0aZbcuHoIsHONqPdkq3sCT7yPT/PrbueBuDZr3g/shI88uN/gUarhzZQ7e9TbYyZxBmXs5Kt3inUwgpVlSO8YGZ8hTYcsp7BU4qlxUWkELTbHbqtJmEYYrOMvWuXWfU9pKmopMKTAk3Adia5Nd5idzik0f06Cv0BAnWxtklWtTaIE+oNA5+FuTnE1tZrHkGvYYSRZTntVpu5uT57e3tIaofY2Uh1uh/NZgNtSnfiNs4ieLYOCeHMljToypBmMZ4f0AgjxiOHGJtu/NNAIGpENBsBvufkodutBp6wbOduzGtjqUqNsRpq7IeoS6fTaz/ofkuVZeTaZQTLOpOUlNnsXj9VNn8WuE83i0Obxudi84c/cQBQHwE+ycV5XpfFxXdx+dIVJpMJK8srTMZjiqIgjmMuXLhAkqS0Wm1GoxF5nqFNxdbWJlob4nhC1GgglcdoOERXFZPxiEarwdziAkF0YIoileeiwpr7HvqKsqqgLGiEIY0wQl44x9rFF6nicrYfTFOargmCRsje3j6BVJw7eYJsb4fldour+wNGac7OYERVadqN0FkMZz5RGBJEIflkgi1LlOyiPZ/nXrjI9fVbfwIe+KdqprYXOkglxZMjbI/uwZgzBz9Wf7iUCiEUunJYi6rM0RYq40Qtev0eSewziWOnlW0tZakRwslXOhDjMqJ6CPjD+gqmUfiBVLAQDuncbHR4+xf9n8zNvYmqvH3TttaCdOm7Xq/LzVu36C0sctxY1q6+RFsFzHW6GGvpN9u0kglk6cHvA0+2+1jpKFy9fo8oigh8DxEE3Li1TpVXBGHIxnjAufMX6Pf7pEWBBLK8IElS0qJglCRUWjPc3abTapGliaNvHmpCwnkhuF9btFLsnj6FCgKUUkzNnK2xjh8ehkjP2fhqa8jjCcPRgPF4zGCcspPlaMBXkjtPbHDu3FdjA4Exl1HiAlZG6Oq9Dmth/ixoi7UGU5YUWYY261jeR//xDyCs5Znuu1mXx4mH25w5foz5VuQEc5RPEIRoralqBT0v8EmSmOFo6CyipYfWGt/38YKAF66vMUwOfAfm5+Z46L67iMJPUFYF4WOP4Q8GRFHE8cVFjh45glLuube6C3TDkOsvPcfq8hxFpSnKCiU8siRlrteBG1sIa7n3xgagOKcrBrg0tX/9JmFZ4fkBnlIYa1i2lnldkeU5ZVGwWJYsxin3GUOjLFjc2ebusqCox3hUQefWPuHgWYy1KOWhtbPD1rpE18A5ZQXCc7XaKk5J84wod37xU4nhqU23Ns5kRUkHLlVK1VLAFnUoEHxkP8Woe9j51zG/8qY59mu2Exbe9XcLep94HOKYhq14KXyWaw9//LYx9vR7fpMv+f630h720JXGpBk2KekYiIzlfDZEvfQ0y9clbd9HV6Ur42mnmxKO3QbX0Jo3DYfO/ns8cc6pUpLGE+azgkalncaJkHilIksKvKKiZzR3AuFwQqN4CE9sIsQzrqzHwQovheBkHDNXf38B6HyKVcoCvwJ8ujD9xKk7eOMjj/DoJz7hRJukrAOAg8VTCEHku9S4K7tWeEIRRBFSeoAkzVLWdrfZ3RwwrjRxmpOkMVVVzsB2h68u8DwCaem0IwbDjCLPqYQkLSukhbBWKzQ108RzaN9PeT9lOQ2Y69Lea1z/Pxdyy5+ufY5LABFlUdDv91hbu8Fdd99TIyEL1tbWyPPcAW88nzAISdKERqOB7wdYC1pb0iSjP9fn5PET7O7skGYJVVmhtSZJD1IpRZFj/RDfkwS+R1m5k2noC9pNj0kc0+52OHHhTp57/EnyQ5N3FsZYSxzHpEnKYqfPjetrNANFM2rSiFJGaYZEMJzETJLUIaLjjHFesRen9DttwjBkezThiRdfZGdv7zZa4p9mE9Yl0+M8dUIiSPKkT8G5V8RjDjvgu6BHWIrCncQ96QKoOElI45jVxSV8JRmOx/WMraPcOhMlhGW408PRAzPcAcTOsglOHVEQRh2+/Mv/PceOvc3xqQ+R7y2u7DCNwo0Fqy07e3vMryyjPMHm1euE7RaLSuArhShemTJ7CUHUaLG0sECwtIBoRFjPR0vB5jgmDSo2xgO6J09w+t57mQQB4zQj9AKG4zFZmrM/GnJze5u0KNj3A/T+vkutzjrOXXAvbLKChTRFC8GNVsuBU30PsHhRi7KyTNKYoiiwk4ROu03QjKDdIQl98nabtJ2wvv2vKfUV3vDA6zl2z1+liu7FhA1k8GZXm9fa2SUDtqqg5o5Lo/Hygio7TVm9CE/8Adj7mLTP8GwcUzQijp69g70yc5r3eUWapk7drhEgpGISx2RVTrjQRSmPNMkIghbNZoMkr3gKV3wQQqCU4o3nzmPuugsb3U0aj/Gffx4GA86dOcPJd70T3/ewVnPpyjWee+km0UKfcV5yMgpoByGjeJ/FhVNcfW6Ne6+uz7p0ef+Av3Ni+oWu4ObN2evTEkdQ/5m2Y9Mvsgxu3pxtRACUGoax+/OZttHk0/8MQG3afTibBdC8OQQE7Wsdvu0df45/+5F/gw4MX/G9cN/PBUhzkLVpjcFLPKrmQWAcjkKijRHB7itpqPMA411eQXt6leYZw8qr+M93p1/MgnEDZUUbZyw9a0XuklC8FdgCruFYIK/eluo/n6xZoAH87Ke57m6vz/raLa5dvQYwC1qn5UosBEo5bIOunCCV9MmNYG+QcmN9g3GcMhmNyKoCXdXGOYdwNa+2
wRpj8IVirhGSJiX745ReFNKLfEZ5ybisXGmBGuODM4Y7aJ8qC/7539A/0/bZBwCz+7a3vXTwjSQIv5ObN29y/PgJd+qfjFlcXGRvf4+iyGoqhIushaReUA2eJ+l02iwtLtZp9Ig0Tbjj9GmUkijfI80y5CFkdehLysDDr9XxnHWzodVuUJQF2hj29/eIvJC55SXsrY3Z2JJipg6LtMKlAuMxW2VBoDzajSaF0bP0EcJtfHXpFOOF3Nob8uLVG3TaDarKkOTZ7P0/daHks2v2XZcoepZ432IuGUBj7ds4wKsfSh9Z63wOhEPAY51JC55jNWAqjIC1jQ2iqMFKq0GpDeO8IK95uK5+ZhHCB74NeB9OWliAEXUgF9FuPcyDD3w7p05+EVbXjInDqaypC5Z1IDu0od/rc2tjnTRJCNodFu+4g/3NTWTo42uNGg5fcf9Hl1bwm006rQaehTzJyL2KSZWztrdDYi3LR49y9txZx3TICiSSJM2oiophEjPJUkAwmUwOsm6HSwXuMonCgDk/gNQFWxdvrnFicYFOq8HecMS1wYidOKHdaHBqaYnl/hzC85hMEhQuMxBFbbQZcKqsOHnybs6f/RaE9xBG+YioAdYiLfXp05lGGeUhhZMHtVriCY9WEFKk3wX8F+Ap5tr/kvuWl9kbfxW9RgthNJN4gJUeUnnEScx2krgSjZI0GiG6sqRJglIK3/OoipKisgilZmPcaM1oPEEEAUZIdFli64XP9z2MsNiixNiKZtQkLwr8KEL4PpW2FHmFsYo0y904qTceC0zme1TW1GNK4PlOhGY8Tm9Le05LcgpJGIW0owZBu+XKWpVmksRs7u9itMFXiih0znm6qtBGozwfpXw8T7kTszUoz8f3nG5DqTVZXpBlKUpJJ6lcA20xxmXN6vS/rdcqId6KC0OexDOG+fF0V/4zQAzcpHvjt/ibxw+XnL7ltrF7+ndP81Xf/VW8/5++n2w+Y+7SHF//zV9PY7fBn7QZzyNZmMdWmqzUDLOYvjE0cADbYWVIze0bmPM1kcw3IjzhwIVK9dDyfyAt9yj1z6LlJnKKNZCC/miIX1XsAtuf5FqWgAVuZza9WhNCUOqKZ597jqIoXHr/cLPON8DzfXzfBxTjJOGl6ze5trYxW08OCuZuvZJT4PUn/WDAaIrK0Agj5po5W8OKuKrotxtYKZlkbt2U1glOFtry6hI+/322zzIAcB7IQkoXidft9m55iLIQFEXO7u4OCwsLbG5usrp6hMFggLWGxcVFPM/H9wM67TZaV/WEc85HWZ6Tj8b0hvvckbpJGoYBYdQgiiKWBwdqeecvX0RFLTzPuTtZ4wRepIQkSVCeIvB94tGE1fFodpJYAt5VDxJh3XQgTQ6c3KoKalW+22Oe2ijHaKL9PawxFFjMJHnFhv+ZBACHTztvwS0pL29PfdOT3PqXv07VLdj/2J1Uf/bdcO2/cZpv58t4JY/dXYObEPsW/qgGDBlTYmsapQVCT7LYbnL66Aqd0GOwv8/N7X3WBxPnSD/DpKwCX4MSL4E1GOMmqCm/mAtn38g9dz9cv6OdgeWmTUrJ1DXPvQDNRsRcr88kjmk1Wm48zC8wHirK8YRXymVAr9XChgHaaNI8pyw1aVmwrXOaqyucWT1Cu9GkEzbxPI+9vQFS+WRFziRNGI1HTmSkxjB4foD2lDMaOtQ8JRmlCWujMffgzJmuDYaEjZC5fgeBZTweApIj/Tnmuz28RgNjLePRiHwyRpuK/uISMrzIyeMep+/4Lgjfgm6EyCBCGZfmtxKsrBkLtuYHCOmMfoRyrmmAF0bAO4GX6PZ7LHUigugP0PoOWs2TpEVCkmQgfMJGAz8MMVNEcU31FAh8T80Q/Y12h7yoT4dCYJC8cOkyYavBO972Rqrq12aBnPMw1zXswFnsCqPds8hyssoyGg+IopDRMCaeHCrfCMHa/RcYZjFVVZFVgr3hhLJQrMUFkSeJPA+LJRQSgyTThrkg5K6lI5y88zytuT5GGza2N/i1D/weaZ6z2m7SafisHjlKVeUM4wlW+vTqZ1JlKZWtiJptGkETK12deDAYkqYJ8/PzlNqhyKWSRIGH7/kYJNaUYA3avhUpvsw9AynxraX9wksEV67Vd7cP7CF4w8tG64dum40CeN3/9zyqfA/jo2OOfuIox//oOH/SZoHBffextbJEvD/i4qRga2uDN5uYOQGXK8uzk4RhzQJwC5igpTweWe5i+gG6yhB+g8BvYqKQSpxiWJxkN/l5ctYJGm2U7/PGZx7Hn4x5HvjtT3I9Xwq87TVctxCCnb0dhqPhDFx3cEdAjfb3gwA/aPD0sxe5sX6LLK+tqwUHOAgxdZqVh9w4P8nqa51NfFmVyCCkFXg0fMkwr4j3JnhCoLFI6ayry8q8Sur/v+9g4DUHANM1W9RCKotLyyAE440NqDmPq8eOM04SRvsDovB1HD12miuXLzEcDsjzbMblPHb8BK1WE3f6itFVxdr6GvEkrk8pTaeWpzXNNOPd2YQeOARFVd4mKgJuQt0zGsHo08nnWtwkfR/wDuACc8Cbb/uRz/CBWm6rT/9ptgdf5bXnvvY5Bv/sN6Hr2A/FG1/gP//ygL/yyNdzvIJPuYwY67J7WD6mwFaAcHSk1bked6wss7q0SNRq0PB8VubmOLa4z9OXrvDSzojcwtSpC06yfv1RGr0uQnkovpx8cg+vf/NDBHVqXEwr5HWXGmX4wFf/MJffs87xl97K23/thxHasSS6vR661GRFiu8HBGFEqzfHGEF1WL+3blpXYCSDeMxolKCVx/LJ45xevoNCa5SSLM0vUBnN1es3CMIIr64dCuMUB5Msd/rZSUaVxlj9SqtN3/NY6c9Tbm4AdUZHOkTy6uoxfBWytjtkYzLh+bWbbCQJc/15tna3qbKE450eF+68QGVuoXTEHcf/3/jt8xA2CPyAQgoUZvaeStauhXWznnIUcVvVHWkdjYgl4AvwlaXbbBBEUFa/iva/nXa3j7BDkqSgNKVjeFhBUeaOGiclyhMoz1lFLywucWVzj7wsXbYHp8BYlYYnnngWac/xunvfgvR+rx5GppbFVRSVc2L0lAStyfOMqzdvstTvUsYTQpGTF7fPj1LAJMnYHicsLcyzsT8kEBqpHM1qvtuiFTgLaL/R4sSp03RWFwl93wEeswJd5az2exyfW+DirTWsJzHSMpiMWOh36Ui4tbmHJxXbewOEUIRKccRv0uuEoCRFltMMPBY7C7RaDbDOSAYhaTYjR5k1IFWARaLNmxGyXQfNzv3w+oXTjPtd9iZjBqNViuJO8iyl1W5zZrjPw0XKhE3eH/p0G01agUd/YQFPvEj7wy+wmk5Q4SaD113CCMg2u4j8bRRFwcfnzjM8eZyWJ/CkjzaGpqn40tU5cmvwKouuckAitEEbzSgIyPb22JEhN4TihNYsNCQvJBVPjBNyY7HC9bO0kl4o+aKjfbqyRNsMlKUyGYYKnZd4zR7zUY+F8JtZj3+SvSpx2ZI/xY3P1sGyMW7e2kq/Iq9ssTSaDa5eX+Olq9eppnOkXo5
uN9d19/ipLnEKuFPKw1Q5KA8vDGmGBWlpKI2lsFPHVVO7ofLKk9Xhy/zvMBZ47RkAK5C+QnkBXtiiDJsEYYMTvQV45nEAcunhtzvI8b0U5Sms0SwuLrC3t+9ockg2NzdotVpcuezoW0IIwjByCFvPZRWMNjUnWhMq3OYPbHs+0vdRylUIy2TCknEn3mG7RVFVWGNpt1qMx+OZ6YJQ0gGdvAFC/BxS5DSSkqBynmq7h25T1GkuZ3BTO1DVnLUwDMmLgsBz3ebcl15Zs/uTNAkcwX3kJi+vNMLaUkK6cPuCunvnLogFRsCIWk7yVQKZZVyGoWMtRjve9GKzyd2nT3H6xDFC5eMFzlzEYpG+T9CIuN/3sM9f5MXtyQz5DTAZfRW7l3+LI6ffRjq5m/vvPEe73aQqSneare9DKknZKPmD7/9dPvqX/xCrLJvHnkQYyVve//0EZYvA91lcWmBjc5OyLJFK4Qce3V6PqNF8xb0M0phsMia1htbKEv3lJcJWE+kpVhecSt/G9hbbOzt0u10CpUAKTGXQxuAFIWWaklY5ZZVidT4LVKbUHqyg7SvOL3XoVAnsjvCV4uETx1jsdWl15yh1xd2nT3IiL1gfDNgdDNgY7tNq+Nx79wXOnjxLJi2jPcvRI/cRdjoQhsjQ4Vz8Q4ZMU7KPW5wEQgms1piqQlk7c4h0a6KPS0Xv4ochnlDEeUJR/QQLCz9C4CsacUIcFyRpgsbQwqdSjoPfDENHc2qGyCDguWvXnFASYGvdBFun6B996hL74+N8VX4/IbfqEoGT7S7LgizNMVISZxlSCpIkJ/ZTFJKBNFQv40JYBIEfsLu/hRWKMAqIBwMaUrLQ69IOAoSG0w+9jiN3nEIPRwQqwngCZS3aWELfp6hy3v2Wt8LH/5jBYIdWq002GTNE0+t36LbapHlBq9OlqCyjNCG5eYtr6xv0um06rRZ7O9ssLc7T7ncRwtKMAucmV4NklVDkE7DiK1GijadiCgxxmjOc5OyNE/bjjJ394Wz8lEVJUGQsKRfclqzyrPbxxhLV8OnqJqH/EHPiIbqRoNdv0raCBU+B/3FE8VPcaM9x8fSbafWeJA8CKv3FCHUKVcB+v0fb1xTU48EZqSDSklaeEy+vcGtfU+zdYnmuzZXxPo+NEspDS4IvYLUZ8gUnFmmZIQUahEJ4Atuo+MRf/yUufuMHaW7N8yXf87doXl9lLvgrjNJ/R2VjptoLfxpNCEFgJZVwgMMDYPPBrmqNk/a+eOV6jXl6eZvW8OzLK9Ov2qZ26mEYIPwIayqUH9BuNUjLknGm0XZK33ZU0ynQ2TVzcH3/nW7+8BkEAPMrx1CtBigPJZ0RQhyPydODzWiwdpNda8Aeww89rl+/TqfTptfvMdgfAJY0TRkOR+6h+y6qFYAf+PT7fZRSBGFAELpaXieN4cUXAfgvvT5Z4LO0vEy71Wb7qU/wF8fu8z90710MsxQfSbfd4eknnyJNE2QY0F6YY2EuptN6iiD8NnSquf95n1MDuI6jAU4zHK4k4VFVmigI6C/Mk6Qxx+ZXkEHAKBmy1OlSlRWXb9xgb1TXp18t7/5ZtDbwffXXv8RhwlfdnliBZ5fgnoPK2/3/4X6EFjwB/G4N+CtrLv7h9l24DIFj6VmWex3uOnqcMydOEnSaOGaNqLWsLFb6CCHo9Rc5eyJmfXSJIj9I0Vnhsb39JaDWmOtvcHT1Lc5cRNYWLcKh5K2x7J/Z50P/zw8dXIy0fPxLf4x7Hv2zLK/dDxKU77O4sMTm1iYCi698VKjwPP+VHdXv0Gq2WV2cJwhDJApVlxe2d3eJkwRjzGxMCeWAh5XWeELiex6hUvjGkOUpsvZcn7X6y3bYYC5ssjQ3D7sjpBQsL/RYnltE+OCHHqdOHKU0MD8asbO1Q5mW9HpNzpw4QaPbppiMWV5axG+2sGHo6urTAiUHQ2fWxCEchzaIOnOBdSl0IaYQufvxvA5h8ASWygnVWA8bXKbTP00gh3gqpRkF5EWBNQbheQShTxi5AEAIyWMvvcRokiDwauynYcqt15VB+gGXr9xiv3orffYoqyGlAWs0WZ4xmiRo4TOOR/Q6PdI4prSapCgYD3PCl80JT/l4QYMsL/CUR5blaCSZgbDVZm6hTxQ22N9c58aLL7LQ73H2nvuQ+A40GjojLVNYvCjkK976Vj722CcwaBImFEVJHmcszHXYGsRUacbi/BxLvbbTqag0Qvls7+yhjeDyjVusrW85RoNU7uBgBRaBkiGh/zUYlsCsYbFYz2N3f8g4rSiEMy/TZUWzESGUJKwDZ51sHAwnA5X9/7H33/G6pmldJ/q9wxPeuN6V9ko7Ve3KVd0VOtE0ockoIAiKggqKcThHz3g8DgeVQcWZYXQGBwXGURQUIyMqIkloaFJD51BdXXHnvddeea03PukO88f9vO9aO1RTDd2ttFyfT9Xea+03POF+7iv9rt/PY6xjjGNUQnpqlWFZcvOopGksrcTTc48y17iPi3PnkfOvQ4o87LfVD6DFn2EiPC/2j3jz2gLKOGzkA6e8VEgRMdBNPnxwxA1TErW67Pf7XO2PqbybrR/lBQ/Nd3ndUoz0RxQalGwgLJRlxbPf/pO8+IffCcBoY5df/Z+/j7f9j3+C+UsPkrivZVT9LM6HEcZPRhgQnu8TALt74MsABoPBbRS6v10L1LvNWmHQUlHgk4S5ThtETl5UMxIeT8CKBTkaQWE4TrJ+hzp/+AQCANXpkFcl1WSIzXJcFYRwGifR9M6AWASeCSxVzjAYDGm1W0gVysJT4hkpJaura3Tn5mg0Avjl8PCQ8WjEaBzQmUVR0CtvL8seHh4SxzHtTptWswl1AJDnGb1Ol9HhEc9+6MNUOJL5Dt1ej167SyNpEumv4HBvkf1buzxcZkBAl2utEVJSlRVCUKOngx52nhfgBRbY2d4mbaVs7u6RFQVE4X3e1oINH7/q9Mmx95yGP/K18LM/AssT+Edv4E1/+UuQ7pgN8F7O/05rJREX1lZpd4JSHJWBKEIiENbilagR8RahJAvtOda7Ta7sDY6xNh6ca7C/PeSpxxRxvbnOsgNPcGbTXeJOT1f/uwhLIrCvJQmLCwscHh3VkwvTNXO7rWycYWQdZV7hqjDWhKDW3Ba0W22EDPMJztYUrtaFES4hQpbha8T9HeNGTA9TeLrtNlJGnEQyJFFKozePFYpm3MQrh/Se+xeXue/8/ZTWE8cSLTWFimg1usQKRBIhozgcqxdIIaf5TV1inIIt3ezvM1CnC2IyykvciUMV4mm8fx6pXJhRR6Kj55A8SrvjabQaWGOpyhJrXZDxjXSQv7WWfl6x1R/XG9wx9sCfuMnOlIgoqvuAX8zVG7/B+3/+l1lbXaI91yGrDKlWpDje8sxTfOhDH0ZEnjQRlN6SDYoTxyuYX1jgpZu3MEJQWMe4KEPOZyw3trcoizGrq2shsPDwyNmz5OOcVjtMCHE4CGyWcRDTiYXnmaefRLSavO833sXh7iZCd9ExLM51Ate/7LEw30Pig/67c3RXT1EZw9z8fGBqE9Rslg6hI2
5ubXPuzKNo+UfraRbFi5ev8PKVqxweDhFRCnEQ4PFIrFAgJbGCsshns+JCSJpzPaqyxNsSWxSUeU4+1wMdRvF2hqNAIuOglcaM5ltIO0G1UpyxRPpzEOKDNNWbeb4sifsZ55otEu9QIuao8GwNSz42HrBXGrxKMMObvHLjCnaG/NdI73h0ucOb13o4M0SqBqYSTPYmCGOpkpLnv+4Xb3sWDp64zv5Dl+i+uE6iTjFwvw/vPwSM7ijV/9Z3P+dDZS70+09uGMd2O/r+t29SSVqtNt1WJ+z7kwlRpFFa0U4SqnqEdrYm6ufVeLi5P7ijVfE7015zALB34xquLBA1eneKbr8zBBQs4VkiiRParRZHh0cUeUkcR+Bhbq5LkiQsLCxgKsutW6GkaKswpylV0Bn33hNHEepEib2qKrRUSKkYTTISfUzfUpUF+d4BDz3wECpKuLF9i7iZ0m13aSQJ6CZltcCVi89S5QWTEyXy6fiaIGxQzWaDVqtFlhdUVUUaJ1zbvElZFIg+GG9Jk5Rms3lbFnfbdZj6wE/FCvnQGjzy5xFKoPIEMTq+CVMU98l55Tst0YoHz2ywdmqZ0XhEURVESiIKD0oGJi4nkCIoFiIlaaPLA2vLHAzGkId7IutnvtV4G8tLT2GNQSpZKwKK8N76mJZeWOIr/+xX8rPf87NU7QpdNHn7T/xNlm89GrAl1Hz3eJrtFqWpKIqKONKzls9J8wgWFhZQSlEWZT3F4WdTBtOHNdISKxXWWXAOrVWoAmhNUQb56aTRoCgmM9wix38gdRQ2pxPrMNYxUZIilaKqH4R8POFgb4eV5RV0rBHG46RERQode6TyCB1Q5cffIOrrEy6kdzYAUT0EcKULxCZSzYIYiQwTHCfut5DfQpL8G7zIEd4h9HWceS+RehqhPSqKSJIEWxmEFFS2whnLzt4hL23tcjQI1MjWG0L9R8AsGAjOrX7sgYSs+AKu3Iy4uvWviNNbJLHl1MI8vtVmY2OVF154AR1rlrsNrhUZmSrBTM/ac2tnl1t7exSl4dLVm7RaLZSS6AaoqqCVpsTGUY0ybg2HrCWvUBlL/1cPSVpNVhcWWFxaZGIMa0tLeGsZHu4xf/4sTzz+ep77YEFZVDTSmLlmSj43x+VrN7mxdcDptVWWFxZJVGCTq4qSVEmipMk4m9CMIqRUlN6jZRvtv4lECEQScWN7j+deeoXcC1wU0+l2iGLNaDhCxjHGCyIEo0EfU8vNTm+S8WCRCKfIxiNknLJzcEiaJCzM95iMw2hqc+kUe5MJ1c1XcDbHr67T6DaI9BrWfwBTjEkbLT46HPP+nSO8cXgnyCYFpAlKh8qF2N3GXHkeTAkybPRCeJ5YnOPpM3OUrkLHMfu7GYd7GctzDbRQVBU0trtMTh/N1pgeJchRjK0KUpnjXA/nvwX4uwjuntB5rTYl1UkbzeD8p63LT1dJ3YOSgmajCVIQ6ZiyTEgbDbzzWOvw3mLdlIU2JEWDrEDsn5zJ/J3bA3jNAYAvSmRNiAAn4BYnzlvKGCH+MMJLrDVsrK9TFgVZlrF8apn5Xo+iKDg42Gd//yA4Xh/GipqNBtZ5pAwZipACpTULjQYMwyK77777mCiF8Y6j0ZAVdSIzFIJJlrHXP6TZ6XCKAIpJkwgnLhKLR7h48SKmKO867ukvpJKkaYq1ln6/z1TUZndvgDHmeAwQwHnyyST04O6wYybWT93CEEfNEDiJ279jBq6XoX918jynh7W8MM+o1SBNYiZjqPIMoyRKR+BkHeDpQCYio0D120hYWV3lwt4AbgYWMOtDv3g0Ltjc+mHWVr8da+PgOKZpfW3SSt74j96IObfO9S8dsnr9KZ551586zjSnxx/eTLfbDYRMSYRSd1cAWs0mWVXhhaDRbtUBjw165daHP/E46gkFq4iimKIK3OJ2YkmjJDhTH1DR/o5gTnjB/mjIRqvNXK05IYUkbbfAgcgroijGxZJunHDt6mVGR0dsrK+hdITsLpBojfCBdnW6wYnavwa62DrDr8cz8SGImQbYgd1MglQIV3thdRzwaa2xapXKfBFC/QSVtyhXYHyFsI5YBm57UX+pd46yLBlOJlzb3aeQkizLOZ68n36zAiFn9+O2heQkziYI/gTZ5BZV+RHy4hLjsmBjdYm1jTM8+7HnKCcN4maPZdmEre3pDeagPyAvq9nsrS0r0iQixdOMY6pJzr4TbB4dMSgLxGKHp598iirP2Lm1S763z9b1m8TNhLyRBF0BJYMUdqx55PVP8/yzH6DMKhJt6bRbqLTF0XDElRs3uHzzOmkjoZk2aMQJpxtNZF5RWSjzklajiVARrfYf4HAY0+tWZJOKV67eYG7+FKn1ZEVer3EZMBjOMRn0MUMbArm6/UXNMKoFATiJRegItMRLGI7HGGfQjSYViv3DINYlyPAvZ0yuXKU4s86p1z8JZh4ZXaGqHg7TITqhxOG9IZ1rYApNURQ0RMbk+nMIl4c15yRSOZ5eaPK6jQbGF9gjye5eHy0ly82IxIdqmpKat3/tn+PXf/BHOHz9TfQo4Q3f9TWc+YnHca4gEhVp3ELQYjoODJv8Vva5aaKS50GyW0hZr7MTkfgddjeZz2/PtNYIJUFKdCKQShBFCc6YEADgqFzNy2E9zlmyQVbvL7Mz+aQdz6fbXnsAwEmwm5ihoVXgpA2v8bXkqndEOuLSpYsEsQaLtY79/X22tm6F9wsZ5m192My73Tla7U4o204ZrozBntCf3rp1i63JmPnlZcZVwfIJaTqpIroLTXYP9mi2mrSaKUVR4PlFmvHH2Lp+i8FBkAidVS/uYZPJuKYADQIVo2EV1uQddL6VqV6V5Of4dZ+6heH9x4fhGGNvf1BO7OHtZkqkBEoKolihlQy65FqG8jT1a6XGCRAoiCH2bc6dXpsFAFOzznD5yiWeetIy6I9JGyllMSGJE6Ik4uR1eOpdf5LX7T8EiFpmPrQIwvSPZ5oAKKVoNNKZyt6d1mw00DoiyzPKPKfRbJLU7Hz4UC60zlJVAQXvdGiNOKkoEURa00gbtc/13AkansZV2weHbDWas1aU8579gwNMXqF0zNDkoCPK0YRer8fR/h5XL11CNVrc/6bTKKkCtkCK46DIOwKXs0dQoyWdv2tjm44AWgJrH97W7GjHppRGSUVWriDsGl5eB+WxHrKyIk3rcY963M9VBlsUGGORSYODre0aVJXU92TafmDGYyyEqGWUT9wHGzJPxArW/F5s9SJbZpefe9d7WV2cY+30GU4vzrGxsY4bT+DHf3L21iRpsjy/SFFsUxqH845ICnrNJk0dMZ5kbI0PGBQlSik+/P4PsHNrl/PrG6ycWia5/z6kMxSTMWkU05+MSXtdnDMUeUGz1aR0ktHoiChNEFFKO2nQSCOyrEEcxzTTJv3RiP3DAzZ3dmgkDeI4opGkREKj9YPoZJWyytk8GDHOSxbXztDMSw4Oj4iTmNF4TJkX5OMRwllcntUgSonUGlfvi846suEwkDwJhyBBGkiaEUkUMxkOcWWJVDpUYYwhDBVPEJXGXsw5FIrGhXWi9
Dm0eACnUpw3JEIhlaIoKwpb0O2mZB/4MG50NKvgxMBblxZ5cCWiKi37uwUX+2NW44TljsT4Cl8pnIwAS+ulBT7rz38j+09dI86anP7Z1+EriXcWVeUkDVE/kvMInkDIW9wLCzdFzk8D2ZPre7qSGo1mmFDxoQqrI4U1dz8LU/uksuXV3CZeABK8F0gVgZBBAtn6IFBHFPyRsOSloD/KmQoU/U52/vAJBABJq00xHhGcf+Ae995j/YkNyU9zLsn8/DxZNsEYS7PZ5GB/lyhK0DrCWoPWitXVVdbW1pFS0j/qc3PzZk1daoijQNaxYG7HwSdxwvb2LWQjwrVax//gPMYEIR88GGuxvqQRXWY83OHG9fdg7eeHw3yVcwyBx3Gf6eP2nD4d/f7fxDwwQ73e+W93Pignf7SGONI4G6RZtdZIHeOVQqgE5YKQD9KDCtm7I/Tqm4sLs4+ZKiEiBTdubHJ4uEOvex9KKrSMKKuCiDuyx5PHM62oTH2LFEiOX54mDbIinwmdnDSHI0ljdKTIJhl5lmONJqnR7XEc4X1NDGUdpQlaEsZJkiRhPMkoqgKpNEJH+DIDf/xAh2PwFKbi1uEhF6bAPO84Othnd2uT0aRkbzSi3WhyenGZCw/eR6RjXn7xOXxW8nC7jc0zsBYpYywe5QXSgROhpwxudr419h8hawS+cXVvXuJwAQvhXB1AHL8nimMqdwrrvx6t/ykwQKn3klfz0LqAijUSifcG6zyNVotcKqJhVrNpimkvgenoZljgFqFUHQ2dDMIChkHU4s8BrHg/Nj/HsHyMfPxzRI09hM7pLVe09fE2I4Tg8cceZfnUMllRsLN3yMb6Ot1GTEsJfFaSVSMOxwUuScAbDvOCg1de4eKNqzz16KMsNdssznVp1uyP6fw8ZUMxGYzQWuGtZ35xmWc3N4nSPnMLKfu7u7TbTeJYk8SaVqtJpATtWDM316UZxyBFaAt5yThfATeP1Y6y8AwnBTt7F5FShsCzCDrzUkhsWeGqMlRYqMtuXiCFJszxhKRIRBoqhxe1+p+r96myBFcGZcb6tdOl6KmQdkTx8gvgH0I8PkSIAdJHNNKAmi+yiklW0um2yF94lmLzEsoLjBc0pOeNK/NcWGrS395nZ1DhvOKxtImyFdVEQhqB9QhnkTXl8dyzK3SeW0FGYESFcArvJdIblK2OH1IhagKl6XqsV7EgVKxqPn+ldFhZs8ksFWiKhUB4iRA+MPy54yTgU72/emq8lLUBo6YEXoXxSG80XjqU90gbWmNKaA4GhwyHQz4TnD98AgHA/OIiW+NhvXHXD72Hk3norHQqBJPJhPn5eY6OjjCmIk1TjHE1BiCl1+threNjH/sYRVHUo4C6FvTxsypAeUK7OYxtpAglmJgScxIRKgWjbIyUCoQjSVPwv4A3mzz/0Y/izFtvZ6S7xzl+Svr1/xWacxaFx7sKqTReyLA5ORBUGKHC5l9ZROWoBAjjqJy97X6HwFkQRTHra8+ws/N3WTn1DynzHB1LVJTinL0NxHfcHfGzSp8Qx5K402DAO08UR3XZ9O4bo5XG1+NgqqPIJjnOWYqyCKqQOrSRwnx9cHA6DsHNwe4Wg2Gf0ji8kuhIUdYO/y7zkHuLTMMoopSShcVTjLMRSk5YW1pm/fQGjbk5pNJErRYb2XkOy6AEaYoSL1yo4ntCOwUPUuKtvd3hGhvK7tPNdEZl5pFSgLEhKT9xmF4KpFY0mk0mucL7Mwiew1RDJnmGVQlR0gRX4r1GKot0lioTvHx1M/SOZRTaDNOxp+k1q6sz/k5Jo7ot4V3IaMGDC20U55sU5e+nMDnPj/4DxeQGX/bWR297u7WWxaUlHn34IYajD7F7sM+Os7z+8Ue5unWZ/dEAJzSmKNAKrJKUpqIsHX1Tct+pJV58/nlWTp1iaWONSgmywQjhoN1usbO/x/qZ07x8+RLeK1rdOW7uHXH1hZc4f+YMi72U0lZhasQZIuXpD/Yoypw0CvwkSQQ2iuhv7vDy1ZsUZcFDDz3CYDjicP8AGccIKZkM+uArZL0nhuunkGkaaHVnl8whiXEKcAYRa8b5JIwaxxpf1tWdu3uTAVzth5idbfyFr0I0fwjEf4cBbGkpJi4wPF55heyVjyAAKzwNKXjz6WUeXm1hJgWi9JzSIQAidhgv8LlBlgLrREC4e2pmvUAvbotQjUMKLCGIPqluubq2zlvOvTVUt6QKAWkUkzZSzr74PFx8iU63yxe97fOQShL/55+GsuTRx17H+oMP89xHn+X61asIKZHeYU0dBN39JH7SzdWJzqzyqKabj0dq8E7WnA+BMMtKwa29/UAY9RlirzkA6A9C9h9K4Scz47vLOkopiqJgbq7L0tISu7s7DAZDFhcXieOEyWTM4eEhRVEgpEIKiRRgjKHVbofybKNJHMe0xiN4KSiJLS4ukkpJXmQUO9u17GxtztNOGjhjcZXFyE2UvM6Nq9cxZTXbZn/XQklSyiAO41F4EZSrokhT5hOccXhhA8rVBUdUOUtWOLY2b83IhjweJ0TNt/C53Ljxb7jv/LhmmHOzBXFb4OWPWylSitur+yKsL1nnllL6mvrz7haAUgorAno4UhoajZroJgSO3oeeolYhyxiORjQaTbZ2drDOs7qywZX3vgevZKB8VQpv783pkJclVRomVYyxXLqxiY40Da3Iy4oXLl5ExjGxDJMje8MhyxunoaoQrkJEIkjXitDectYifCCSwddTEBB64sLd1i6YlU7ruWWh7iil1m8OtLmasvpSrP8IznuM/DBq4YsRUmIHE5Q1uI4GKRnsXCFKExAaIWO8VHWwUbcnQt22DkaOsQrhywgBw19+J/Qm8NOPwy+fP1HJEHjTwvENvPzyR1hudvmSE9dTas2kLGg3Uh647xzb+4fs7u3z7vd9COdD2VVgiHXE2bOnORr22dsJlL/D0ZjcGjLv0PMdBrYknxRoKWk2mmGSaDgiURGPPfgQe7tbNNKERy/cR1ML0jgin4zZP9hFC0GW5eyNMpQUpFGEMRZBg2JyBh9PuHTjJuP+kGanxSQvgr4Hnk6ryc7WFkoqFlY2yCdjxv0jJKGl4Z25DbviHaElIEGoCOsgaTbIx0OUB6kTMAXOTgG28hiTIkLlqOrvMLp0BfX4k0j/QUTxJHlWIaTC728xfunDs9J0L4p4w9llLqy0EXlOdjhCOxWEp2KHiiS6VBjhcLMqXV3lcq6uugXa3xDoeYT0KDwxlik3V5o2WFw8hXMepQNdr9YaHUWk9XSXlJJmp1M/7+GNUawRChqtBl54Ih0HoO2sF/dpqgHU3zXj3pDymGtDOKiFqaTSHI3H7B4e3ePQfudWA147D8DSIptHB7Of7y1lG25us9UiTVNu3ryJUmGuVkrJ0dHhbDRmuqlEWrEwP8/a2jrNZpOqLOn3+4zGIyY7EzonWPY2b91kUI+55UWOlccEMc4YxkVOu9UiiRMGw4+ixCsMDo9OZFm/awCVMSilSFttrFTkoyFllZEVIUMVuSE3GVJCJJNQ9h6PeGlzi5vf+4NcPX3IM//4GdwP
UoUtf1oN5s1PW++vPs/e8GTmKc4bkLF1gf7uKKAqoajLnuvkkESqhZTV3GETKJ8FXFcr/HH/u6r2Whd5GLr/4k6+uXgrf2TVxolpYWOLC6wqHVAxw5dIQjhw6xvLTE4sICnVaLWCqwHlMbpBC00pTd4ZDd8ThorlcVQkiiKAYlMfUOsvgdWtaG7E2Hja00hkJ64r1lGOs92gsqZ4Kjm1RUZRVMeYzDCkiUIo0S0sV5hJS02impjLg0HlE7QU+mrNmCNZ2DikAGH28VSZQISIYgcBbSNBhb6arGVnofCTMwp2WTlTob6pRGO3R9HGt6DbzusSa0200z7yn0vncv9/gj0/NEBja+MS5A7LPJtxf0OuebWnxg5ntC7d5YE4iF+6R9p78rRGi38y5s9GEWyRlaAeHY3BRZQAQr46makacpwwcC4bTPHeWuXyQEIBvjIykgkngVgQzZ8/WZ/vRPo4GAQhAzzbT33rjZiGfiVBYx7Q7YTxIW0J2b57az95F1VnF1jRSaKGsx2N3l6vo1tnd3uHL10g1z+7UjwNEKRMqMKk7DmRI05yPxTfUG9ZrnRfnXLJ6OveBBTPs/QwBwk2vohcGbCrwLqI6A/TyI2XF6h1Lv4/ixu0GFbqDZJ7qg4fDaERKYL2f235yGEEilgvqnEBw4cACPRIoge2WsZry7w2C4Q16EZGG4M2C0OyRWipXFReY6HSaNtbDzjgtra2wXBUVdMNzZphjsYvIcXZb4okRpSzUpOHnoKMcPHUYphRMCqw1G68YjxuONxeYlNi/x1nFtY53Hn32a4SS0hN525jbe/o4Hm2sVSoKPP/EEk0n+RQdOfyQBgFIKKSVxHAci1L7daMrOhnXgMl9oIuz/yZdUQvpLOHwDjOm64vlnnmBw7Rz9bgtkhBKKlYUFYuWJIkB4tKlYWehz56njpGlC1O6/zie8CISaz7SmJ6WcXZw0ScNi3sCxUgbXr+lzC6C1ZjIZB3a8368NfwsPob/+i9mS578KaLM9GvHshYsMyxKqGqE1mD1xqKnSlVQyqFw1GWQURwhn6UnBO++/n/e+r8/qQtpseDcely5rfB3IS97YhuCnZkTA6ZJivSfPC6zzLC+vkLUy8jJHGx2yeilIkhif/xKjFx3YO9keT+g7x2oU0yYiNwYdydk5V8ZS+lBfTxqnrrquQynAWLyRDA1crWou7wwZVCVjUzDf64QWvlghFVysS3KaYChOII4QUob2OCVnCohSysClKSu8tuyXcpJKBcEVAdZ4psRU/G3g55tnS+CdCLV6FzoBvAuXNdTzw6ZrrUPrpke/2Q9oNnnX1PyBvdp3s/kH0lmjHisk3oX39xZ0bV+T3frZcYQyhA/9581xKtFA+r4JXLQNpEEbWgSnyIGg2fwJwkY4h/8HH4Ltzt5H5Qnif/kOBPsyeylBxk0m/Vq0bS8Q8I2SpxASISKEmAYDQblRRSkizhBJFixh95cGELS7Cxw9dSeTSUWn3WK0u0aUONJOl7rSPPvsMzz11Odex7q1CYyICYTbhCBGb0BIPKppgQQwgUB8bh7+2XuvT8z/xTvglYWbn6ufvl/z7f/+j4OWey+5NAc/8dUI68FrXFM+EbJpE7nuWEPgk2Xt65BMMfvxjSiKiuIve/YPIQAQSLQxCAQHDx7GO4FQwQGxygsmVUFVVzgXAmmJwFvLaHuLF595hvH2ACkEkRBkSlHXNS9ePs/meEjWaRMLCXWNdBZb1awePMiJ2+8AodDa4IRHxQ1HQ9eYosCXFa4ssXlBPSkoy5KXL7xCURV44LbbznDfW+7DOIfzllhJHnv0Uba3t/etvm9+/JFwAKa96q9V8LsRtvgtYAE487rv+UfQLfKmx1TKEW155cVn8N7TXznCQrdNt50hhQle3EI1LnqGu86c5Nr2AOPnYPCFkJDT132llEJrHfS8vUdGEmnDkucaRnoSR3Q786jBIChKCU+ZT0izDvMLC0wmY5w1GGOb9/n89cj9cVcfWJ7W8VgFv4iwl1g7f57L7Q7tg6uoooOI90kTT3LE7m6AuxsTGNvYPnvr8E6TaIdMY1rpzzPY/BCyupEEmAxHuLxoNrtArHFAZQ1xFE85UcSuMSwqSkZ42t0OKs/RdU3c7YVr5oeUVx9B6D61UESTEfNKobxAScFaXWNEw5kQwa+70AHdcM6RRJLYC5SHJIpw3nJYxTjvmNQ1IovpxAnjfIJNoFNJRjhyV+N9jCDGN21QqinLSDVtORNB6CgvA+HP+RmZMxwPoEC6wEVQkcBLgdUOT7fJtvdMh0LmHvzb8W6GHkzrwEpGCARa66bDJAQOUuyhQ9MAMyAJfsrLm23irmHoCyFDM+K+9d25oCIqCJbBe53DwdQHwml7QneL98FdUDbzIyTfzabfBDB4EQRtnliF9/8o/hf/BeDge74P/+IRrLcBHndNYCwluJhZpBOObN8MnyIBU+LcPnSk6WywzoNUyDRGqR6+0ri6wrsaoRI68ytcuXyNY6dPcvHc44zHJfPLK1gzQSrF1lZwE/38eYwMNxbZsPube+Sm0RdBgVCEMpSfqvlNEvhb3wTSwAdfhN8/BX/zG6B+7VK/V9bwTgd0wUv4+Gl4/5+Dn/twCAI/8JfwV3t4wtrgZYQUcQi47LTcEjHlARhjeOml9xCpx0Jg0VzeSKpQNthnfCpE4CX9UazlcRyH4D/PWV5ZYWlpiaooscITC0k9mlDkFa52CCcQUgb1PiAmEGlxjljE1M6E0pYUFGXB1fUtOjJmaX6RqJWS1xWpDmZU3QMrSOtCoBdFQWNDCGxVY/Lw+daGBEYbg5FQG42SkiNHj/LAW99KMMAUxFHC4489xrWrV79kLZN/JAHArR+oAf4D8GeAO760B3H/NXjHZdjN4D/d86V9788znLdg4ZUXn+VYVfAV3/SNSNHAbzMIQ+Kx9Pstzhw/yrnnX9n3DlPP6v3XTxEQgCNAQlVVREnSoACOMi8ITi+h1u196L7Y3R3uRdo+sPCtqbEmDlapTfQ+3QiECNmlc/Y6Etf+WP3bYV8kJoA/Dfyj0Ory5JPw5I3XJProH8JH//Cm12t/PjadmAf4TV4rHy2Apf/6qzd9jzc3foEjXOEufgQmr2+i4YUJNfMoorSGxAJYvPTgND2pWJGeGk8rjqg95HlFFMfYomQpW+BSPWFsTTjTxj5ZyGaBAETDpDfaMhntUudFIHFFETLdu1JShtq+deBF0HzQ3uGsJIq+He+eB2eCg55slP8aqrMUUUNAIiS2083Yu8bWWeH+v+z9eZBlSXbeif2Ou9973xJ7RO6ZtVd39b43GiBBSgQJcBuSIAcAMRqK4irZSBqNjUwy2mjMxmxkktloZBr9I5kkUjOgRHI0AFdAIgkSJEAQ3Y3e2VtVo/YlK9fY33rvdfejP9zvixeZWVVZXVm9kPS2rI548d59d3H3s33n+zTirM1Zgs5g6KKLIHfvkeZMR8hDbumN2bc4mT8iQlEUC6R/twpipgUWSUqS2pUZSERDWcMYoyZrB3RxvYCJaLMGTQ9e2kI+9J+hfpTOt2rA9
XN6WxIFbgbMoS5fUmcMu3+LhdA98fRPyffOkLgvLDoFXyhSObAWqZNRHh0dcv7SZV578dsc7d1G+gNG02NsyAQ7Gbtw760xe3UUiahHQIikun2ELByFmNSy2QUcpHKbNsD//A8vXc9dM/jua+sieFX43KPw/r+S5nSHzBfQcggI6j1CIFEHJ/xIYlMM+VkWVGWZpIUzbVC8g79FScHLslBYniB37CnvzMh1hjLtj8LO2QtsbayhEkE8varPqy+9Qn14iG8bvCahCiOCKSzWB1pVxAeMQllYJFqmHeNtFG7v77HWq5hOphTOMRj2aYqK3uoa1foqMqhwsQ8+pOOHgIbUotldf+EKKikhl83On7/Axz/xCbDJ8bAIL7/0Ei+8+OI7qvnfOb7vIMC7RwD+AfDMgzvkk3vwC38P/uqvwF/7B/AXv/Lgjn0fQzVy7fWr/M53nsG5Ivd7ksBjGrE5UnrkykXKYtkMdqIcd8YJp1OXwYfcx11gbJEomXN0E0OSwk012TRpRKDfKwkhMh6PFhGYtY7h6oCVtRUQWWAClnEBHvitN7zSPvDp7/o+3XtcJ5U93q3xPAl78vve8p0K/KaUtE3A+5QRcGKZEploSs1HDVg1tAbmMcnUFmoYN3NGAfoqbK/0OJjPaMhbnHEgFnUWK4LN9zt4z2QyYT6poW0x+BTtu5OMitgsOhLT5m2N4IrumQmiP7HIknQMecZ2KPVsvK0lZmMbSRz9y5tvXPze4QNSPsHk1q7OGLOYrTa186ki5hRkIZHJaMoqdIa8o+CNMX+mO3ae+tZl3JAmmuAQckOcSecfNML8DDrvY0KDFg5x/XTGbUPqVYxI14YsLqWvjZwY00UKv3vScGrdLapqCgSILYQaDfPEfjmfoT5xTahvWN9Y5fjgJqO92ymjMWupj2fM5jNih9G5aw/v7nE+F5PPURPvQdeLDzaXMCQnKk4b1vszmsvOgUFsLz1LPKolqh7VbOiTuUbaAG2N+lmSUM6j42tIxTxASprwY/m+du85HUhAFndbMvYiluFgmEjKHkCJN7VIWwaDAYssjjHMZjVgcbbk9q3bPPs736EoHa1vkiyzJOczcWiklD+5XCjWMSx7rBcFm0XJinM4Y7i+t8dxPaeJgel0ymg8YjwZUR8f4ycT4nyGn82SCmXTEBpPaD3T8SQB+SC3MMPG5haf+cyPUBQFwXusGI6Ojnj66adp2/aU8X8rzNZbje9pG+D9jwnw90m1r8e52wC+jVF5+Gd/Ha4cp9/XGviv/jHs9+Hvvv8dn+n9DFUIwfPZz/4WbdvyiU98gtC2SRGNBGISDayv9nnk0nl4Nn3OlgNoppxa0CKgDwEnOvM2S7y2TZ1JWDpD37EwxlwDSyOqMplOU2SnMFhZwdki0WKGwN7tW2iMtHXLcDikbupF+SYC/wL4MvfyzwuSVHCNkW8yKAs+84H389jFCwz+0a8iIeB/94/BY08kxUArnSlJ21EIqA+Y0qJNC41P7WRfexp55h8B/1PAosDnP/Aj7NdzSiMMen3W19cyaUZLv9dnNp3x8iuv4dVz4dw5NlZWKMuC2nvms6RVvrbW5/ln/ho3xltcOf+nePnFWxQKZ7TgrHG0eKIRrjWe4+gJDm40TdIQAPAJ2RyNoQ0hRdlYvMAktIzxWCybsWBlWHFtOudhCrQSDuI8PRsiiEOtQ4oyZaqNImqYzmdMx2P8rE6pZ2NxRUVRlifTARBNXPhREgeCLQqitkTvCPowRhSR1BbYNB7nUqtf0OSgiFGsSwA+myPL4APBx/R6NsQn+05Gboeu/UuygxGxmWsgaqqhE0xnFvJikFzjz2WqSCpFmESwoyE7CQIaA0XpCDFQuoIQoGkaVC1RkmKgGEXsRWLzEYjfIcw8OBBXEkllGkKb7vGyqqQhz7zUNpoC6a4roLvQpdm9PNHF5FRu6l8ntgmPYlMZClVu33gNbZsl58knZ2t+r46C5afZnZwBkYTP6VrnpCsTObQjDMsSsrpMqrAoYdz/sFYJ2iSOClMAPrVc4jJhUEEihPD52H7pOnwqHwiIcTgRYnMh6zmkLM2CgElZvGZt6mzpmu+rqsK5gul08kDKAidA1c7xMzRtSz2fMZlO6TnHl7/8FYZViW/btM+Flrb1uT0RysItHGhnbZIOLyxV4Qg+su4cZb/PPAbaGHBVxXBlSFU4ClugTcvc+9Rp1LQYKzngg6ZuEGMpXJEAr1kT56Mf+1gmSws46xDgq1/9CqPR6I62ybeTXb/3+AF1ACARiP9N4N8DnvzuDyMK58anX1tpYfi9VamKMYmlfP7znwfgU5/8xAKtL7mIGsKch6+cX3ymWt9EDkD95MTYLh74byL8FJDU7GxR4NsaOBH4Ucigy5DTqfl1VZwtWVlZxfVS+95kfMRkdEj0gbLqUa1U+OBZ21hjN1NMdgfwJBqnbnROqCoYSkp5mJX+6xQ9y2v7x5y/fJlBfo8UJdrvJ0IbmwCKIS9Ua22KdEJAhsnAaNMgrkU4BF4FHgXg6mSKLy2ls3hniRGGYpmGlusHRwwGA8oL5zm4eRM/mjA1BVUEFcH2+gz6fW7sfRPirzNc/7N86cWbrNk+W9ZyezanVk/fBIKWvKoRawxnhwPW4hFT45j6FmuEWiNlNAxMxTw2tBKZqWemFh8VJxCcMgxC33vWt7d56WAXrx5rLBKVYGyqJzuLE4di8G1NPZ0RfZvyPapQGsqyoFcNTjGUaYxYUYwjdzVYjG2JQXClwbc57YyQyuGZflfSphZizEIwivfJANrMLpkovJPMry42dDJT34lnkDYlwYeYgeWpiyEGZTmck4xW7+hzNTuniUtAsiJo6loRl3AU1ppcdshYBAQVi+DRWGC4lBjnigqCQ3NERdUHpkjbooWBpaxCMrK6oEBObZhkY9SRAS0PWVqDCaBoC4urUjbBamJBjFn9LjZNWtc2YQ8S335E1C8XRDgx+t3vJ7X/k7+ftGxoDIkvhMyq+kD65xXfjukEkVKkntgFKcrEVhnJbI/LTsbySM/dFYkK+sKlJylunIP6dRRYXduk6pXIrWuQ9yLVLq2SHsp8PmM+ny0d+517ASGEZEidYzYbM1hbYf9gn93d28TWc+3adT76/qeom/kiS5oyGrrIICRp3wSoLguLs44mRMpBD0QoCsPO2hb91VWmkylN3dDr94maQISo0Mxm7O7tUZQlw0GfXtUniqQ5ArmUY+j3+/QHg6yvAdYIv/3bv83u7u49jf47xQL8AJYAlocCfxd4+rs/RGvh//qp06994RJ8+eK93/8ujqiJ7eoLX/gCX/nKVxPiW1IGwORe2LXhSRtg1R9i+6tginskJX9qkcSbzecUrqQ3GGIyAG7xZlXkA7vwf/xVvvhf/FOijfQHA554z3s4c/4MRM9of59mMiWq8PCT7+HMmTMIUM9rrl27gfeBwpb0+/27enmtNVRVRWULnBgq59js/V7OrD7Mzuo62kYOj5f6nNsWmjbhBELMZZBEUUtUjLMYsckQWAtlxckm+M8W36u5J9yHCCYJlEzmM3qDPqPx
iNeuXaVuapwt8D4wbebUTUNTN/i2oW3nNIe/itXz7F6PBAwbrmBVCoZVj0nwjFQZhzrV1o2h8YE1UzJQg+lq566g1kgdE1NfiJ5pbJNEkhqcOJw1mDawUVVo6bhxfJyiSBGMaAaWC4VzKfqXRBHbHo+xxyM2w5RHVwOfOAMf2gys2pNNUkWSuE+OiBVFnFKUWdRF1zD2gznjm5DQqMHaBPZLYCaTiXwMzhgMaT4uzE9m+Fs24ygLYyzZCUgdGKZ7Jb3HmAU5Dvm1Rf93nsApAu8itZP2wu47rWQHIJDS6RpxRojBgImYyY8T53uos6kOX5jUtuZbxOS6qvqTbpLF+pCcWQEVc0c54M6tMZPmLKXOQ9vSTmf4eYMPLWISo5tmpFvEJMAcBomaiXe6cS9D30X+HTAznkTc4oEGaImhOVHpewBGMp1sF4x05+DA9RDXTyWekOr5Sx5U/r8TJ0bEEAOsrGyzdeYzFGW37yq+bWia5Xp/KjilqN8t3dflne4B1AHyCNHTNg2zyRjvW65fv84zzzxN09T0ehV106BRU5TvCsqyTAkWk7ts0kQFBLGGsiyRELGa5mMIHkJkY30NojKf12AMTWiZzWYIkomFDMYZ2tDiXJE4MySVJoxJ6xJOnI8Xnn+eq6/du1X0QQABf4AzAN2YkUBv32W6Phj4T38izaX/8ZfhtXX4uZ+BVzYe3Cm+3VMKnt/+whdA4OOf+DChASMJ7Rzbk9raSn/AijiOmxmxCSlFuvjrDPhNyFmAyWTCytoaRemZHB/jfQsihM1j9O/+TXhyn29E0EvHXPrLf5qr167RzuapHS54zpw7S123HO7tMx4dJWrclRWqXo+mbghtm2qxTXMKpxM1ElzNsNfD1X2ESFWWlPbfZdj7RYTAwWi8mKgheEJoMMFhnFkAwWKICeWtuW0Rg/pMkblAvd8EvgD8CM4ZZt4jOObzhuvXr9O0Levr61y6eJ7fefY5Wh8pjEMVGu8pQgLDTWcNo/1rlLd/k8P6SYblBeaTOf2BEH1LZQOuLBjVLSNJHeF9V3LYTClEqUxFEQoKIjYEphpTS6FCRwKz29Z0pqDnKibzBjGO1/d2mUcP6rBKUr2zAloQbEHPCPMptNeu8thKw4feU3JuXSkHDZUZIWKY6Qrm19IdaSZzDkcz+oOCwhYYSWI4RWlpRjXEVaI+DPJMtneK9zWucAsEvmaFvRhj4uK3Joug5NR+yBHhEvFVCGGBD+mcV9XUqmetzS2CWQj1VNuv0tHChrDEKZGNcYokBedMisQkmYqmbrBSdH4MqKYMmv3ThMkIbSbQK8joVaSIaO3BFukptEpSCLLZtsviejLKbnEeqsvG/+5MwEk6LP+sEe9JmQaFE5Cgpq4OAW3rxJy5GJ0D0H2HZucj89UlMEGOIN9NitylaFu671XEFlBYVFuILdJlB9B0bafugQVS2jwSOT7e4+lvHnCmPVh8y3Q6Yrh0L0WS0/m+97+PWzdvcuPGDU7oijtA6YMDu2kEr556PkNVee6555mNRqmLUpXZbJbL/BYl0ASPcwVBIz5AUThi2+IbnwCyLrGLpjmUnLY2eGINq1ubi26rldVVDvcPIEbW11YZT8e0bQNYrImJvRQSu1/piJKvWQy3bt3k6W8/TQjhnvfhh6YL4Ps+JiX8xz8F/6ufTL+398e2926MDt3cti2f+9znUBU+9cmP4X2LYGh8yyC/t1dZVsoBzWyb2V6Dxukd+8AJaMk6YefsGXZv3OLMxUtMZjN61rL3W/8p4bH99DYD3/z5b/L01Q3if/ITaARXOC5feQgRx/7BVTR4er2KwTCREs2nI5wradvAZDxNRDUmtWGpRPTnvkH7C3+fQ+Dsz//PKP7J47iypHQX6A+uUNevo3pSs577gKtbjGsThsmlboMUKYYkrFOYFMmhKfhZXHNL4oqYEwTqWYN1cLS/x+3bN8AHpk0NxnL+zHn29vcpBhYRJSjM2pboW/xkgtv7PxPM46yt/ynGB2PWsDD3uKpiNjlKClxl4KDx9K3Qs4FJE5mhNHFGIZYLZZ/QzKic5Ti0BJRWFYPSimR0tCFgaUNkXYTjkHQvjCTCn2gFIw4tobCGelKz1d7iD3/UcWVrgtVRqpkHAzFtwINwkgH4vQ/VXNiY8cztGcd1hRmsgijlKkx2IfpIJGA0pdsVTdz5PkfhJP6Etk3iOcZafOtzff2EnS+H7mnWGaEobI7S0/wzIkkhjlRGsNZkB8PQkT6l+a8LA3eC0E4Zn1QGSs6EzfgQgtCEiDFlAgh2bVveI3YLmrPE2XMJDOkVcZnkyFiwoL7N96qFaMFUyeB33pcuGUFJyohpYnaG7s4U++nfO20JjKDStRaG5GzEgCscWMmbfnbyWc4ynAAsxbjU0kdLYvVZMpj5P5rP9278TXc+bzcqlHydgDqggrJMAESviG8SIFByZlGXav/Z2ZGcHdHYlVR8ftYrQEkiBNJFZqT7cAier/+rf5X/lkniVLK4l83aAA9uqEbms2nWNalBwRmBEJlNp4SmRWzKgmFsopp2ltZ7DC4xp3ZiSjFpsIiYVKfPmJimrRlkwqowb/DWs3PmDLPZlOgDw8EKrW8zt4anv7pOjh3w2dCbEGk18PWvfT07Ju+G85fGD3gJ4AGOaJLh/z4a/+XRRVGf/exv8ZWvfjXLmgrGnjyS2AZWqiGDlVVMNTwNpqIPfAJ4nYQfCESNFEVBOewzWBkym86IdonkA8BAsAEUyrJkc/Msdd3w+msvURjY3D7DE+99H2fOncX4htWVJFzSGwxZ2znH5rmLrG9uUVYV8u9/O3VXlAHKwO3/5v/O9I9+C2sK1td3GFR/gtVBHzUnG1kMgXmd6GxNGzA+tUSJ6XpvSSnj1oP3aGhPQFAAfAN4Dd9G6iYwredM57NFXXEyn3Fzbw8xlq2NDWLrUzDWtDTzmmbuqSffYXUwx5R/ktf2xjR1YK0AiYFp07DSW6VuPW1R4EXxCkc+YEyBitDEQIyJV37gLCtiGIqhyBFs1ICxnshvEcw1ptoiqqzaAtuR5wC1Km3UJJGsBtqGJ4b7/OEPjnh44wbSTvOccIkYKHOvLE0RBvYWHz1/lT/5/hG/51KNbccQDGVfUmt3yEx5GjMa3y7UE7tIy4eQGN4AjXpCuONTXXLRay8nDmf331R2SCZJ0MRKmduorHU5tb+U+M794DEGEBbUx9aYfA5LKWWyMctOc2or1CQPbbcQ86fQeUGQEbiUak/8QpKySFXJQtcYcjeAh9Ck+y2y5F1KisCNsOgQeMs0dCT132c57I4/QxyJsa8gepLhEIOoS8ZfltL8Syl/jTXQktrrljd9WWAPZOne32u8fWOxXH4AYyKEBq2naDtBdQ40oDWqNZwy4nbxOYxgywrX6+cuFUHMjwDn0tElMdgtcCS5lNhRQxvJEbXmbNASY+yDHEn4SrtIDEVofaBpWyazGePZjDYkDRNrXAqSsoFXBEwS6EnZr1T+7FUVmR+bvb19dvf2mM3nCyKv1idJX1eVOOcoizJ9hlQic84lrIGRVCaKkS9
/+cvs7e/fHznbOxj/5jgAP4AjAXcjX/jCF/nGN7+V2PGWnnfhEtJ4dXXIYH0NKPMm3P27DbwGKL5uuP7qq0mV8LnnuH31NcbjI/hf/OSpLN/272zzsf/64wxX1tjZOUPTzNndvYW1wurKKvP5jKuvvsre7m16w1XE9Vjf3GJzY4Nev8/k6ICdlR4/9sH3sP4ffRvKE+Oua3Oan/0y3rfYXo9q+Bhl8TFm8+li4U8nY0ajY6aTEaFpErd40xJ8SFGbS73NNmrGCfgMkFkev5H4yKPiQ8t8Ps0154BGT9PM2D/cZ319DWdsksEVQVvPdDqj8M9g+QSHhzNqDxu24JLrUaJM25o2RFzV43hWU5UOW1r22pqxgUk2is7CJLaMQqAOKf1pVCiMA7WEdkYbfo1x+DuM+G+pUbCGJp4AMtts2MQoEiJX+pHf9fCUoUxRDFImBHaSwkuP3FhZbJ6Q4i+0pZB9njp7nZ98pEVmx3gM5YbFT0Hje9F4JQOOUuQffEzpT/EURTJAMRhCm3umc0nKiqVD7C8CP+2UC3MlN8YFDXDeu0FShAecwo10vBIu1z5jVg1d6GnkVsUYUs010fjaRQJiQSnrtwn1ZeLx60iY0/XWSt7YJbfO4RLAUsQgGqFtTv4BJ8DApXXVsdzJWzkBXbTvIdQQG4SwaKuFgqZuqCfTbGxyylwyqj5T+C4yFJoi/xMey5PvWRzTpHLFAntx6vzebt18ue4eSfiCGYQJ6BRhns6LkP//xDFJ32QXxzGupOitYMSyub3NhYcfpur3F+8tMwvs4pszMj9R9DqqwYDe6hDJbdDxDdLe72wk8OsJ2DPtv9P5HGMdUVKXyaxpaNoW7z3OWGyeD6n7R7EudQNUVYm1hhAjMXfNeB8ZT6fMmpp5XecaW8J/WWux1mGto+r16A8GtDlD5WySZTYot65d59rV198VB+jO8W8dgO/zEEla9Z/97Gf5xje/ueB7BnjiscuIRlb7PYarK9jhBkkBTPMm8SQJG5HQq5PJGN/MCcFn2dOI/sp7kT/x8/DiBuvPbvPnf/ef5+FXH+ZHf/ePY53j+PgQQbFFn9Fsnnj1Lz+EKSoQSz2f0ysr1FiO9m/xvocu8Qc+8TEe3dlk/fjM6X3KG9qbPQ6bKXt7+zhZw1Y/y7B6ajGZ5/Oa2/u7XL99jf29W0wO9onTKdJ4aJP6X1KgCyllHbpm9eXxOmE2wRWW+XyWsh9dDdkH2rrm4OiI6XzG1tmzNDEQLEzahttHX+L69F/y0t42UUsuu4JLTrG0rBhDIHK9nXK1nTElYn3gwpkdiqKgjR5PQMVgoyOocBQix0QamxKfyWMfAX8NMUojN6j1WVqJjGiYxjZFDwaqsqRfVakFKMz4xEM1A7+bjLqRZLBy2ttIZ+DuaKsTMsGPRXTKhZVX+PTDBpnXbGz0iLZG/BYah2kjiomaN4GNIsYqSFjU2jODT2bpy0Y8eVcsxKSERcuSZPXCzo4qZGrpiC0EWwqqp7UpkvZ7xxYoC7XBxTEyOCHJA0eCDymTgcFJgdCH+Kcx9Qia26gpU2t8yaJl8SSCjxhboNLVr/O/dpYIBbAppbJAJOYTNTkjsKwD8Kb7cVezn6N+hqHFSkSjJzYNicWoIMnwupzED6ToumE53b/ISRgw1iyygit9y/Zqj37P0h84XCEYtwAvnFzD2xrLn5Gl37tcw70uOnVhaL43SoQYaedT6tkEZ23q5Qfg51EpiJwuTXQ/O+cYDgfU82nqANDEVPnu4x5Oyhj7R4c546AIEd82xBiIIQEHoyZujE7HZuG4Zme08Q3T+YzxbEp/ZYWV9TV6vR4qBt966qZGSAp+VVXR6/cwzjJYXcE6h48nZEkaIlVR5Axdt97ePUfghwQDsAsckGiC//UaXV/nfF7zW7/1Wwyi8vH8t531IZfmyvW9I1YGK8zW5oz9nFAfJ+OABV4BrgAboLJQVsu50PQdv/xe+JX38jMKQ8BuFHz76W9z+/o1QOn1hxRlH2sN1hYcj6b0hisc7h5grXB4cIhxhsvba3z88Uc4vnUDLRw/9r/73+A1cvW/95ugsPlLP87mf/az3KyPuT0+Ij7/HTY2N9jqfxT4e0BkNq+ZzuYc7O2yf/uA7c0dtre3Wd3YxOaJb5wj5PYwE/UeDgBsbd3k2V2TFlfHbmcsGlPbz6SecXP3Fh947L2sVRU+BI5Hh/jwLWr/cYKe4ZOVsGKFGC0+gBXlnCl5tp1x3Sdvf1AYXnvtZUyvj4TEC18hNNoy04BgkRgImiL6qBHMPwLdRUh63xofZ8WWifwnan48gRgCUTwGZb2ybA/H+fl1nrld1HQlg40UUudEvg+RtGGmGD61zV1Zu8Hz++c51gGDHcv0dsCVJJ79JaciAkgqCYhRTJdqV1A1GZSlqfSwxCMBWUVQchyag1jnXEqvSsIVCJLr9ktlt9zql/yZ/Iw7rYrcSpiwf4kYJmhY3AONER8Ulaew1uIn19CiBlOh1CljlO+PWkGcSUE1MRvzCISUolcPvgWXQKeLdroFMLDLunR96gL3FZEmRyCGBJpLm3gi7hHNug8CGjvD/8Z17qrs4UOTSjdiaBVMVIxAf6VHDDAZzyiqgrYNNG3zDkzmnZ98syNlFsVFu2RSQOn3KqpeSRhNOL61jyECFWjqArgznd09V+cshU2MpF1mQFV4N+xe+s7Tr+0fHbM+6GOLAvURrxHvPQUGFajbZpGBKQtBSO2xNoSk1OkMAeV4NGJ9e4uyLIkxpPknieLXtg1VWeFjwBVFUhIFXNSFHoQxhhgia6srDPo9Do9H6V68K45QGj8kDsBLJPDXv34OAHRRi9A0DV/4whcWDkBTtzx85Qz7x2Os9JisDJjNNontPDGQAfAhoPNru6mS06hLEVunhQ0wm065NZsTQ6A/WEFVmRwfsLl5hnnbEFCi9ziX2OjMygr+aJcPf+hJ6skR0SXykRiVj/8X/wHbv/F+jsYzxn/jfVQ+UhK4cXCbtV7JK9deg7MfIDkrLVjHfN4ipuD6/gHX9g+5Mh5xsZ6zvr5O4RyFTbwGySu3qQ/5jrGx8xXGL/5YQi37XIHWmMBg6vGtcHB0iAqcPb/D0y+9wGh2hNFnKeVn2RRJ6myxpMbg1RNI2uCI4DQhoGdNIg3S2RTVZNQGzmCkZOxbZkEpbSLiUY0gL4MmXEYH9pvVP8q5Xp9bs+MUiWrMgWeK5X1oOVNFKueJIeHTYk77ds8uhci6XK4FUvtkVItIIGJQaxkw4eHNKV+81aO/amiOFfXk+ZBx3F4xWYhNSSXMZHQl2cOc1hfp6KOXeCSiZnBgqoOqxlSaUDK/QGfkE7bhdAUnGfwYQ74/WWNAItYlBkEfYwIvZgch+JDLCOmGGPPfRyb7xPoW4iKmDURnEOOgtGhoIYaUGcESNXVgoIrGOp2nKdJ7QouYCtWEZF86zWQssnLhSeR4+lpOv7Zssbq0+tLv2qY1ql1nxJuT9fg2iXX1qx5bm+sM+n3m0wnT8YzpcUOrSlkUXDi3jm8Dh0dzDo5Gb3rMtz0W5D3L12oQCp
T54jWJFhOV7fV1Ch+xVZ/V+RTaOfDjwK+fPiwsSmGj8QTnCnrFkNlkfKrU9K6MpUtRhMlsBjHQy9oKltSBElWT8JRNgtfee4LCvK7plSVVWSY/0RvG0xn7kylalayvrDA5Pk4qo2VJr6hSOcE5XFWhJhNp1TVo0htYkLVJkmZfW1tbOADvov1/sA7Az3FqCT2Qsbz1/wXeLr/VG4/hW7/lezh0sRHX8yXhG4n0ewWXz+5wfW+PwUqf2SwyCRPacY2JmnqNE5/cqcNFNHHPx0QPKyQ+a1RxxiAhpYKNKQgxsH7mHMHD8dE+4+kIhyWgDNa2GR/tsV1ZHElqNsREL1y3NZObM4bf+QjNaMbV/RG2jLntLrI3GfHYhYe4fvsmMf4Mlv8366ub3JCAxppzZ3Y4OB7zzZdf5nA85sL2BmfPbLO2so0LEWst0usj3B0KiL+FNb/K2vrPUPaG+DYBsY6PDmiCglFG0wnffP5bbKxvsr9/iDZ/k2D+Pc6XO2wH2G0jxikDseAiU9+yT8P47BHtr//fwETsX/7jlF/6MPP5PKPGlSZEBqVhezBg3DRMfJv+hGC5TeQAMqlRJGKc5UhbjkPWXxcFb6iJlGohKn0XcNEnJT2THBkB1CSCHo0FIp4um76A41mfRXMAScyJEbi0EnB7DW0UKEMSfNOQyJeiSZkJCWhMksMRT1Q5ScsTs4Z7Mn6dLG83vwRJLUxG8V5ZtKWTHBtIxzAST5RzIW14RkG7OnsyBEYMMaQN14jB41GrWDE4dfg2Ik4Q95NIu0l9+7eQ5iamKImm052XlDoOITssEbUZwxD8gv2P2IIr0v2IIXUHJEWldI4xZQt04YFptkZdRN/9nEoMJzv0yU6dQL0xt1AutfC+1WYumR9BYG1tjStXruCGmxwfHjObTvj07/kJpkdH7N++ye3XXuFoPmK18py5vM7rtysOjyf3wMx8t+ONHJxAh+xHHGIqorZMJg2vzKYJBCvCxRh5EgGeRPW0A9AdtSNDC21qW16WVH5Qo7sKyWUm/DKbXtp4p/MabyylNdlR1KQdFVokcfkgxtI0iWzNmARcbVrPrGk5rmtmocHv3uaR8+dpGw+lpR6Nmbs5vaLAh5YVk45jRBKxkhGkLDHOIWiidY+R89vneO3q62kO3ocntCDPElngYO5nvGMHIJKY1C+R1OHezbHxgI8XSEzzP0hjWTO7LHs0UTl7ZpPdg2OG/QGTYUsbtwmzERpncKfxXwyhV/VovE+iG97TteHM5zWtQK8s8L4GI1T9PpXps3/7OtoGWg2UvRI/nzE7OkC31zk8OMA3LU3rqZuGeVMzbzzzuqWO4MqSWiNBUn3zxsEeTd2y1h+A7ICeISD0iz71XFnpDVgbrLHS63Hr9h43j4/4gIfZes3GxgZr2+dyS9LdDsATDz/EN15+lfXtCmXIq69eZXN9jaeeeh/PPvc8dZNwAddev8rrr18FdtFY0DdrrKhQlAXzxrMbGy5RUhUGtT0mj7/M6Jf/H+iVQxDwv/rXufRn/gqv/+IWUQJ9FyiiZ9YoM18zcD36Zck0Ro7Db6P217HR4bN2eszCOnPvaWPaMFXaDK4Do4lWVcVAjjSuj9dpGoMrK9YGUxotmbdDNnsTxAaOxyUXu3awUBB9AOMQyqzbbtgqppyxa7zgI+VAaRpQb1ALAY+LgjgBFYIH58rksGSkfarVJ0R/ggXcUV9WwbeKSEQygjlql77NhCmSKYrvkHsNIQEp0yzNh1Myt4BNmYcARQb/BdUsivIB1H8Gdq/B7FUUT4h1bvlLegoJMGlz6j6n8V1B4j4OyegTkgPcfbFvoUx9+QmztYyxyGDAe2YBTkBwp1r2JKsoxlRXXgj2vOWQ7FsYLj30MKv9NWxvk+P9fW7cusXm1g63buxz6/Y+61s7PPmhs9x85Rmee/kGz1+dMZ8nhsCuufCBDM1poiVsAHQgxh4iuewTFXGOoC0h41zCPVL+b2SY9G0Yrbc98vcWWZU03qNVUoEmhlyP78pwgkk2GodQYChIrIDzpqb1gdp7Zt6nhlERZrM5z7/0UuJFUUfRK2l9S2Etvg20TUtZpnVhrIWqwDpL8jYimETZvXNmh6pMBEUJZ3HitCy3ep50I5Ssrq1z+fJlLt+6Ca+9tagZPAAHoAH+Nt81Tc9bjhXgR/PPv0kiCH5Qw5NoZX6QxrKp++KXvswHf/9PUBTKztY6k5u7rA57yQD3V4jtjJNq8MmEltyCNR6PGa6uoCKsDTdw+3vgW1ZXV1kzJnVIiWM0GjE5OuK4PaCZT3GFw/uA9556so/GyP7xmG/PpknJKtd22xiJKrQh0qiytr3J9PgIH1o6Lu3xbMx4PkZ1Hfg0h8ff4KAoWen1McYxGPYY9iuqqsc3X3uVL7/4Ik9dOE/ZQK8aULmE3r7TBSgbz+bqLZ599h8CH8eVFbduXceYyIVLF9jd3WcyHqNisoLcZ4HPYFTxGph4qCWBblqZczb0mKqw/x9+jvjQ4ckXuchr/8l/w/Bv/xUqU1Dl9H2Ra9gxQKsh25pfTfdfgRgWNc++KZjHjtAgICiFBAqEVVew5+fQNmismcgKX30BHrvc4/jaIVvrFa2p+NLvjPjjP77C0y/VFK7kQp4rIbYJI2EyMNQWSUnQjFkpPNN5w/qgRxh64hgkJMY+r4r1KcOQ5l1npFKb4mJWxXhHZJZf1640YRONsIZk8LOKYSc73DkCi7lpTgxpxwgILFTRUttgwGTGwrQZK04d0XwEZlP88dcQyShxjSkzE3wy/IUFW6VrUYE2JDpem8Rz1Gh6f+wUATW1BqpL17Iw9N2DhJPI/w4nYFEe6Hr5T5gM27a9+/1vOZROEfGhh57i2rWXuf3yM6xbj/Vjbrx+wNHeq5zb3uDoxpjjwSY7557g/Vrz9NVbtG0Cbz6w+F9I2ZPF+QtJvbIHLgMrFUQiKm5RikI6ZoDFkXL757to5N9saE6tu4Kmqd+wu0CB0JU8lFOYBYsm+m4rhJgyVSrJYY6LR5zKX7cPDmiHK/SKAls71oZDClfgXImQ14YxuKpM8zVn3FCT5OHbln5ZsrO9zes3UogqizmYz8danCvY2t7mzM4OW9vbbGxsYJ2jNzq+71vzQEoAB8BnH8SB7jHO0jkAv86XeJTRD1jy/kGNzjlenpq/8+yz1BtbfPxjH+fhK2d4/eYeg7Ki7jfMVzeZz44SB3ryD2FpoXZRh29abFEu6ksAirK5ucn+3j7zepTrq5HJ+AgNLU1jEGMoqzKlLgWmdUtdd+DqrlEppULTwoHDw4McKSaOa4NhUDhmzQnjW9SAdUniNERPYYT+YJVe0WPQH/Ab3/oGX716FbUFHz+/g+7tMmju1m2wojxy/gIvvPpZfHiMtlkH4OaNmzz2+BNcunSR1155ibW1TYriOlevXkD9GnVoODTpbh37miBQquUVP8OZkrbjOVn2q4JhYCMXjCNiuOU9jUbURObRp8jB//+IOgGvhMyLnozXZ+gX5xjNZ+meqKSOBVITVU8clW/xPqJNhZEppVlla63mkZ0WU66zX
7dwY0B/OGS/iXz03IlaWmhcikqM0oSSdr7OYHAbNcL51RJtxsSVHrgxSCAEl0TmrCWRjqZWu+AT2C4qEBP4z2QD2Rnt5X0zzSUlhMTVH3MXZ0caJDmPvWzku/uZNOyTg5gkYeMJ8YuAakBxxNggOFQEKVbR+lHi/ldQfwSmxGhSL1yI4GjqJCH4HKFmOuSQHR1Dzg5U4OdLp5TKY+qywp6cMl05IxA4MejdB/O1ZYdpOfWens/bZbJLxx4Mh4wnB3gfiLFi7NKzpw089sgVbD2irj3NfMat0HDm7Hs5dzRhujcivC2H481HOvWuqGuARA9M0UvCSt5nXIrm3k/FWsPF8xfZvX0L6pN1X/V6iZ30Hmv53R5Vr0cIHpETR/MNh0Ln2i67MAFOZbIUsqe1lNXI/9fGyKSeI6qslA7fJjVWSGvJFg5TlimrlR22JOCVcAdGHSYKly9d5NbebZqmTV1AYnDOcvHiRc6eO8f29jaD4TDbjkTC5H17V8btzcb3DgSYCpqccL++3Um6x5shZn/YR+58OrV2Y4x89Wtfpdfr8YEPvodHrpzj28+9zLDqUQ/W8etn8QfXE13o0gcTN3X6uW7mVEZomvliQjdNzXQ6wVUV7XSM8ZEVhlRlQVOHzFIVmE+np+ygP/kCoItt4uKrfezekT7lVTmsa8zSpnnzYI+pK+m5kslszs7qGmW/h1jDQ6vneeLogG+/dpVv3b7NcdvyiYcfwcznd7l9RwcHXNw+w0/+vlVeu77NtWurTKYT5nXN9etXE7OXKsTrDFd2sTog4lCN7DU1O70BZ8oeYx840pYYAhJa1v/Xf4zmwgHtH/122tu/cwb7E3+JWZjzStuiIkwRmqhZOE4R+ZeofpFFJJh7f1OtcAsjFY0/yo6TQBDUChFhQqSNnuPxnNqtMPRjHjrrefGVgq3tLerxnHOXNzB+Tl2XrFnPreszHsrLqByCl0RXfPtwwHOvC7/n45vAAWv9GW7aIFsW6ilmfpvQvwBtwDuliBAk8fAXZYGS2g11gdzXPA81RyDdc+wi+JAVHXVRy++M/qJ8ntnduhFCXJqqqR0whCS2lBIGmh0JjwZLjAbiKqH8C+jhq+j4pazg12ZSHRDncjYgiQVI9Cml6iLqXGqdDD5pwHfSy+JyG2B2ZUObBIGsY6FfvLD3ibUvcR0ACxCfLjx3vSPu/q4C3ex4DFa3mIym7N66xtrOoxg3YNiPnLGOJ66c53ivZPPCBW4d7HHt2j5nz12G49e5cThm7LtsxIOMtLPxtz2k6mfjD2ItGnya08agwVOVFR/+4IfYvX0T+9V/BRpRdplO71A1/R6N1HKbRLAmS/vZm37mlOk/GUquLN31ave59LsqzJsmZR2akjNnzjLs9Rj0K4xLKX4JIfG+OIPNuA/JTpRkZ/uhRx6i7JW88NLLnL94ibW1NbY2NymrCqJSt3XG6eR5mdfh2+me+J44AInswWat7+RdRRPv0of+N33cuWkoSl3P+NrXvsLGxjqXLl/h5VevM4uBwbAgNNscjI+JcQz3AP50WYV6Nksb++J+C0VZ4uct/X6fdjZlf39/4fUqehrvRLelnK7hLZ/uCX95XggLb+ak+wDA+cBz16/yyPlLFMagt2/xeL9I4i6m5KFzF3jmtatcunyF2/u7/Pp3vs3vre92AK7fuMHs8JjNhx/h8ce+yfbO/4RXX7vKq6+9wng8TgZLfofx5FcYT/8AA/kI60VJMFD7wPF8ypor2HB9JlES7oHIwXGN+bk/Cf/7NUAx/5ffhTku0eiZmK79LS2clMw/QvXVk/uDpNS1JFIah+VoMgHNHeUSUSNZBCpBOCsNHE9aWl+jBp7cmXKjaNg/tlwYtITbY94z7LN/7Vk+fK7h+uHKyd13G7x6q8/lcyX7bcXZs+vEdg/XC5RG0ZCJZcxHWDv8Z7T9FUZmlSIosQSJKTOkpCmUkPdwQhqUouDgT4hZNEe2CxrcRX0yRe/djOgC5KY9cdxFTGrxNslJCiFtmEkRMgPgjCVqKjMZMwf385jRlPbg24jNplZdAjtl0qF0PLc0LzU5H0jSAxCTWQA1tU5IJ08bT4y4ehI4UcmcwGgUOJV+laWJn8mQQnevTq+I0+M+9joFEUsMgeu3XmZ7fYenLl7A+kAoA73VMxyPjnnl2jVujSesDgZMDm7y2o11tjcvsDa4weR49Abm6+2M5UxHfoiugKqP2iJlWDKItLsfkimCZ7M5+7u7fOCp9zJ7+VXYuwX8g3d4Pt/9UFXm83niV9H7uzNv9p7TmdY3cAVE2Nze4eMf+RgvPv8cB4eHDM6dS2srE4GJdRAiwbeoCkWV5mjaR9MxgkYuXLjIpUuXkaLMzrKnrk9UX40xOXOs35XT+T1xAFRTRKnAPVNiS+CQXHb6fjiLP3gjlwQOjw75jd/4Df74n/hjPHzlPN9+/mUGZYkMYL6yzbSZ5nrVkjcqJgeiKcXb1PUi/dU0NdevXcO6kqIsEFuibZOi2a6csEhv5lPROze4NzhZcko1g1NKV7LiIkyaxXEaibx0+ybGQLVv2NrZ4sLaGmXRoz/oUbmC0cEBn/z4Z/jiV7/E7miPR+74tkbhuWs3GL/yIjH2CPHj+JCU/tA9lH9Cah39Iwjvp8Bw3g1oYk1RVIxNy83YsN+MCCpIUeCMpW3mxLrC/sd/KAHYAGsilTH0UNQlo9cqTKIS5RB4IaXEY94g5CQG6xcVkUR12lHKeMllaLFINJREDkdjjserbK8XiIcLazUX11N9WwXOr85RWzE1lijdkZTru8q13jq9jS1ujiasDCK1OiS09MtAv4i0IRBWfpRe/EnO7n+O5zefoAkD1DeYXK6J3pMEWmzuOtBFJ0IyonZRQkoiQYl+2hjbeXzZcUjUwSmqt2mzW8oAaIQQM1OfpGOFGLC2yPrxCVehRIxGVD+C1Gu0t7+CxFGq5We6YWIghhRNySmhrDQLJUA0gpQKpUsbS1Dwma/eueSMRI+QSHswxYlzAKRQN6VftSs1LGpEXfnhXsZ/uf5/f5uZIJS9HivDAZPRHmaQyiKrq0MCkaP9A1ZWSq5cfozWwMHudVrfgBjK/gq9qrzDQH23YzmqlZQtcdWSPkfn7JsFBW6SdYYYWhBlMBywdfEi7N26r6j73RwaAz4GxBgKV9C2zdsszZw62lu+o1/1+JFPfZqzO2c5c+YMX/3c56hnNZUrEGsprSPGSJsJgsQYfOMxpUvdAPmGxZgc4xgCGua5dNiV307m1kIaXE6AuPc7vmclgHvd8EWMuIgsWDgD3+9J84MwZBGZwMHhPr/yK/9f/tAf/iMM+33qWql6nuHGOrPjXaQdnfYAc0SVDnTH1qgAkdA0hODZ2N7ClgV1XdP6hmY0ZXVlhRgCs/l8QVTxZuPOp6tZ+rTqGy6f3ca8dA0U2gzCqsMcjGEuyrXb+5yt+gQTOLOxzvnNDY6aKa7s8fGPfZrLT38NXnju1PE//eJzfDzGJaDOLwB/hKSS+P8EJqA/DbwXNBkTOzvI7o0wPjuhqVqKUUnvsA+NP42uXbomiSni
ufuaD1H+VjImunQP9OTzZj4B3jgqE2pA0KMx1d99ObfI5YUpmozWUi/2KsL7AkhOU2+slnz8iUA9eo0Pnj3P0Rxefrnm/ZcH9HqBtbJlN3oqYxhv/AEu3xqzWb7K7uAKVj2BCo1JhEcsSRMdg81lusTUl4Bdy2uyY0WLIaQoWDWj/pMTYG1HrnMnFXDMPADpmowYSqmI0ae2QZOQCSYoQUuMXyfe/A6xuZqMc2yh1SS84/qY/iBhBEQTsC/mljuNqRU2JGCiNindL8YgxhINGAIaO3rbmGx7DCm13ZnSLoWc9RTunAGqJ2WEO5/syev3aWyMsr65wZUrF7l9/VV6a30YWr749Df56Cc/we0Xn+G5r73CzqNPMFjZ4Pj4gLX1AU8+doWtnvDcN0lljEUp7p0OQekhRR8pekQx6XqNZuBk5xDFpXtjuHHjdnIclmrRInkehLdnoB7kKIsCa10WZnrwowt6PvrRj3LxwkWmsymuKHjsPU+y9/o1mqbJbX2p1986i7jU9ZJkvc3JvMsBV8y2MBFxmcyiqYvvW8APTm3+9289v+dEQItTE8nMYXGhA26NpaxK/MLrSS0ti4I2z8OCJudf/3GaOhN2d3f557/2azz1wY9xbTZCrGN1ZZXp+g7j2xMk16AXafyljSeE0xFSQnEnmt2jvb30PKxNIJKgzGZT2uZePPz3OyTXpiTVVfN+aDTpu0eBshjQtnNeuHGdh9ZXuNDrUxR91sRyULeMxiMKV3I0WGXbFRS+zUeGMgTKU993G/i7wLP59x3gDKcXQ7oDNz58nV/6pV9i7z17PPGPn+Df+Uv/DutX1+FtRwV/C5i+xXvuxwBo+u76re+1wOK6dXuNb9yAvo188HFlKC+zvn6R+fERLXOcaSiiRb2ilTJe7bN//UnMYYX0jmnV4SQxlvkYKE0JTUgJb5FE+Abcswsgas4AyMLZ05gBg5r6kiMkhcelOWRsV2YwGIHGN1hxGEn4oBADFpvxVev40Tnwz4KrMD51oGgGphI746xoO19E46InTIN+cdomRfxYVGzCAeQTkmz/U0TfdGjG/NSSeNGC7vAORbs3fr732/q39GyjYXV1i62tCxjr2Nk+y6WdIeE927jCs3HuDNdee4GLly6wtrbK6nrFjWtXqXo9iioQjdwhnPW2vv2O8xWEAjVZ6jfMEcpURgw+YSpixkKIJtGljAXYPRgxms7Z6eIPY1hbG3Lx3A7u+Zfzfr5cYvjejJQ6fzB9ZHe34qW22UcffZwnn3wP82aOdRYfPVvnzzLeP8hBWcg0w0LhbF5nssicaaYcTnTYLAJAIH32FKhWls6nC6bfXuj8PXMAuogBcmRkUi/kAi2UDZePEdvrYWLEKBREGE3yUf458DHejofzr9MQgWvXrxE18OhTH+PgeI7GyNrmNpPjfWJ9xGJBnVrP904LKqQJFhL6usmkHAI09Tv1kpMK3GQ258bu4QJ/YFLRFFGTmekMo7rl6v4RmxubDNt1fN5gDg8OuHDhMjcvXEbKPqvHB1x47mkU+JdbFzieNIllVVzGZO2DX0HbPshPU+olWoFSUrvdvG1xH9zj27/w99l7zx4Az//B5/nlv/bLbPzsz8GoRLSL/pJDakkReIyJItRZR4hK1G8gTHLK/6Rk0uEoEMGK0HcDZt4lDHkEk9PXURWNAaOJzS2o0rDGhTNDPvKYEmdTvEY0miwrrOm+icGWSYKUSzu00zVWVipsMeK1WyscH0z4wBORMAugSS5ZvEfLCtNfYSIl682TyO3XuXrmGtaUObuRevrLytI2Id/TzIYW0/cueCY1pfE7lb8uBYkm7v6ujiemE9Q5GWaRSgcfwWU6Qu1q/nS4hEjYXQW7h5QW9T20yFSxMff1S8gRuE1SujkaXzDtxUz926XjfUSCS4Q/1iSjZav0ORXQJisGBrBFXiE5kyF3b7pvbPy/O6MmznBmc4vN1RUGgwEGz3defJbp/JDJ0QyxPTDCq88+i5026OoKMhyyMhww3btJPXvrTN0bj0W+a/FPSeqJ2iq0BnG9RJKU69gnKVxBnAXnwDpq9dzc3edimRXvRNjYWCMS73Akv3fG/0GPu3UNhMuXr/DJT36y21hzJ01CvfVWVzi+cYuBHaDSJtBfLtNaKRI2KNNrx67zRjv+1pOsZAwh8QfcwQkg8kbQxTcf3zMHoEv9KSxaFhYzKIMEjTGUVS+vK2E2ndDcFZX9m2n8oYvalRs3bjKZfZ4n3vcJZq3QHw7orW0zvT0iEY/oXWsr1UzfZILoUsnhAZxrJ3PZtg27h6PFMcfe0wqJha5JKm6NCm0xoLe2hU5nTOZzTIBeaXHWoNFw+8xFWF3jwnNPA/DC+hav+Bp6DmJC0up8n9i0IH8GxybnqgKvkaPQsiFpfplHptz42I1T5/rCT5DQv9kAAQAASURBVL5AuaLEsSOoQSVSaMRGMCZ54NFlch81tPos8I+xZSCEzNcf8iKMyeB0/9syBVMKZqbjq3FJAyBGjK0wCk5Tj3EjLethBekZrmybRKhiK6wqYi3GREpX4vollIbQX6W4aljtVdTBcBzW2Wsbbh04zvQNQuK6j7HFeEPh+uhwjT5jVC+werDB9Ox3QE2mxvcUlUWMJcya1AmgIJhTWV7Jk1Eyh79qV/JJ98BkedskJ8ypxvQFT0Ag0/SG3GaoFGqJCFFGhGvfwcn7kDIQij6YCeoLEA8EjBSJ+lgNpl8R1SYtAB9BE55AY0y1/e4EYo6mok/PKCbiFYoSrCBNi2pEY0BMcYf/3OEf3j1mdqPQ+jllr88T730fezdvUw6Em9enPPz4ZRK7QcnDH/gAV198mf6g4sylKwx7jms3rrE/nb2D8ztV9GJRFoETg5ZJprIpWtyTxLRIIscwlojj+q2bfLQL+DRy++YYVz5M1I6m6IfX+C+PLqhdX1vn05/6FGVRnCp9KAoR1ra32b96Ld02TYspdC3cS2URWxSpy8ImHIpA5uLoHPATg9+1/y3u5XdxS7+naoAxZrCIKkTFimCLElMUqDGIGOq2JWik6FXdh76Xp/gDO7oAJDlQMDo85Llvf4WeTRKUG1tbFFXCysvSfyE7XPc0/nLyvm6xLvEFvJMRY8SHcFeQtFY4Vvu9FOW5NNmtKSj7AwaXLlCHOaN5Q1H2ePWVFzEm0cG27RQxJ3NhMh4j2kLTgp+jo10YA/HPYHQdJTLznrOUnKdAQ8NMAke3HLy8cfpkv3mO84MNBjbpEOzYkjNVxVbhWBdhRQzDCBtaUIli5CpiG2xZYBYSp9lCCosUcgTmwWel2kCMLSHUOCeYImVAgnqiktaBHzNuG37928rzx1tgq4QajonFrqKgKguCM2gJgiOOAod7Y/aOV3n15oxpG3j6FUvAokEojE9yy6rU1jDplYRpTdUaNqerFDevIBJTBB3Ah0BVGIpKsE4xVhcNgCdBsGTVyu7BpmjQZice0qaVNAPCqbkXutY7I6iNJLHHlIXyWGy7i77+ecz0kzgZ4qRTHeznFj6H2HLBpUDr0blHpMKUA6Q3RHoDtNeD/gAt+uD
6UPSgHCD9VeivQNlHixKcRQqHuCoh3AFimzoCOiMocWk5LRvJt9o+F3fuLd6XRhs8N6+9Rjs+oJ40TOeeg/1Dfuwzn+HTH/skK6HhocKy2hvw0KOP0eAYrm1Sj4558dXnmTb+fr/qjtFdi+MkJlz29vI1aCA1A3e4h6VyQwzQtEibWmpHx2PaXLodDoc89PBDNMVZWBKGuhfF9w/jKIqCT33q06yurC5kjBfrRSMqMFhZwVYVGiKFK7HOYV0CUWrj0z8fiK2HENEQCE1LqFskpmxhiLnkpnFRVnuntM/fFzngxbahqe9YojKoeoSmobAW26sSu5KQaBIX40EgXH84x+lESEpST0Yjdq+9QOkCMQqrW2fTRn5Xjej0Qlv+rUOUdr2nqeb0gO5xziYsl6UcsFUWWUa2SBNQEtPf3rWbfPX559mdT7g1Pebg4IAQIioRFYtvT86rmdepLqkt0hwTayGBAHeorGWz6BMUDvEMC8fAWEqjzL58HvkLfxxeXQPAfukRhn/uz3HrpYZpjAyAC7Zgm4IV5yiNZWAMfWNRAvN4TNRfR4Onmc1SxqHjnu9Sz5qhhmKpNdLQtdYlClUfBN9EovdoVNrQUnuf4FSzAyZS8S+/NePLLxbMGGILQQqTROxsEukxOExzzMcfnvLQ+i3K2W2uFPt86uyMybFn/8iCrymtxc8bQlRMaAm9TYIr8cYxoGFtbIiH0+SUiCFqFtNxJrPgJg9OzBKRlCajaG3qje+kS0OWSE3ZvlRKsdaemnDWukXqkzbx8xtNokl2cpPm2hexTZ+e2U4zVQUjLu2mhuRckWh76QBT3iOzGbQNIim9L67Elj1cv4/r9ymqHraowJaIKRFXYYoKU5SoyfwMrgc5HUv0J9jZpWV0am0t0NhvNu7XyKX7u7u/x8svPs2H3v9+Lj/8BBcvP8lLL73K1772JW4cH+F21rjx8ktcv7lHb7jNeDbh6u98k93DI6q+u4/t8c7zWTb+ObJfNux65+eWnL47yyE+ok2LAdq2xTct8DWapmb/cB9v6tM4pPvay+/fgfrejNPnogrvfe9TPPLIIzRt1kdAF3OjK48pwtr2FoeHh1nAS1lUUbL0cKd4Gb0nNG0qqWWgc8LMmEV2N2W3lOWy+skP979/f49KAKcNt+TF07ECVGWVPKEYkiRk0+KszTfuznHnxPs3dSRDvX/7NsPVTfrDM7RNH1f2aeejU6n8xLZ2h752juI6pTVrLW3bYG0iqninTsCpJMKSl1ptbmKAwjX0VgY0k0Ddzjiezbi1d8g3XnsVb0pa71Hb0vpm4eUus3i1kynibDp0E4GfIcpZSo0MEfpAz1RMY8OGLXAY5rGhFMf8nz9O/0//Rcz6nM2jS8xe2mBfj0EV5wJzIxTRYWLqyw2qBJQWJcR/cFLnz2jckwpdt/BT5KdkClTN6GkFnyVrnQgWS5WkwNBoaI2jbufEMGdarvD1257X9qY8dnGNjzwqrLk6peMFRAWJLYXs46oSK4b1skYZ86Pv7+GaGYQB6BBtxhDW8KJoVbEblV47w4sS/RrV7oeZVc/jytQrbSqXUPjRIJp73dUsZk8CPOXfjBCDSTgwSfNMAO/b1GKIIHoSZ2iMaEjOEJK01cEQDl8gHn4dqwcM5SOIbDHSgEgiKxJjUbUkhH++6THRsSI+EQa1HkKu7YshGIsAzgjOFkRniCpo9Ph2nlKvkq4hYUmqRGzTJjVBlUSAlNy5LA50aqftIvw3Wiv3G52lxeJKy5kLW3z9me/QW1vjw+9/P6/f2OdQHR6DHwqDjYonPvgkIcLqsMfo1af5zaef5hMfuYLH8o//xdP31bVzMjLG5OSm3uPclq+ze093/cuf0aS27AOz6ZSpwCZfp65n7N66gQy3TteRJOE97tzOu6AkxozvAJQ79q/v0zhVKBHh7NmzfPjDH2ZezxPXSt4DJAP5RA1BA8FHzpw/y9UXn6dtG0TKhJGJ6W+LApMqKoIag0USvUWIBJJ2QCoHJMcjBkGsyZXdLpP79u7Ru+oAGGMyxWeH9CefZ4oAjDU09ZxmfuJX+7pGRKizAVrGtIqMEf4Bqj/9bp72D8Xo0M+o8vqrL/PeD++wp2D661BPUwpT0nzouizuHIPhkMloRIwdUluyUMs7X2jp1HKaaun149mc12ZTYkjp5WplhfboiNvHx3zgEx/F9Sr0eJxazHxgNJpgXQ9DZDY7QdwbDaj3YCOGHQo9Ayg96yhNMlyVc1jtsecjzliaKBSu4MrOKvNnNtgfH7PXL5B2jDUFBs84tMxjYNMmo1IYSyAwiZ6p/m3gWRbrbfnC7hUNZjpmaw3eexQoVBiQW6IUChQrUIghinCjrYnTEaZX0UrJTVOye035zg3LH/zECo9erDHErA1fYIJDizktuU1IHescY3oWsRtMZ2PEB4xviaZAix4zMaxFwAprheNcFK7e+FEmF76JmAn9foWKMhkFmrnPke+y85h/l4h1KXNkxNLWnqJITuWJn945RfmWYAgBjLSoA9vMCbeeJjbPosaj+gGC/GjqXMCBGgSPEQtSoqZNZxI01aNFUvGcDOQLmrURBIxHjSHERMxkxKUsDJHCWUKbOhLSuUqis616EBoktqnbwLjsdC0zIXaz+kHVstPN2ljbolC4cmHAl77weW5fu87HPvABHvnIE7x6Y5dzYQ3fKJVTbNvy3Jd+m5dvvc77nzqH2JZmMk8EU296VsuBWGI4TCO8wfV0mYHlyL/Db93NiyBAaBqODhuuj0ZcWhwnEifXcxlh8RLJFghF4ajz3i+SRJliDDiXQKNhicTs++kILK+Dqqr44Ac/iHM585LxLZGUBdMQ0KgYZxMpWH9I0R9yPBqxvrqKauJoiRpTO61GLA5rDa4D4Ybu8ydttZ0P1WpDaXqngv47y79vNR6IA7BoW8gpj2UhkNNCEjl9QURVclS0HEWRf9Z7PuekH51bweTOtPi/OSOljU6eeGhrxkd7rG9sc2teY6sV/Oxwcf+0Y3c7tYcbRuNxxlgscbY/oPW1aJM5eewAjEbjpBYoymw85pHH38sLx0eMRkc8842vU/ZKOE5ZIo2Rw+NDNtbPpjTZMh6kq8vGCzj+BBvWUdqCOrS0CgNbYGJLg+U4QB1nXO4PMf2KeTNlk5KV9R1ePzqgjhHnhIfKAXPv2W/n7Iek2DeMylyVWbxO4ObJ95+6R3JiRPIfhUTg42NLzwmNptUwEBiKEESpEeYaKSUxYzqFnjVM6jGhWAProbVoUXFQ9viHz/X5/T3lvTt7WO8RP8cHEJ/En1IUV2MkqQI2QTk6avFikdZnwSKD66+yEQ4JUZlKQ4XwcGh5YfQw8/IZZk1L1TeUZSDUhpizFEuxD6omgwANIoGoHmNlkeHo+vlDDITlNkAlKwxadHyTcPAdaF/CWCW0DkUzEDOQgsNIV6RCNDH6ic1VgJjZUrKwz+LZ6ElbnyoqmhTgSIBDyaUEsbDoE1RNUaYYKPuJx14DiEfJmgJ3OXgPcgNS9nZvM52WPHFllY8+tcGrr1/l1/7FVTbXN9ne2KToDT
AGnvva1zk6PqYcFFy+tEW1usG1a7f59tNXaZt438tXcttg2krula240+nhHr93WYH0c6IGb2m9Zb7Ub5/AcEeL75HFRpM+m1r00h4fgl+8HkJ7V/nzB2EYYzh37hyXL11OfBnI4mqMNRnka4lGQNIasALnr1zh+vPP0S+X9VlO5mDX9hc10HV0xmgwRlO3jkgmXZKUGfAeKYqT8u3bTI4/EAxAzEakAwBJbu2LMQGAJKebT2PLlifO6dEFDne9vhxJ/GDMg+/LOOUsaVo0x/u7rPZ7FFVB6pDv0tBL71v6XFlVmYCpY3CTRSlgeXy3gMBFj6x2bHInZx81EmOiKK7nUwpnGU+O+Ma3vsHe7u6i5UyJtG1NVJ/R4ktfYEHiZar4R9m2aymVTmDgLEE909gSTMlxVKaqWFfQDCsGKohX9mhom4ZKSogW9Z6DdsbQCju9kmgLmgiT0DLXA4L+PeAmXduqkFtbjTm9Jy48LV1M5PR/uUddhCiJql4kPaFCLRKVSKTEYMIYE6dIADE94nATt7VOu3WWz10dcLO5iJrIrInM6oZ2Nqadz9DgiWIgzJEY2DuG/UmDOJcMXWjQGJgW/Sxm5DESiGoppWXzqMAcD5gceaaHnrbuwEwZiLe4zsTXby3JgHil454Rk+qbUdMTNMZgl8lfnKB+Ttx7mnjr88B1VC3RO9CY1rWxWQG6A04Ggs5RExNa35pk8AsLLuufL2eIu/velQli184Y8cHjY0j8mFkXILcxQIfVKEtwJWgHeFvKEryL5ceoymTa8sKrI6at8uTj53js0iqPnrM07U2uXnsRI7cJOmdtp8dDD59nMjN8/gvP8q1nrjGdt2/LPKp2HRL3Nv5J2Kn7+wnF80mUsBz9L5cJQr6fS/cqwnIP/sn+HXOqXxf/ZClS6XQlfpBGR/jz5JNP4lyWGI5J0yKl53Nx25jMKJ3e37YNO+fPIa6gadM87PbJGFMw3O3FSNpfrHO5SyY5Ucv8ACIQfYvEuBBpe7vjHWYAugcu2XOLC5TiEntBmiZ5h1gmUOjesvzaqdfvCEcfGDjth3105Wa6fUnwdU3fGTbX15kdHRNql0hN7jEUGAyGBN/iNbVEqSqusBgxBO95JxtdmofC1tYW49EI3yxHAqfHKy8+T+p3VaIYTAgY53C2pG1mjI+O2d6+mLaGpecfY6DkLJt2mx5KZVOrWqGGQioOY2Av1HgRhoVlsDJg3DQLAaLmeMzrfkoljseLPmoCN/ycq2GWQIQ4mkKZa2Te7qLcSMbeJNBqjDHVr7vWsq6WvFSj6zZGDUkZMWoSSPLGYDW1/6lYDIZAwKjBWYvxLX62j25sIqsryNBRtQ5fjhiZHr/yTMsfevQSZ8x1ChdQkiqbjzVFrAhFxX6zyr98NtBUvRTZRo8Vh0pkZgsmlPSZMAiCM0KtgfUoHB4c4YcDalqqogBqvG+xtksF54eYN7kYDd4HRC2YnPLMtf1OVyAubehxcgjHz0D7OpgETkQNRWHo6ZB5+Gka8UQfsUbxdYP4QJAALoHVEpjSJgnVmFn6crS/MERycp5YydF8SsVqSCllax3GCjGcPDeM5CzAEMIIQmYG7ETM7prJD25P6nq+x7OGL3/jBhurq5zZtshgyNXbNU88eoXWz5lF5eBGw3deepG9vSOaU5id+ycDfuP91GQn/M40/xuN5UyBLlqRu/bLZNZPf/7k3fc6r/s9z+/PUIWzZ8/y2KOPU8/nidaaE0peSKUlQypFIpJJPSPWWta3d2in45TFNTmQcG6BwwgaMFEQ13XASMIF5HVks9OqIe2D0SfFV+QeWaq3GA8YA6B3SRFqV2Nb/H73wzz12nJG5J7T4xjYB7be6cn+8I470s/JM4w4VVZ7PdbW1ziYHCBkBbQ7PiMC4+k4SVSq4soiebDe02a+Bshtm+9g7e3s7FBVFTeuXTt1vgtvv6vZ2gI0kf9EMdiqSu1jKNPJOGd3s7BLd5S4Rs/+IQZZja7xLQNT0qC0WOaaFlJVFlw4s00zq5k3nmIw5PrBEQbBGouop5UJZ4oeqzLkemhpfMBrpMVTG0Xl/4NzDslKXikjntpykgfWZQE0RabCIi1oxFDagnnTJsEjVSwGR9IEmKpQq9BqIPVvxBTFtyNkehM1ijSOmemjI8GLYSo1f/PFwEbPsD00bPUK+oMKMS3SHzDTgheeH3E0WEXsPLONmZzubRFTcWQda5oMKQYaTepkVfsh6tv/HPvQRWwZCdNEXGSsW4owDMEbfJtaUJMWj2Ya4BxZqmA0CZpgq8VzK25/hYHxzATUG4wGShH6xlFJRdP28CQEdMjRVKKdBdoZYImLLJUm6t7cJZKmVSSzrCRxle4YXYsmunhPzJFWAmxlYxWzoZcCcUWSvM30zOlZL6fS7rU4ZOn/OwN6f4tIuzWB4r1ya3/Erf3IMy8cgsDeYc10Ok8KjkEXteCuZCqa0kpv32De6TLkiH4BDHyr4502PClIvfNzescn3spRyUyi+s4ByQ96GGN4/1PvJ4a4kLXOdn5xKyQj9oWsfUF6PcbI2s42rz63R68qKYrUelt0WhtKTvUnDEE0BsEmhzof5KT1P92X0PpUcrNyMu3uc7xDB+DBe8FvfairwHPAjzy47/y+jQTDiXQ6cpJrMjl9jizSYSmikqW+T8n/TW5SGyPGKsOyz3BtjaP9PjqZZdGVO71vQX0LqgmkmTsEEkd7RIyccuQ6jMfbXYjGGM6dPcuNa9eW8zh0G05HWCyZ4S5KQDAUrkJsIDRzfGgTXazaU5wQzljaGGkIrBmLd8IsRBost0ODE8P2cAVvI7f39uiXJTsrq9w4PGamEYOwIwaxcBA9k9mUNVvQt455iBxIJGhE/ecQPEEDoj6J2eRanenq7qeWQa4HiiSYggHxyanqYipVQ01kJjD1IZVJjBBNAgNFm6Pa6S4SImoL1KbadyBv8r1V9iawd5xbEE1EhkPsxgph7pF5j/6mRX1IJQCNOZudMnFHRZ/L0ymtBI5DWCh19qxyeLSNHN6mfPQyop56YmiaEwyGKrQhYpWc3UjCQRqFBDRIzptEQUtHnO0unttK0VAjxADBKKXAmapi7iOtCXhpk08oCddgnCNgEN+gLZz0osuSUV42uosHkeZY96OY7HPm1HQkR/XZCcidAwYlNk1aH0iuDCgLKuBTRdZ7mTDh9C5893tOXllOp5/eS7u6fEoyRYhweDxZep+cOhLaORB8F+NO49+pPN6v86KcAAM1v7KkAAkpCn6D8zu9k6Wxvr7BlYceZn9/j6OjAybj8du4nnd3rKyscPHCBdq6ycGSLrKe3WPvrkbJQpXkkhTK6voaaoS2bSlLR69KDrKiaFCsW5rbUVGjLLP+qcZMG5wybqrQNjWu3yd2ibD7HN9zLYB/k8fy9nRCGpvrO9KZ/pNF0lu/wubOKpNrr+EuvwfKASthztFkTDyeYrXGrV7Az/bws100Rs5tb3IwOWb7zEX2mxEEnyL5pcVlrcO5IvXYLzbJFH1aa5MBU80tgeGuEs0bjitH8Ct/K+2R/4M/xWz8M
Dvb2xRFAYvWpDvdUyX1DrksZJJqtaura8wmI5q2YTqvWSkq/NI5rBU9Rt4yIrAmlgGGQ4HdtkWcZacqIXgmEcqqRxMC+7v7GGMZlhZtAxhLpUIJHGngNT/HicEZg0SI8csg/4JOZ1798uabnZeuN7e7lqxyh2r22BVrksHxqniERlPPf4oaPIVz9FyB0UgsDUFgOpui0aN+hMQ+Gstkw4zDFL2UjWjapBXkCqQ/pNjawjiD3z/EGsGTI+PUcoMSMcGBBua9FW7ODjDRMzFgSNH4PHiMPEJ77RcYF0Lv8fMMe55iXzDdVq3g1BBjSOlISRFrRLDiMcESjCWYgNl/lbOzpxfPbaDCsRFKSeBKZyzGQEvNpO5Ig4SAYlUwtoQSYsYWqG9OKH7FILbIjkcX+S8h2zXStaotRKljfj1nlUBSKxqKGiGEBnwNoSMDAhMtauXEYXjTpXBnUHSPjOeb/HbXexd/vte6eevPv72RjP+J9blfXYE7o/3OEfhtILFumuy4dmkLEWFlfYOiKIgkjM6g36Oe1Rzu73PmzBmefO8HcUb53G/9JtPx+IFe6Xc7RODi+fNUvYp6ngKNzuVB9CQTsOyoqSx81IhS9iq2z5xh//XrDAZ9fAhJORNwhU34IGMhZzclKiF6lCQYBIqRRJufulgMsfWo88nZfxvj3zoA38Nx1wQWi2qqJWvZZ7h6hr462Hs+/fmDH6Ud7FBdfIqhbzi7tcErN2aUmwZTBj6yscqkV/B63dDMI9fmxzShYHN7Awk1s8EKk+PDRQagSwRElDY0KEpZVATvUeKCplVyNGitXWA7Tqf57rEUn7oNv/ELcC7rNnz+rzL/d89QNI/iTjkAp9OFABojVeUoe31Gh0f4esYYBWMSt3xomRtLf4lRrhf+hwykYqyB615pSEZps+izuj4ADYzGE9TAPBhG4wZUsLHlTDXA9SI36xk1NqWgXer7H0XPLLQINUZeRzUDq3Ke78T0kyhjl5jNFmk5lYQVAKwxFEWBryfJCEdNiH8j9G1ioJu3LT60OBWaGIiSWBJjnYyRdsFnrwdlhVZ9ojWYkJr/oiswq0Nsr0ezu4fUDWZjNUXm0qUMYxbDcahroFCuHxdshpoCQ5SIaKRUh9o1RvGnqF/5F8R6yvqjjxM3FVwXnUZ89AmcVOdoXyw2KhGLV0MV9vGHz7M9fpXzvRPwV0sgRssKiTK2CZHd6ZyagNdLKYiKKScWfUtRDFAcpXU0fUdoXepoaLMmeoygJ/LIycKbxSbcadafdLl0adJI15KWIZ2JaMUWYENyvnLnQ9QGFsDat+r9f+O09+lxr+j/zca93nO/1f77GR3B0jLw77sd3TmNgBYRw8bWNpeuPIz71tfBtxgxnDl7huHKBqPpiMJZ6rpFCjh/6RKz+ZQXX3wGVBlNRriyxLftD0A5QNja3iZ2eDcSqFnsyfM86VBTFh6BnuydIUbOnrvA1ZdeoW1TgOV6RQoUNJfRtcVKcaqEKyQtAEQWWGMndsHw7pvmbSIA/q0D8P0dGhDjsBsPwdlH8Bs7NE2zcADatiGEGd4VDLZWePTsKme3+0z2jpiPa+IocLZYpQQOVgoO6TOajjm+ccTg7HnCOGJmz0EzIizlALqUm5BKC4HUx615wsY8edu2zZNN3jqt9L/95yfGH2ClJf5X/wj3H/3UKTnYkwnakYikA3vfcunsQ9TzGc18zrxtwVhCSLLFhemxxCfDYYy0Zk5AmEVF1TBwYAYV7WyG10C1ukZzeMzxbEzAYkRZM47jesawcJxfWeXaeEYbIhtW6WGY4/A0YP4pUb9y13meug2qOZLUk4tbLHxQEXwMTNt5StsbkMJQGku/cKhXmqYlxEhZVkhU8C3WuKzzbnNvcXLQxBjUpXKANBE1Bi1LZDjA9nuJmXA8SbXgsk8IPjl/xmS7KJk4RzEtaFVi1WI0EEPAi0nMhCYi8TGMg+bGL3N7+jq9nScS1z4k4qK5R6VMcXXMXRvOEpsZvflNevPnCe0uxibQYzcsllVTgiFxLjTtojMC+ROLjVKkoKwDpmrBFontzwpGKqIU6WaHFoiIO6mH5xPMwExlwV2/qP9nR0A10SxbA2WBmATAVC3xUiRGTe+SZHI4YXg8nQG4Hyfgzcb9Gn94Y2P/IJyAXPNnCVvxAOPtfr/PQw8/nMoseWyvD3llf5eyKPC1pxSL+ohvWypXceXKFba3d9jfPeCxx56kGvR57eUXuX3zOq2/38zEgx9lWbG2tn6KlKzjZFmWb+9+j12vf25nxaTuuJW1NYZraxxPxmwVGwlPhBBDJISAdY6gHuNMzsa6lN5HsVlkSzTtzVl3MwVxoeWOxfCm44fUAegm6Nv1d74f42SBWkyOUSIqBaa/hZx7FHfuIvTKpP62NLdFFVuARGE8a/i1l3bZdMJT59d46KEzjEdHTPbHTI7h6iiwcmWb/Zt72DMXaXuO4uFN3HCb8dO/ATSLfGLwnpBb2ULmrjbLk1hk4SQYYxaR0Js6ASp3JQlEhZWVIWW5LNzbecpdqjFFG8F7mvmc4doazXyeygEIIbTM5zOKchVzUtBlGjwz9Sm9puAqQ2OUG+OjBCirSurjQ5pchCs1ZEIfqIywW8+Q2NJzBaEJ7IaASMDrb2PlOYriKj4UidpWT/fAL4Y1S+WRzshk1zw7B1G/RK3vR2U1/TlEfIRGFeccZVnQzhpmTUNUn2rTwee7ZHOHWoCMP0BdMsDaJkW7qkR6fcCg4wl4jxYW2+vh2zoh5zWhSzSGpAw4GcF8SoieiQiOBhGYoVirFCFV2Rv/JIX8HHH/ZaaHv0UMB0CiK1b/LJg1YrGaywtjqG/hwi5b1ZQV5sx6jvm8wS8R0/WMpRbLUaypwzw5MSQxsJhyn8lZspahFUIMjBXaUCM+gCkwVtBeiXoDbWaKXLToKeTUfWqozs/LmFwG6J5fTFmANqdRrYXCpXlpHVr18yNNBC0ak9CQYFK5Tr8bI3S6CHjy8/2MO9/3IPe+ZUKg+0H9v72hKG2MqPpFGFL1lBXTMp5O8FjGTc1kPGLY6/Gexx/HGkPwyqVLVzgejaj6Fbeuv873c88XEcqypN/rQdQsbpds0Z3t0t3vHShW8muaW119DFx65CFe/Na3WF9bpW0lSypDJ8KlPoVtarr2YZMJrtKxYwYXis2sugpEzZiu+xs/pA7ArwGPAWe/3yfypmNBeZyKxam244a4zStw9hGQPhIg1oGSGjNt6C/ReIZgmBz5zCJVUFrHLCZWtOl0zsr6BttrG/RmU46+9iqHz19n9fELTG5dxVulqYW4UTN85PMwehZ7+Dr4HYT/0QJ41gHZjLGIeIx1hEv7mGGEg4p4c3hK3eoNx5/9aXjoCH7k9fT7MzvMf+qnGb9/jLXLDfxLhrSr12pK7d28cZOt82fBOMr+ClVVMNq/yWw+Y7gWqH3XTvgzRHrJWJoMgmxTjaxwjlmITCZTFMWoMBTHhjUcaGQePI0arLHU3lMTKI2liR4NX0L4J4goIRY5kF9qaT0F7CIBdFwXPZ1+8gYhKkR2aeMMWM33
WCgrh9NEMdxMa4JXjIGhMaxQEkVpUI5IqWjUkKRqEwtgDCDRQlWiRYkpHNrUxNk4ZZWqXhIpmkyzE5EdhsbDbIzMJ8QoiFtjHg2D4PHqcRiaWFOb5HiY2NDa8zh7Fhc8Ev8xACtDR0+vM59/B+Zkg+ux2hIQRlrRKx2VKmNRZkubto+BGZ5WoTIl8wBRBOMEgmCkoDSOVgKYliJUBAFUiF4hTFPvv82oZy2Q0OT7RMIHLJMWaVjMNc3ORXIyMqVwBPENGEOoDVI6rKuI1iFFSZxNwbckxCNv7gQv5sb3Ik39oAzhnZH/cvr/wVyL9562bRaRMYBqw9Z6n5cPG4qqT900rK2u8OGnnmJ/f5+rN2/w4Q99mNl8TGGFF557luOjY8L3WRxuOBjS7/Xzb3pXHHonXkroQN4dc2sCBDe+ZXN7G6/KdDZj0KswlMhyUKGKb1PmDxIuQDAEVaxLvDrWGgLJAU5S2fc1SRfjh9QBOGGK+kEcCfMlmMyBIPIS4lrM6sOwdRYtx/T8txgMLcVayWpPWV8dUBWWbZ3DM18BQNgBWU8CKDGws1rRrywH12aEvqUZCdurj7Nezfnkp2d854U9psV1qurvcBAi5aqhnVtiZXnqyY+y+rV9uH2Icg3lEs45nLPE4JMwDaCfugp/478jPLYPv/YY9j/4WcJzQ6TbMO8aeZOYFfAH/334P/zT9Op//vsIh46yKOn3B3dW/UkgrjI5SRpQAm3bMJnOMTlddu7CFcaHt5hOJnjvOR51SOACZyxalLlMoZRlQV03SUgjpjR3KnEYvGmYqLCCI5rIPEaCCs462hiYhWdRvQ7yT9M9UCW2NYua8fJ1Ls4/v67xpGc6g8QkQrQAEYNFsphNzEj8qfcpLZ9reKUIJZFVFXpGCURMgLkY5pJEiAg1kgmRUpRfQlEhZYlIxI8niQu/KDFl76STQ0jfFWKqrUfA9qByqAiNKPFoTBUbBn3D45cuMZtM6A/XeeXqNY7nc8T1iPHHidTAP2XSFNSDx7FmTAz7hPlLiCpBFYmRoCWjecD1oBE4XoqWj2Lg0JqchRcqicxVsMbRLyp8SPV+vNBikPoY7Q8xtpc2OiJSN6BtAjyZvPH5kI193pTz1ovY/BhTeQMFTBfVJ+ct8STEDAvweNsg5RApcyaAiIRZwhmaMvMOvNFauHOuLP+u9/h79/ObGdt3w6noztWQ8A01J3TA3TjJl7z1t3dOcJeJSZwNQKLybZsk2dxlIYPBKIz299i5cIXYeh597HGefubbfOhDH6McDKnrBqPKzRu32D5zjt29vbeV3n7QQ1Xp9/o46zCRXPbqQMB6l/GHPN0WQSCZ0jel7V1RsHHmHIf7uwyLgtYGCtNx6qROl9Q9BBI07S+5TTU0bQKtkkt8mroMNAt03e/4AXQA7nPK3WsNfc/H54Drd7x2khLuxEPsoMZsP0Qb9ljp/Q79c0kFr522TG5PmUTD9Z0VnBjWbh3wEVKkFQ5HVGsb9HsFxrc8sX2GvhNkoMysYTRvmNQXuTm+jTNzzjy6jvGBa1Kx9/wR7fYA1RkyLHnppRtMRnNWGOHKZ3HxIVQj8+kE1x8m5PN7rxL/6t+Bx/bTpfz+F4n/9S/CH/l5GCXGq65VsLvWUzf+sA9/+Y/lPwlN0RBjZHNjA3n1VTj17ojEJlGz2iyoF5V2nvTQY2zxvk4GvG3AOnpVf/HpMhNkeAEVs9iUY0xiNAtynpiC1JaI1RZjHH1bMI8tTfDA86C/gsjx3YjrRRrvzoW9ZAAUFhSFkjkCbCILSnth6gJou+AqRCyGEkMphoJEjRuCEExyHINCa/Lm2wWzqkjw4FvUZg37qkSsI84btG5BDFIWUBUETcyCqhGjkqMvgaKCMrGUpWP10P4KK3XNB5+4yHTecHw458ojTzDoD/n6t55mHmKiIpUfTedSvID0e/gpECbZnzOARxAKURqJjAO0QTjGL25XLXCMp4qp1c9ZgQjBfxRPn7aZU1arWC0Y1SPUz5PhHVbYokRtYu+LzQyaWZpHChDyHDBgUhpV87MRyWlTK8mA+JBMW8cEKIlZUAGxLskDA2E2SwIsZYUJkTCvUzr3LkyM3PFzty6W14lZeu3OEsD9bGB3vudBlEC71P8biwfd+6y665GTfwuthNyZgYFQLA5gMJS9apEaPx411BuWrc0h9axmfWuDp5/9DuP9A3rVc7jSEWNgOhmnThFnGY+P3+H1vvMhNmd1JRt8ujQ/3O0E5ABCoWuB1Dx3jBHa1nP+4gW+deMatff0ygJVyZLCCfAnRaL89dpgKVCXn5mk/VKtxVmXgLkh4EzH33B/4wfQAbjPk/++JwBa4EXg+Ttez2A6URSLrD0Em08SGlh5ZJUYA+Onr0F7zMrAMVhb52gO9tiyc36NaqW3ONKnPnqR440t2jbgY8SZgrJUxt4zNI7h0HHc7CESuDkO7N66xdmtAefOVPhpxSv7EV8aKAr8mUcI178NjHDbe9i6odlPBBOucMyaKVyYwAdun7oa/V0vYlYUM3N4HzDW0WmlvxkiV1XxrWc8niQWucVCWLyDtGlzou2+Icw/d4wMwf3nJeNf3cf7gA8to/EImU2BTwKP0IRAVRRUgyEHkwmtb8EIhTMUIRl8jxKNYHKLsjeKqqfEYWnweg3kbwNzOgKWxVgY9TsW9OLcOflMpxrXtf4pi/KDMQZrfhGav0SJo2cKIFJIpGeyodGW2pg0Z2JS3ytUO9B9/q6IhjaluosiOQDGoiES6zaRh7hEJeSKMmV0MvlNQimna6GoQBwdZ7tEQYtNhu6YgobXXz1A2kNefe4/5NOf/l/y2ksVt6ctpQ1UUcB/CZUtfDTYlQJu3iKmHoyMuZPkCInBCJSOlLrPY8U6Np3LmuceI5GpKMGfwQVhYzDEFAUHR2OiD8mQtIHQzpDCYkwFvQpxPYyfEZtpUu9DEGy6J6LpcyZFW4sOQWdT22QvOefGuIQNyAyPiKFwjjif4keHoJ7oLEb6xFAn50HCG8z7hKJPZb+IdqWHN4xSliP/7zaCWc4qvJ3RGabOaHcZ1TvPTzhhr5Clvy6VUrALJyslXhTtJJu7RJ8RitJgl/yVeaPMQp8YPAf7e8z8nN/zu36M6cERveEKTzz+MNPxjF6vIoTAtZu3uHbtKvPZjO8Of/FghuYUu3Zp/RhPnIElJwBIHUyLaIScpu8+CwisrK/RGw4ZT+cghrJUCueApCDq28Q14MSiXhJzqE0OVRsVazuqdUlORZO6Y+53/AA6APc3ZPHftwCnvWvjy9xt/AHCyWRYeQhZeR/aK1l9VKhfv8HKeMxg1aCDdQ4PIr6tuPDQOk88soV1gXJL4YvpSN/67Jd5fRYpB32MKXHDirNn1jl7YQe3skqjLWVhOWv7jFvPB9+zwXg0ZlDP+dQTO6w9u8tUS46O5xxMBM3yta1V9KEz6NgT22Pq8XFKEe+toc9tw5N7J5fzxUv0ZIC3DsniE0XhkrrdW9z4qJHDo0PsEon/qeqYpPSriYq+B/S/C/BUsqvtX625/eeuwf9L8G3LweE+TI6
Ay0BCbau1jKdTgqZEbgGsS0FhIq1YJhppQyJTqWJqaxyrZxavIfJLCLcXkYsu0sZLkdUy4ruj+Vr2EUg1vYQChkQkk0oB0Rq0VaIG6novJUSdTfo1KjkZ0CBiiRQoNRVCYYVafe4k7KJYSZS3sQX1iC2T7xFCqk+H9qTVU5J6WGxDpijNUagnWcHERJCOGwRjK9TVrKzvcOPGAc43lO4XsPWI4fqA8yuR0fSY7c0hg9HTMDqGueLcy8j+Lq3fR8QtUPdOhX6wzA3UGnEm4pZumtVA6Q2NwBxD8JnBsih55OxFDvcOCEVkn4YkqGSTIW9biC3BtMn4O4ctV3H91ZQWjS0+RFTAmtQmZUiZIbG5SyEqtijxGhA0K7ApaCC0Sb64ngZophDn6Xm3mSuD9g6juTxOIvtkA5aj/7tn/r0zBm93vMNNT7v/6PILnGQFDIIhissGvmO7lLRAF0mAbg/OTkW3ZjQgS5nCqIbC9uiuNcTI7t4BD106y97ehMn+HtPxmM2dHapen6uvvs7KcJhkwYGtzU1+/0/8BJ///Od46aUXvy/tgCLCcGWYUvrxhJwHWGwbIicsqos9YzmIEDJ6n4z2Lzh38SKvvfAC/ezsFLk0qxpp2obC2TSbomchImITkylAW7cYlyJ/Jz9UGYDljbabjPdTAhCUL4L544v06Pd2HAPfued5Jf1zhfIi4p6EVUe5HogvvMAaBrvS53AyoBn3OHt5jcefXGF1UHA0mXJ0rEyee50P5KPNbz1PdJazl55kMLAcHo8Y3fBMRjMe/tDjbA1LrEJNzWZl2B3POTcc0neGM4Vy/kNnEmBwXvPMK4f0XkoLMviWYL6E2fmz6M2v4MMMqyBfvYD+xT9J/Ou/BI8cYj77GPz5PwZHFW0zp2tn8V10+RZDRBiNRqwMh/e4UylLYIF1Y5n9rDL7aDz9hv8yYP6Gw7ct8/mUXq8P45eA28A5jmczQlSq3MfvNVK3HlsURA0MTEE0Sh1avFEajXjZA/1lkL0027qFeqpX944NedkJWPgIOZ3cpT9VkRApioLNtTVuHx1kkROhVzjmXpLnrkldzhNxJs15q45K0yIPxjGNgSmaWRkTF0MCliRjj6aefXx7QoqT+9/VJtKadtKgPqtt5takfMEnayZq7v9XDmWV8nCXnnwbaIl1TVt/CeseoTSBSzv7DMM/hJFPbaWHLyI6T8eLibFSNWUv+rZk5lu0SFHJsOegTVLOUZWR8bgo7LgSa0oOvKcyBdPplNoHfJOU+BK1UM6MLGr9Ddp6YuFQVyVBH2MxtoctlNC26X026Qgmoqs0X9VHfFOTSdSzPkkiwNIYiBhUfdqgixVEcsYltOlI3f27JxfAG62HzqDeqWff7XWGN9/r3s2R5r5IudRJQV4HZmH4NUe8Jy2U+T0qyTkzJpVXMnMhPqSSYsezIIK1RdJu6IaY9FE/pSosvg48862nec9TTzKfN6wN+xwfH+GKkqrXw5gkpjMeT/h+DZEEMpZFyYNE+Z0li0/wQpzuDsrZUo1LbH755+A9F85f5KXnnmc+r3FFgQ+ZG8BamkYJPpUCQBLXhvrE0SKkwE2UmKnTbYd1uc/xfXMAJKd9Uyoup+rC3d3qaSzV0sRgigLM81jbx4lhNhl/j52AMfDKHa+l81VA3AbGPsJgxxK3hdXd21SbO9waCYVZYf1CRbFmqUvLaBwpLfiQxB02dzYXR1y78ChzD7vXRphyzpUPvZ8L587ier205kiqev2iYHNYYsUybhr6DlbcgDom1rTNlZLHH9mkXzkYg4gjSkS3fxNpPwa73yaawNraKuPPPUL503+RWE2pRmepX3TUUmdvN6X14h2R8JuN+Xye2mbuGDYDVYbGsiWGvRiZBU4AyQAN2XhFzmxtcaasYPcbwCFBzuJ9pDKWNWPR6GlFmAv40GJFIDSUhaUywnFo8bEG+UVEdtOT0k4+sxPrWHYC8lOVe1yqJEdvwSQnBqLnwpkzfOwjH+Hylcv89pe/xNe/8S1EoBTDlJZI6uW1eVM1scRooLCeYA3TEDhuW5ruSzM7YMhqaaIeQp3Y8Joird4YEJPOMNUWk6BTDCFTkKa0vMnrreN46NaLRlAJXD9q2TE9+v4lgibj7Udf4OqoordmeeyRL2J3j+i+SGWK0ST5m7IMHqFEUWptMBaGleKIaH2Ssh26iktFiYtJBW7etDQqBA3s9Po084Y214qU7PiYbCaFZKg0oC2oGsSB+pZoLEIgep/5CnJmJkTIbZRo7hDI+iQp7tCMX0v0y4kEKxBJjJLi+oAn+nrh5JxqOeTOn+F07d9x78WinDgS9wts+25LBff67vT/Yiy93gp1c8IWaqzkTjMhNHlPlk5G2ZxkpTqHuLvuKLm0E9I9z8eT7ASLOWHE2945y3OzA+p5xDpFG9jb3ePrX58mjn2NxKBsbG3Rq3ocHx0Rgqeu59+36N9ax872DkYEr2FRy0+2X3Jp4O7RZQOMkWTMJTmmRix1aBn2emzvnGF+sM9wbSXV873P4MnU3RJFiMYQmxZbpHIsJuasrqYsl4E2+CwqdH/j/h2Ae5awTr94pwZ8ei3tNemiTyJ+KRzYKk0KErexwS7UBJOUosHGAKO08fQ2tmlEqaoKawaorFOKpZnPMnDiezExlBSB3mMxioL0QC8Tg2O4WiHjKe7sJW4cjdh4/1kGLrLRGzCdzqlcpKxKdmcto3nDcDDEDE6M5cd+/MeYbW5Tt4FpUPrOJAY0DTQ+MBUHIbBeOqwtaEJL2SupW8Ozh8fsrFS0reGF10Y8++I+f2TcMgBiPYOjW0icUJTvw/c2kXoXX7eIEezz5/DzGl8U2AJ8k8QmxAhW9BQJxlvdq9lsxtrayl1/EYkMsViUmUL//1QwvqS0fzmmWfk8yO8zCyT7fDwmDLsNdheR91KI0EcJBGqBJqQYw2sCmBkRpk2L6pTIryDyLKohT1uTI/dkwJPdD0ube3q8avLvCy6AZFVVOplfxeF5/D2P8rs/8xkKscxmU37v7/pxjkfHvPLSazQ6x5j/Fuf/Ag5LlJjYGCXgBGZEpkCrgkEZGjCFJVrDtG4JRhLILgrazsGPwbt0DVagSWlsjMX0SryvIWRegRC7tpSE3dDENy5B0Zw5EBTvKg6rPr2wSYyOrc0Be4dHnNvc4sd/6gOw99vENnS2EvJ1EMLid4isVwNKYzn6Q1PGf3MOwIU/swq/mG7pzE+ZNjXWOIbGstMbcty2TEW4cbhP03ga6dgnBVwBrkgtfDGCUSTaVGeWCE27eDZaltiqRzQN2tSJMTDmLEnXIdA5buJZ8DrG/OwxIAVqkrKa5sYCcUUy/m1NupGeO9HyWJdKIQsiFrO4J3cb7Tudhbfat96NfS07gKFlNun2M0CE6AU1BmMTpkJsAa5H5/uk5Fdgwf2R29sI2bmKNXDS8y+S7mHZ6y2i5JXhEOb/f+r+PNq+7arvAz+r2c1pbv/rX9/oPT1JSEiAAGOIwWCoALax4zi2ywnBpLFdozyq7MqIq5KMpJzUKCeusrETkziOM0bitkzcxhAMoQ
mIAoNkNUh6enr9r29vc9q991pr1h9z7XPO/b33xO9hPenVku773XvuufvsZq015/zO7/zOGa4esLsNx7OOJIGzB+c4c+4ct4/vMfJDUug4mh4y3BqRrOfcsObFz33uDU3nvhKjqkr29w8IKSMbIvSdTKH3qTe74Wrzrz5wkr4Kx4C1nnbZ4LynWTQ89ewz/PLP/Tyj+QKqEleWOGcZjIaEoM6rBKGNkVISkhK28FlcyWqBQBcwzq6CqwcZD+4AiGaF1N/oI8CNidkb+p40YjUa6Z3FHiZ13mVFN4WXrHMYZ6iLgtI6xBgSoopIITC0xcoBKMuCnbqkKAqMGeDsGCdwZPsWtl+JkYB/vHHtvSOQiWCcQ4ptbF0zvzXn4a85w+RkTpoYbn/mLn6n4PpwSsSC9xAXkCV33e17nDXr1rmvXTvizkliOHTURcEyaPObYTlQUohNdEE4urekqhq2Css8BBap5M7xFPf6lFIMcdbkmlFdfObwCnJ0CyORTq5gzO/SXuTLKdY62qV6rKELWmcatVlPVddIssTF4oHulIgiAF2zZhnnJInuE07L9E5ipGsj6f+Y4A5QAX/bIJfz/RXh3u0bXLl9Mx/lJzHhwxhTE6zDJKGwKme7zKmJLvXb+xL4ceDzbBr2nsSkDmq/+dk3C/ff8FMf14jVCOD8mbN82zf/FlLT0cSWwjmakznPvedZXn/tKqVY2hhJztJZS5u09E23CGXr4yw4rbKIVqN+06HNf7AZek4QGsziBClqkGx0nFMo0jpcURK7qJsxgvEewZKsA5sgBS0pigJ4dQhCg0uJpR9y030P4/DPeOjMiLPnH+bJD+1zdPUv44oBhSl1w8hR3yoa3iA5pRSI/xrM/vJSq8uAO39twucDvO/vwcPlDgeuwACLFDgOLcfdkmAaSh81kpGUm1RlulRuxCN5wxXr9LpSVLJd7j5Is0BSpCgqzLgkhgbpXK6NZm2QUnZ+kmi7X+mDB831G2cxtqTvT6FluKXCvLHfZ3roPhvzlBDTG//NNGb/9YbVwdqJ+GqlADZHPoccxUqMpJg7IQaPi0IqKkXJklH+iDWIUm3hj/081B3y48/Ap89w/3XHGFguF6u5UpYl23t7fPHKDdqmfz6Jq1cvs7O/z+7WPoUriCky3t9RzaaQMpHwvhTdV2AIwpmDM2yPxjRNozof+fpUFtiu3gnQE/7WqIAQQsRbT4i5bTYGby0tkdFgyPbeLou2pa5Lmq5RQmpUBDQk/TSfib7KN8jibSjfKqWEd+5tOUdvCwF4c2ghR1CQpUa1tzGuZ+EK1jmctbQLlW713mOwGLHUVY3aQot3npAShS9UzagoKDYM+854TFNp61rnS6wpsKKb5puhD7/Z8S8BD73lb3+K000yNhAQEUTuYcIMMzMUyVB8omR+tCBFUU/6LojLkE2eyD2SZg0M/Xpyf/Tzv8bCWowVnFE1t9hDt6wEoQh5AlprVHELlYmUkBh4R+GErgvsNBMAnpOWM6KOhuF5kPcCT2VimJock6E+k6LKWIpgF4sV4Pugw7Ut9a1buHze34LwCobGOZaSKJLQZr2E0nqW//H9egP6w/bOFmfrGm5qc5HBeJvlvKWVCAlKDIW1GDEssocu5udBXgNe2cjPiZZ/2Uz+y+V6ZlVKmKMZ0z+YvNELYOz63ASNEkUY1DXeWOXZOZcNYeRgd5/xeIxdBpK1zCVpw5/Y6UK2JkP2jqIomS+XSOxwtiA6oy1z2x6uz81opIPlRNn8w2JluIxEyPMqTmcqcwvgs5OA0y3LGAQlwZkYV5C4BKiqgtR4nj77B3no8Q+Qxuf4xY/9Cs+cvcvVu0c8e3jIUM8Ea4I6LvmWKYoSWQbD9A80yEbWpxvBJ39QHYBZaLmbAl1SxAIstXsvy/QMpS9pO51/vXaBKj1u9FYThylLtMwsw/oSV4ZF2oYQIq6qcfUIW2zl/H9HaBb6fmeUO4AgQVEME6PeH0lI24BR0p/6hgXGl1hfIFFJaRjHSnEQ8nlsIAzAbwztvxsM/1uNrJyYDOBIXafIUaFojEGV6BCB/+tPwL//T6GM8K9dhH/534Crw9WRlBMpuGIdmS5DoBps0YWbiASsFZIY2tixbBeYZKm291h0DXHZURYlMSRsVaxg96/kMBguXry0Qv0SrCSALWiqKWv59xocfUO1tm1Uato6Qgg471guW0Z1zWQ6pR4MOL53xMOPPc4XP/9Zhm1F5R1No0hZH5f0yLgrNE+6UvzrFQSBrm3fVjD8wA6AtsFW/KdnOoox9HW01lvd/JyW1EgXqcoyn7g2SBgMR1hn8L7AoMIozhgK56iqAus1txG6gDUWV5U4s24kUpUFyfu8WSeM/TGM/H6c99Dk9/0LrqmPAt/G6VT0eiyAV7/EhwgwgaCGlg7lC/YjonntBxx7d66z9+Bvf4uxzXpDUih+j4Y9+ih+Ahy/8c/6Fba6VMl5vbc5UoL5DDgC5lwE/k0SP5qgRaNhCovN4jzG3F/VoT9849d/Ew95Cz/54wAMxo7DqcFZg1ht3RsTKpcpEfglkF+gd9Yy2Ms6Z5u/elAgw3fZ+pD7cLGGRje6pPV/mlNb/WtiwDmXHWHDWAq2tsYcdSeENmCYY2xFWZcUhSNIwiRLDC1N7BAs4jzLKKS2U+TFquJXSujaSglJrZLqYoDgNEpL2pI3hoAsG/2D3mExQIqZqIQaNgnQNtrvnk6vOXV862/9ei669/LZz77Ga7df5v1f840M9/8Zn//Vz/Bkl1b3SqfHKr+nsKc1tCKYG6bnJOqtizBWv42rYckt7ymMhdhRGMeSbZZmh6MUKbyj7bROX/p0zOqxWfBO0zK94c/EM22/LID2NUiLFrMw2LKCoqQYDCnrAe18QQwdEprc3dBrisEqwiL0gkKaDsBr3tobQ1gs17G9elcYwpvsBpsIgLnv33f7WK8NLXRLiCasIFkklCokZXMDpj/2C2vjD/DB6/CxH4Un/s/rYNgaXOHwG4qgNiXO7h0w3t3mtZdeYjE9QVJiZ3uX2o8gdXTLBePBEOsdbbPEiDplRVEoLP6VuiO5quvM/sEqh4+spXi1qVvMy1DWj1kghYi3TsXJch8An/RetG2rSyRq+uTs/hleL2uOJ1O2hzWlV66Wx2XnQYghMA8BZy3OWcqyIknIhGPAmvuUV7/0eGAHQHAY5xGnkQvO4XwBRjQ6NBbrzAqqKAc1vij0RI3CyIOqxFjLYrGkazus0xKdoi6z1wTeWMq6Jokqi5VFsTqHlBIxRDAqXUtaUtUVo60t5rPpl2WJZTVwGhQ0Pj1+kb69ZT/W+uIjjBuDN9DOsE6Q8S4yj1pKNHKYyiLRqMdYJiStH1SfT/ISed/hFQzw4sFFFkVJEpulZQNWhKEtGFcwKDzeWUIMHB0dMZ3MwDmMrbHyDG2zJMbfDigL/z35u1v8Etf4pxvb1BthbvKk79EJ572q7r2tMQVexFvLc+kf4rIxHmDYrmomXUdIYEXV88SBs0VmbeczyZH7x375/8szW9s8no88O/nvieZ/j5GEx2KdpYmRkJ4Hc
43SfwxJ0MU+P7cxMyRhcNnAyH2Bmlp22ZT3tRn2XBHAUOuf07e90IwxOp8NBqzgbMHFCxe4eecenhuI/WWc/w4kCJ0kRZZToKg9IWrZWjtfZnRHGDpPNJZOQp9HA5Ia1tBpJYAYpGkAj/EFcbnQ6gCDwtfGKJPdWvoTNqFDmoWW1knQZkDAchr4+Cee56GzQxZ3T6gxvP7SS3D8ReZtXOU6EQPJqbNGXNsMMdRGuPTHdrnqjpj/Qd2kD/52xff/2+qgn7EVwXiihXlqmaWONnUkicxiR+E8sedp9OTGJLnM3qoEsAHpWrImcn5WkhGSpHwJiWCFOF+C9cTZBF8NcEVJWY+RVEOMxCwKlESNv4mihEkDOEsMHSa0hNAgoVsjEcZleDc/k/sdS504KEIqD1Q189UbPWoh932R72n/vrQqOBFbawqz7tbGvx87y1PH0Ft5ujzNWO3UuDya8f73PoOERJsSXVAEsKoGRBGariPOlzhvKXyBM4XqinwFh4jgnWc0Gq2a+2RMLu8rUV2llKN/6UMHQxs6fKEprxgSVVHRtS1FUbKcLyirivlkyng4Yj6bs72zy5XXX6POtnOF8OU5GkNObRpdg3GFOCZtJSwbyMADjAdHAEoVHlH9+LyxkCP4osAZR0wRm0URvLF4YzFJMFYYVFVOlSnE53JYY7E0reaRJCS8t1RVqaxL47T2MY+UBJ8hIN141TM6f/4cd2/dVOGFL9OYA//gQe+NGSA8ha0OMBwi9gq+KuHshwjXJxgrFE+eIdXQXVtSFgb/+Ij5TBu8mMwVkmQZhwXvO7wCwM888gFuVSNi8rgkHGwXXNqquFSVtD4i3ZKTk0Neu36DW/Mhi+E3ZljOYuLTtKYlmcziFcMPI4yAL/I0P82nEW6ii/SfA8/QOwp2Jeqh/ae9d5RlxeStHIC8X56O3H8GuAa8iMfyDGmFqhjARtHmNGRU1ur8UcfD4KzFWEeIEesdy+WMS8+8B25cBuCxxw44euGf0MUlQcByiSRDjPkJjJ0j2LfWDRcVocF51pGZrC9k0/ib9TvWcEGfYug7JYqKDaW+dE9bKicRHrp4gedfeJFOAk3UeW8MiLOElIjRENpE6gRMwmGorcMZQVIgicHl9JveX6shtbQQlojxkFrlAViDNEtsSiSb0xtJc+QmOb3fKSDNApOFk4SC5EqwJVjPXQwnJ0KxsMT5XR4ejWmsI4WgKSdB/9PPK9PfN62k2C1rzriK7k+MmX0skBDqv2NRlxoKZ9hyFo+hLSoWSdsDHwdFUZJElTu26oitaRnKkTDOkdpWnRh00vVNVlg5jWtEZqVQFyIxzImupRhBUVfK8i/VmIeUSWxJwAqpm5OWR5iu0zXVt2VdzYH75pax6lSunKR1ed/Kmd2cVKfCla8GMrDpqMBbkRH7FuLr32jaxURBTAk/+Rz8/ofgQ1fXf/RnvuMNh1IBnNM/L5olKUVSFGISfFEwXcypC5g1LWVZKnfJGubLOc7AeLil+/9XdBiGuQeAxLRCoKE39hoEb9b791r+3roVJJ9i1OZXSYhdzFF7gXcO4x2zozkPPfwI169dI8RIF3QfTGiA3ae4U9K0uiCEGDXIzk3bYlQH5UHHgzsA9QDrnFblWKNlCCJ446jLii4LaQyrWvMd3mXZWFHSTUoUZUEIwng0xDvNh4gxCt+GiHUFg7KiLEu8d4TQ4cIaM6/rimUWSVD/yxNDZDzeph6OmE0mD3zhb3+8Cnz89D3J0b+Yc2D2IBxiwktYuyC0DnPtCk4q7N6Aer9k8eoEvzD4x7cpLFpznES9OQfWnd5UZGaoQsnDZwqevDBgr3TEdsnx9B6ffOUyN6/fZLn8CM7/HpouEFuPpNx2kvk6p61H25gW5xDOsEYzXkcFc7MD4DQnvVw2WF8w2hrTLJpTe1b/rc1HNcYSCVgaknwMwy8hWdM/3lceIwhNaBlbQ2eEhQhBFMJN2RBbZzjY3uZotiSYSGojR3fXKoXnLx7wZHqVl77wWYRITJ9FSW1zMi/sVNR/Gh3KkWIIuXzGrl7u/zWuF/dxK6O3KivroyWz/iPjLa7wigYYIDnEdJw7e5ZBWdI2LckYonHa6KbVMh9LgRcostOsdfkhz61MuzVmJfSjt8eqAWxP9PyMYKtKO+PNTvS6nQcbIWjkLxTgMlfHl0g9yt3vyh7iQUSZ9RI7unIHM7/LI/u73L19zFq/LEckWU9A+jRA9gOOusQknmDjgOKvO5rlnJ1iTQjYwtIl3UAL9PnHqEp+zoJ1lhhFESFRgSFjleRnKbAxKcKBRejTHAYjdkMMSTtJYvU5W1uCL6nqAmOFrm2Yn5xg1KPI19Zv5ArTkoKWk0gmivYxnUHTj/lfdcg0DbqaX2+J+N9v9N9pw3+/Ibjf6OvcyIryG+9xq/fp671z0O9PCSGnmT5zAN//78LP/3nYWcB/+u3wX37Dm3y+4HOjG4DSeULbsbW7TcLgvJLXLLA1HHAym+Eyw95VBWXliaFT1EX65/xG7f13YhgjbG1t4ZwjBFX+6y+tl/iNkrDGYXKWLWV0zljlYmkaSVN42vkyMhyMWDZLiqpi2Swpi4Kqqtne2+Pk7g2MGeKLQiuHpFNui3N4o509XfL4lDC+REzEelXffKDmbXk8sAPgrc+cKBUo8K5YtZDtJGG8o3Rq9DEGZ9Qr6WLQ3I9RD8ham1mNaG4nBOUH1BUO8FYhtabrcpvO9TlsChwYZ7WIzFiOjo+5cOESr86++A51i4po3f/y1KsCGLOFkbOIaXHSUdXCY898PVeWNZN7HfEk4qsx7d2G7loDY8NWmTicLzHBavmQS2qIWukDJQC+5mKFPfCkxQlHr17hM9duMp3tMZ1+EIkf1uUogixySQ7xFCn7/g3m9E+/F3gJeCOjP4SwKsOSGDk5mRDbNtfK64Ek7yV7O9s8dOlhRsMtbt74Ke7c+2+YLhdUBpyvWLYdMZ3e7NQIOYZJEaQTSUwlq7ihDlEIHW3bMCo9TYDWF1y/dm11jLoo+MiHvpbje7e5fecOmACpVbtslfmu4Yto3rz/5NVFAOQ6Wksup8nOmP5KN5heOIf+vqb1DcjvXS6XtLle11urtfm5RGqrGvLoY4/wqU9+BmPuErpjYECFpnCKLlFn9KMxkYUTQkx443PWQVNhK4glIxRiIjRLTLFEqi2oK+Jyjmn0ea4aFNlc4ugLHCXJOGw1whQDgrGK9pioKEI7xSznpMWUFDvqcsDBVsHnXpoQkjsFY+uxDX1zEkAFiFJkpxpw7mCfyfSYraLCL9aQZDJkEqzDJUOVLKWvGEqJJTJtltmoJzTk76OpoBUoIlluuIetVTExZb1fIwYuTuCDVyBaqo99iMI5QtsQZzO6ZqHozzpWX3OaJKMH1uDKIQzG6oTlaE5PYwHdgkwDUyciO2mwoYj3hjV4v6H6SkT996clNpnqsnpNVmJFsBIu6pX/eohOcrXESnSqL/eLmGsVPP1/Q6TN6FjY+BzW1SJ2XWUznc/ZPnuOWbukcDXWWLz3FFYFsYqioB5WxBho246icDhvKYua
Bl+tGw1J9ndXeLK9sbVG3P2u4WTVkTo8YboYLG+hLEn4ZQSbU6RuGwGyVSzlpTKPBe6JYC4Pp8tz/xnNBU3mOsRVtDCCUm/jLaGnz9oFSMCKgkTd1td9mptvcB1r7Q+FpuDXGaxSoiw0ZwTBYp69ZlSZYXELw4JZiI8gDSUlLOSVAbhUKtbI5i0qIrufOO23nu3Dm2drZAKXqtNieXDxJr4eI7/wDj4Rz9GDExBccq0WNR+EkpPW3CNyjFqRQkx4AKaYPVUvXyoZHqVwyCPA8BX0bKpqFod5mdWyTgWFo5wtrVi7z80ovM9GeYnVu4oRpYN800gWlcLe1dY9LapambhrKqxB/GGIy1uBCxpuD0mbO8/OILjEbDKbDw/9/G/rPeXw2biCltbG6wsbkxrWy22+3P/5A3GTfHFYBEe/Pi+Gg0rqnRRDpFTrfTIbNWVOeyXGx7tUq6/5q6rqnrGtc0VFWFj56JSEL0YkQi/sfCAnCNlJCsNalEJKOqa7yTf5d1Rd3UlGX5lk17lbLvra1N4cWWFb7x4uFMZLy1TTUYsb6xwerWJufO/wsqV+NcgzYRY+DsmVM8+I4HeO21SzzyyGNcW93Ex0iMq2xv/WueeOJv8tRzz9LuH6AoOhAEGR2mpf49cyH5gRJBHGvQmRYPaaOkJTJ5C0iQ5SMEJT1Fbae9Ptc003YAkJTdGlCKOx//Luavn9q7CBHe87N/jcY7RuMxhw8dplUUwO0oFm+8Xvv+O+GoKjTaCsjQub17aVJ5OMvleWl3WtPfnfzhyf6ujDxDwccUPErZfnqAN3zt+zECOqvGIxSawuZUZZkoqyodh5H2izGocAEXXyZoGA0GjHZ2UUpR9FqMgyNaQ/TNVBQntDw//b1/kp/6A3+E7VPnaUYlPtOodoaylsHF64KzGDlaUfzPs06B7bWp1ncI4wrdbbE92GVQ15TBQ+kIoxqiYjAsWb++lo7Zk2cWpyK93gxbwyFlVeN9QpNbS107mjJgjGV3d4eyqrjjjju49ehR8iyy8cpljs4vMNef5fkXXyK0MnoHlunPzHLfXfdydGGZzEcOzS3QycRpLGu12B2OCM3zxHCNxosRmDZS2SMKfc1qi0fhCDTBE3jzTM1ay9hVWGtoGUuW7kG0hhg/imp+AvxziSsvj/upkyex9qZzla/5iBHBZWixty6KgvF4LM6lE6dFLwqWwXtIzJ04rglNIDrBvsSEFUFHFhfmuP2O2wXUSaTbbjOTWVRwKLeIb86QZVb0KkA21oklNEjI7INgV5S0A7WRqq4kFwIiVpMNdioSqXBNg4rgaidrhffUdYlRkV53hnokwWJvdh7vI88+8yxVWUmbUQ4Fa8Q1UJIC6fW7IDLxIch6MS5HbO9uMxgPqcoaYkiOkobbzp6l2+1O5+1/G28+JgHBcHhzhkA3HQBMeIVNXeHqGuWltxtVTIu5FhRwlmGRPnlMPVqVIsxRVVHWFcPhgKapJbmPER8amqbCT22EDXVT4by/YUto6poQBSTXbrWoq5JyihHYAD56s6dzU2MyeQa7A4bbW8RGELxRizqWBDiRnd0dNtY3Ge48Tl2LSE7wnqMnjnLq9HE+8+nP8uIrr1LVjv3ZKlExGj3Fpx/98/zKx3+FYVkjXfxUmp7gACZZvtVTvJt8qRtsIycHHYMAQQgimOOdlBYnS7JJalPdToeZmT5Fq5DFg8DShTv4rv/Pf6C7eZD27iKH/vKf5LZHfie7gwFlWdLKCw4fOZxU934XUgWYjP2ATEVurbA5vKDTtZ70QGXT0FrcCI3K6HbaCSCVwAxMkMmK0Diij6m0v18DPjKxZb0xANiPPvYMBzsinYlk/KV3SVQKVAC8UNhgGx/XpjIDWxubNFVN0cnxWlpO5WgEzuNnav7PP/O7eebdP8HTD/44P/rffzujpevUW7v4qqIz1yHWDaPr2+RRURQ5ENAxYHKLWejjVreIdU1TlQwG6wy2NtA2R3nFcHOHzdeu8tK5V9ndHTHT6hKrhsJayuEuLjhGwzE2t7RahQSoITIqh5w8ehStNE+98BxlUzO7tABRkxVtrq9f4/ChQ9x2+g6uXbvGph+zfNcZZg8c4O0Pvov773072gU61tJvdxiPxvQPNOwMfkXQ3VraSnVd42pZpCdysN6JYJLyHuXleoU39G6NNRCD9IO19IYtEqyW4zFN/TlQP4WyrxMjFHnOkUOHb2jdfL2PCIxGo6nBT4hBsvSqRsfEbQlhqgxI48E5mtE2pinRocKVQwiNSL0qg7aau++5k4WFRTQw1+uShYjzjrq6RDN+nGtXr9BUtdyPRjT1mZTNg5cgX4s0dQTx09DCZpko8oUg2CEVFUGL2qXJMsFsRcALrTZGj9MaYxT9+Xm2NzcY7GwCMByOOPf8czfgeow15LkIvk3A40ppYYFpacd1u13yPBcas3NkxpLlhdAQo+LsHXfS6838tyDgKzhuOgCogij8aa0xWmOzDJOL0tfE+jGStMG9w4dA7WrKqpTszTkya2m1W6LyFPfoJ3nSBpiYJPjUO7RW39D7K5J0cCTSyixN0+wrZQXgK+8uFSOMxyOGO7uCHveeWDf4cUVdloQYWV1fZ1idJ8SKVpaT25xet829d97B5z73NOOgMFmfiE4dDZV6WcKbdvU5Xn7xb/OZxz9GGSKh8WgvOs/RSyA1KftrnfL4aSksGYwkvn5wTqhu6St6AZLsL5+GECiKAqU1jatT2UiufeM82bUl/sCfe4Q//hef5YEn/ggXX7vEYDCgSZWak8dvYa4/h7FzoL4PWElHIkOMLPa4qsbqqbnH9H1RxGTqusTVJf2ZHloFIIGOJkI1PvEa9YTrv3/zj/u+v7F68MabWBhLpsWwxI0rEZtKwalIDAgNiXCF4MZEoGwaRrtDjDIUrZyqrlF5jhuXfPTb/yrnz3xs+mcH/Wv8yJ/8rbz0xDOUG7vgIjOzs2xfXWV3Z0CjJjhIhTKKrGUxR5doVrfRmeba5iqXL73G9vYaGzurPP/k4zz/xBOMR0M6rRZdlaExuLGjqwtswlRYrSmUoqUUh+bmhbsfA3OLixgVefZzj7J15SLt3jzH33Yn93zoveh2TlmOOHXrSXIXOf/sC6wPdzCtjFOnTvH+d76PbtZBY1BR8eK5XfqzvxdiRzYHraaSynVdT7E40YlyWyTiARdvqF3JrfCBwmRkOsMYqUqF2lMNS3zt5LONQ/FvUPFZ2vkuRw8f4cjhI/zXtO6XdcNgNJRyPgptLE3jpPoZgmS33kurz3sybdjY3ObChYsC5I2OMBqjxhWuFkDkTLdNvzeDtmc4eWAZvKcZjxkO19la/TGubPwnRnUl2JqmgdoRqoYJYGuCqMdHcbf0XtQxrUEjRlgC3rRi7Rs1aKl2CUhZtFzWd7ZRKEkKjSLPcw4eO0LR7krLVEU2N9a4euXK9HroqMhtRqto0Wm3aRU5ubVYI74yvU6XmU6fxdlFluYX6HXaOO9omnqKCw4xctuZs/R6fW40iftv48sdN30VvXMivuICRkkQoII8vMRI0zRUSRVM8rKIV
9Irzayd9poJMd30YioKtLuzS101crN5A10s25MCJga0ikI7mnBEvgrDOcdoNGJnfZ1ya5tYJ8nFzEKIVK5B+U+j1ZClXo9Ot8WZM6c5/+p5NnZGNMHS7c1z+Mgt5HknYRb2o9wjPp7j3Lm/x1NPfgZjMxIAQaoePm3kPlHQ4g2tckgVA9d4qrKiqWpc46ibhqoR0RXv94x8BHORhJsqMY1IiULS4XYYbdFKcfTIEepqzMmTJyjynKZpOLxyiPc9/F7uv+9ttIuDwNv3Xa2JyRFYa0TQIslX7gdDia+1wmpLZmBxYYFWS+SmJ/3LyWclFZP08fsDgL2/uff1+c9EjJDlBh9EkyAooTcpUV1JUqyysanwaaLfkmBAwc72DnVVY7oF41Bz7sWX2G2q1DK58e9kRU673WbtyrVkhCV6+dc31ml124lO5SBKQGYLiz0yTzvv0Mt7rK1e43Of+BiPfeJjvHbhZbb8iHaRMdfpcOzIUcoQMRjuOH2G2W6PIhfMTPSOQmuOzB/g8Pwi165do5sXzHU6bG5vsTvaYPbQYWJhUcqydOIwi8dWyHodDp45waFTR9hZW+fy6xe5vrpGURQcOXiI07fdRpZnWBSWe8jMdxLjw9NnKMtyilZbhLBSIBWi+FEqpSh0Jtof++9FSJse8pyNqjGjuhQwb/J7APC+Qauf4MDir6OCVC2mt/rreqQ2m3OMyhqXQM5KyRrqGzct+xPEsCvGQONq5paWeenyFZ578VW0E6fLUFeouoLK0TY5y/MLHDj8vYTaS3BcNYxGIzZ3drm8+1O48DEBgIUAdQ1VI1UZleZ9iPhRSRjXUCVKs9KCTWgarLZCH1RKfuYEy6CUYDe00eyMBugo3psm6biYrKC/fJS8JfTvxjvKak8yPDAx/tmrCxR5QZGL6ytMWpEO5wPjqqJpGrk+zpMlQzNlLKdOn2bl4KFpRfG/jS9/3HQA4JyXrD/LcFFAIMEHXBDZR59sWRUiCKSJZFonqcJA1VSUvqHxUhZUSlNVNavXrwswpK7J8wKlFFnKFos8p2jtlf60VqJQl0wP1OcFAG/N6jCsSoKKXLpwga21NbSOaJthspw6eGbb3emmF3DkJmNprsPLL5+nCYaq8iwfXOTUrac5c/vdLB86hrYdFIkLn47cNc/w+ON/medffE6AVOWYVl6g0YxHpZSs0dMga1LUjykIsDqZfETh3fooX3VTpch8r03gXMPuYEAkJiMmyRC8cygURksDotftMjfbxzc1vW5HJrD32Ki59eStfPOHv5le910o9QZN9xhFIc576qpmYn85fRmmzwzB0yoKijz1ef0UTZUq/nEvQvmCQZ/6gt/XTZPKzhqjlTyTSFtCpTIoOuEGlSKG/yBBV4Sqqdnd2cF4WDywiMkszz3+DHf8m+9h4eLpabxhmpzv/OH/nUMnjuOA1WtrEBWdXg/XOEFgE1GCIEThUcFhtaK9PMvhk8c5c+Y2Tpw6iSly6qYmQ2OC4dSxk2w3NdvVmLfddy8ry0uM64qOFbOnUVUy2+5ycGGRvGhxxy2nGWxvcecdd6E6LcbNLnneluKKFjBtZi0Bjy4sMwcXOHHfWQ6eOY7TkWeffRYTFA/d/wB333PvNPsL4XaU+gaCfy8xCrhs2mNOFTytdbJ29dS+eRM6UqDxNVVTMypHVMERdRKNSfcjxICOCqM1Bw40DHZeYW62n5guv8lk/ToYSomdtouRsqoS5kcqncH7qWa+0dIOET2KSKYChTb88uc+y4sXLqCNweMIdUUcVLiy5sjKCocOH6XRhvHuLrvjAfXYc2V7h/lDK+T5J4n+Y8QYCFWFLytpoSUKcWgcvm7wdUP0XhwvQ8BXlcxtJYnX1OUvMMXdTCo9O7u7uBiweYGVqYMxhlanw4GVQxiz1/uH1Jf2Lj0XYlvuQmBcjoUVhgD/rNW44Li+vs7V1TUGwwHaKPI8I8sseZ4nXnyLY8ePc+KWWzDW/reWwH/BuGlkTVCaJoTEKxU3NJ2iMhcjaMmqxnWF0RCDwuiINVYWv1TWz6xFTf2QFYvLyxTWCtgLgWRP8v8mRczToRQ2L8i1okkgwP0jMcG+skPBoCypfMT5iq2dbbq9LpnSlGXN1nBAJOCClx5s7enNGK6tbVLXDdiafn+BxdlZYqiZnZ1nefkw5849TVUO6PV6tFoZnU6Hqmqo64qr1/4BR478v8jtPNs7u+TW0G4XycAnpM0xMlW+S1a1xiDVmCA92qIoaBrJVEOQDdkAzjuGwxFaG9qtFmntJQRP43xiZUrP3mjN0sISW1ubEOMUPJQXOdF5Fubmue9t7+FTn1I493dRDETJzJhkdBKxEzDgvmxQa2nauBAxBJTR5K0OkU0mdsDyQRqmkjYS9txY+ucNr+2TEE6vuUakRjtFC5SY+NgUQGoUKiUSE4OkGNdFwCoGlLIMR2OK4YDOTJfTd5zmiZ0Bq0+M+a1/7t/wiz/w/Wir+NC/+hvMvnCWvF1w6u7beeK5pzj3xAvccuAI9aAkNkKF9TjJ2qPgQZRqY4jMLPZpz95FvbVD3mnTVBXWaA7MLTPX6/O5F1/kwPwChw4s8dTTz1KNRthOG+8CWYCl+QXmFxdwLnLk7K1cWbuOmp3h7C2ncTsvo8YB4zRKOaGWtnJsI4IvRikwinavxcm7TnPw8Arnnj7Hi0++QM8WHDh6UpTb8pgSyvfTuBoXfl1gf94TfaBpHLV3Qv+LiHrgG7wuFGJaNapKtIbCtKjGTXpmlJSuEeBnO2/R6bQZjv4PitZfpdvtsrs7+DoPAvbaUXmRY7SmKke0Z+YEKBkEq5GHiDdRSuyNVIuqsqKl4KDJ+Oyjn6Xf63JocTEZ7ElG34xLqkFJa3GBnXJA3B5yfXuX3dxw9x33sjXYZK6zQ/QRn8SdlA7kpkU9qCWJsxqnwGYZwUd0VVPuDLHdDmSZsBIaR2w8DQ2ZyQk6oAjsDsdsbG9T+iscOHyUmExpDJroA/TmmVnYZWvt6nQx1lpjcwGVEqDIsiQUlAKL6KWiHBVzM336MzP4JP41pfbGmFzwpF1ZjmuWllfo9/u8cO4cZTnet/Z/XT8gX1fjS9ABSJm9mjjYx2kG7monGVWMSZtYSlzWZCzMz+Gdl99LNzI0skB02iKi4psqUexEkc5YEZCp65rePrqK84HhYCBSh5ml3CcT/JaOCOOypNVqMxgNefWVV7BKUznHuKpZ390VwZsY2BkMmZnpsLq2mnRqInkrJ0ZwjSezmk7H8IEPvJuiyFEYhsMhVTWmrhsym2NMxnD0k9T6O2nlswQlVQhrRUhpkklPtL1BSvwu9RaNMbRtm3I8Fn69d2htpr83Ho8ZDocszM+TJSOQECJ13eC9JzdZ6tkL1bPbFTBYWZbCzGgCnXYbiFRlyYmjx3nxxRe5vvo2Ih8HFFmeS8CoNdGI4E/Yx+gAUcTTSlTzrTEszs9y/doGcfqEJTDpNHDYv/FPvt74GuwPEhTCYx4MBszP9nFNFHBRZiR/TX15neiVPgQUjsjTOPc2jDE0dcNoNCbvtLCdHDvf4YnPPsdL
Fw13/5H/mTPHb2W21aI0DU1VEpXj9JETPLmxywuvvMTCoRUBYfpI1s7wwSUEfUJgK/EN0NaQ9bpkRZfbbr2dK5cuiF2zc2zs7PLAXfeyfvUaq2tX6XRbDJuKunEU7RYryytgDDML8yibk7c7WGM4eewY5559hWE9plXWxFyLb4EK6FzvcbOVKBfGECh6He556H42Lq/RygqG1S5b21fxMWBT317xYVRjaJrzeP8MEaFzGQw+RpxvqMrqBuYHQNnUjJNZkVJSiYrBYXQS/EEC2xgjy8sHWFxYpBmN6XR7rBxYZrA7/DxcwdfjmKCxi1aLxtcErQla9DFq72maGqs02kj7JHhPdFIJm+3PcG19jV977HN824PvpD/TIQRHOShZX73OpesXadk13v+Od/DiY0/x6tYFjp89TXCiGR+cS+Jqhso3tKyhHAzQUaOLHKcECKxcgxuOKOuKfK6HzXMCnuiAcgQ+YNotojFSjGsM2zu7DAYDnCrZ3t2h3UqGWS6KiZCP9OeXGQ8HxNGuXAtBQooToRHsiEnVHj9pC4QwvatWC/YkINgcazNpGVrxuohAXhSU4xJjc06fvZ3r166yub5G0zRTrMPX/1PytR83HQDEEPEx+Q5PJFiR+zpRb9JK0Wm1EmZLXK5c2vxDELaAmfi6w1TEJ0aIToILqw1lWYltap7DvgUkhECdkK82y9nd3b3xGN8iTMDRo0eZW1hktH6dvMgIzrO5vcmoatL5vwTxJVAqlfsiVVUTYyS3mna7YHN9g+PHj3DrreIncO3qBk8+9QKbm5tEH1hbu4r3tVDF8oJ259d4+N1DBru/l8Z5brn1Flp5JuW8JHOnolhIZtZIf9HLte52W+wOBmhjWF1bY2lxMfXh5fpERB64KPLpNWvqRmwnrUm95Yn6l1RmsiKnrGo6na5o0YdAllmyEBmMR1hjgIeAj6dNNzAYDkUYxXqhJE0CltTyMdZQe8F96CxLNJ+Yyst7z5jIHe7f9OHGIAD2pvyN1QGJwRRbOzscPbKCybJkP6qTHnlimgQIWjIOHyu0/hw+3ov3AWMU5ajCjRtanYLTt53i3Asv8OrlDa4//iQozanDR+nPLrB28SqZDhgybjl4nI2ig+7kmCyjqSrcsKbVySE26QjF4VAnC2QKy8LSAbaqisXFeRrraXzA4YWSurmF6bSoxrs0zhFrzy3Lh1icmWM4Lpk/coRRVYEPgqh2NbvDktp50aWwGdGIf0FIWvVE2dIBaYVYydwXjyxRbZW0YhtUhprStiKBiAvvJap7cd5R1k8K6j8Gatfgaofzfl+Qmm5lqjbEhMHwzk0rUPLgTKpHkeNHjhKdtJB8/Pd0Ovd++ZP4qzgm5fKdwYATC0s0Vfn/Ze+/g27NsvM+7LfDG0744s3dfTtPh8nAIBIwaQIkRYqUIZJiCRALThJdtF20XHKxXHLR9h+uYhVty7RcrrIsBolBJEVSTABIgoAIBojAIAyGxMSezuHm+6WT3rD3Xv5j7fc953YP6QbIkgYDvTPdfe/3ne9857xn773WetazngeHap9YVHo3hoT3uSVkIEqiT4nZZEIjkWUIrB+c8DNf+gLf9fKLVM5xcXHB+eKM85M/x/n0nNg0PPmxFzmvDNPZnCa0VFmox3vdU76AFAPrxYLjy1c0CbeqnRG7jiiJ2dVLSqqTiEtCCj2rxTmurJnWh0QsqekxCW4//GfE+EUwkYuLc+UKWTcm7VmBhIPjq7iugdDTbDacnjzk6PhYicQpqV6GKB/MW5WFbvuBj5SJkxk96rssqqYkJTAqn1zXtaKsxvL4zZtcuXqFt998k4vz8/8++H/I61dApRTEKLlDsuNYHyIxqARp6b0GeO9JRsetElrk+LKgriuqolT436rSnc70a3IQQyTGnGQUKi18vrhg026rfOs8ZVGwN5/TdS0ppfe5Hv2r7wUd7u/zsU9+jF/45F/kn/z0H+AXf/oP0T/5gKqqqScqF+vLBuQ8w1VCCEIS7aE7E0l9y7PPPcELLz7Du+/d4XO/+CVu332ALyccHF1hfngF6yf0faRtW1bLC+7du8urr/09PvLiz3N8+YgHD+7zxmtvqMzoZg1JEQVnHG2G+pJoUD45PaHrOjZNw958j3oyYbVaarYNTOoJe8M4Te7p930gxpykOYuxoola4cAZml45A+rsCM2mYb1aU9c1B3t7zGczrDvEmB9QJjEmKxhqVScS2dJ/yLwQnSzxrgBxTKd7W7JRlg0VyWN+8n74/1GY/9G/P7oijCm0DdC3SIp47yiLQttRTs2OrDE6B51VDFN8nST/hLbVQ7ILgYvzCxBhbz7lWz/zaercCpkc1dy5uM/rb73GqllwcnHOrZO7LNfndERsValKXuXwpadZB2KwmGRJWd3QiN4jkxJ+OmWThLfeeY+zuw9Ynp5RFYa7D29ztlHlP58szhVcP7rM5ctXWF0sOdg/4HA+Z3P7AdPoqKYTVt0SmzzWFJhpidiIFR2pdLhR2lhJpZKtk4UkEVdaJpfnHFw65vjSDaJ4YoAURDUHJLJuPU0/0yDusviMtVRVydHhQR5/3F5K/nT47IE+TPVEiZrkWc0GnHUc7O/jnGUymVAWdzg+PMTn/f6N3vYNKfLG3XtEayjKgi7rlbR9VDtxYLVcEbqOtmlIMWIyVD63Bdcme4QY+eKbb/IPfu7zfO3VVzi7/4CL1QkSXlE1v2CYBMsnnn8ZFyQLbRlijGrRXBhcVaq3xd4+MRuLWbM14rF1pVa+XSA0SsQObcvF6RnBSNZniJBHcR8u7yDmFCGxPDul7bZnjxqBCQ7BlxMm8329FyFw+85tCu/x1qr6KToS6q3NiahqBTibCwVrshuqzQmiJqlFUWRumafwjqrUuBOComgfeellXnj5oxwcHVNkzsAgjjNceiZ8gy+g/5auD40AlIXXgyIlvHVUdZ3hmEF4VSh8QQg9fZfyvLCMwhEGbemGEFmvVjhvqUrtPzuvEFMUFX+5/+AB6/Wasii5dngwvoaubwmuGMfaXvroR9lsGh7ef8D5xRld+6827yuKgk9/6tO89p1/nQf/hz85xpcv/tk/xrP/6z9I8fOPsWkDbTcDc5myuCDEQBvUztag7PUXn3uGg8M5P/PZX8TYkqouAQFnsbag7QJXHrvOvVs9m+V5NouBV175Mp/+5Fcp3Kc4fWBZXFywWCyY7814/LEnVN523YAx9H3HpK44OT1Rtq2xOOuZ7k1p24aLxYJLGYL3mXBjjNFA3Hb0XU9ZFNRVpQI+zmVjoYwCGEPTNFxcLLDWUE9qyqKk61qcdXzPd30P0+mML3/lLn1/haosspgJmDxfPCQgAvRRD3xjYLXecHH3IQH1Nwgh5qo/R6aR3LG7aXcTgfdfu4kC48Fkc+JprSYn3lkQT2uC9qC9Uz/zzEaW+CZiPkHolb+wWW/YrBtmezOefuopbt68SrteM68m9C4Rup7ggtoMd3Bvfc5qecHLzz2OaXuYGIXQvaFrNvjCUhRW2yTDy03CcrNienzMc5/8NO+8/Sand+9yUM3YLBYEq22
eSVnx0qUbpKTKEb6q2Lt8xPm9+7xx6y2efOJJivmU8M6S+fyYcjZBCoPNGhQGo4IzLt/e9wEozhjECNFFjIdr169zdPUS7735Og/v38Y7pxogkxqppsRQ0QWdCBKrz69SwY+2AIbq2EQlqmKU1GryZIAtNDGYVBOadYM3lrqqSalhf/+Uuq5JaZ11G75xLxE4XSy5d35OWC548vGb9E1LDJGmXTPdO8A7T9e2xBRU/TRE2k734uPXr/Lg3Q19H3nl5CGeyHFZc7r8LxGzJnQti4f3mdUV1eUDYtvSGY/re1IIum2yEJGvSsrZXM9hgVSqr8vJ3QXXL1+inE0V1Wlb1g8ecvbeLd69dZuXjo+0Gk9C6Hr6tuN8uRhr/XZ1wWa9zvwjRwiaeJDUZMhlISABHj58wNnFGQf7R/o6RkKnyUJwigY7p+6DKn4qOQHwowBcDJEYAt5rsmCKQtucKRJCYrNZU0+mPPnU08TQc3L6kMXFgtD3dJ3KVY+mU//99eETACX9JB37S8J6s9HAEwLeWQ0qWVUNQKIaPVhf0LYapFLb0PU9PmsJtH1HjEHZnSL0MeG852Bvn0tHx5SuYJp2SIAZqlSjGUdKMJ/vMZ1Oud5eZbk0nJ2tWS73t+5I/xJXXVccHOzzuT/wFx6JM90TDzn5Nz7Llc/+m7TrDV1/He9+Ly88v8/t+/85TdNSFnqqPX7zJpI6/puf+QWme5fY21MHpxQBkxXBRJhM9rhy/QnefWtNyvLGbdvz2Z/7u1y/MkfC/4DrN64x3dtjPp8hEdbrDV3T0odAURSs1iu91xiMdcwP97HWcXZ2Std12zWfD9sYEzH2dG2LNZ5JXeO9wzjyKFdO7qIwqWrdhGbb39w0DTH0OOtwzvPt3/IZjo4O+dznK2bzL+Hthk3bYtDZYrMzthNSwhujhCSrLYa9ScXVG9d4963bGOMRSZkQOJD7hgi1e/jLv+Dr428DIk3bIClhj7ZjrAYlQ5kkRAZVMrLL4pfA/AZCnGGTw+A5PbtgMp1SlwXf9q3fwtuvva6/OfQYk1QpsizxVcHUGybTmr3JXFtgYpGgbbS6qmhzQKgnBVi4OD/DYjm9f5+9gz2u3LzK5GDOnVu3ee/eXd5843Wev3qNdWh54cWXaO6e0RI5nE2YTSa886VXeP2tN3n6o89z89mnNQivL0imZHI4y+0ep7cpDgZNmQeA8MazP8Uvf+ovUDUH/Pa/+39XdjY62SLoOO8TTz3D+fkJy/NTjFFTrJg6hIA1Wt0la1hvGh6enNJ2jxJ1LTajPAkrbtu2syaTLtW+9mB/n4O9PU5OTyivlDjXcrj/KrPZIavVmm/E6/0k5L4PvHHnDvME+5c2HJUly/WCzWbN7fsPePLxJwldS0hBnU5FSF1gMpuxv7/P8WzO3dNzRCxLcUymt1h2b2jLIETEGYJELt57j4ZIaBIs16RLSfU3BJy3SFWRrMEmB6WCvt1yzUXT8viw2WOi3bS8c+8e52f3mV69Rp8En9S4S0QnesaxXCwpBpYX51hfM8njfSlqIZdCGKXetSUYeeedd7j8LVfp1wGTdFR8kDb3mRyeRCicJQQlkw6Xz6qyKUZcWQ69PR3hNQZvDdieaV3rFJRJGOe4cv0xjo57nLN0TcNysWCx0MQlZKGmX8/Xh08A8mx4kMS16zeIMRBjIvWBTbuhT8ooTsZQlVX2oFahEGc9ZeXpg1aZhmHczJKMoe17Cl9obyx7jFtraWPPYr3d7DYr3il7VKuUmNUIy8mEyxPD9RufJ6U/wOlpz/nZBcvFYhw9GQhPH7j+OYyR6XSm/cdTD1cYkwATLVPZYzKfcrZcE0U4Ovo0l44/wmKV2Jv+Hdr2lG6qo5Kf/bmfx02PmMwNsW8R5yHmgzUp+ct2gb35IYdH1zi59y7D2N7tO7d4+aU9nnj8U5S+pG9bzk7OOD8/p8iuiX3fU1clF8uFVlh9z/HxMUXhWK5XNE1LXdWjGQ9oTFU55qDTFZXHFFqNJcxOwqMbHTHs7+3RNo2OMyVtG3DUaMXXzQid4fmnn2Vv/kO8/db/mel+jy1cDvyPjnA553BWxz2dtUwnhr1pzaVL+7z71ntstUjfB/MPKorjCxwSg0ERENQfIBMl81cmVc10usfFxYJ121JNJ6oqadS0Zn9vX5GQpht7jwYQ2aiGQd9RVJZmvWFxsWD/cI+nHn+Cfr2i7VpiDPjCYXxBMo62XSnJrcscGBKekpR6baE5S1XrlEYK4EqLKycsH9ynlyWumhJCw/7BnP2jl/nIJz7Of/0TiVsPH/DS9Sd4+Prb3HjuKZ46vgxtInSRSOIz3/cbuXTjMtY6+s2C+2/e5/FPfIpyPgFjScFAH7BWMKXRFgRw+/HP8df+7d/PevYQkxyh3PDbf+z/gdkUCg8b8JVgS8PTL77EFz73eaxpiEnw5Rzv93FOvR3un55x594DutH+dnvNpzVTa2lij7QdgsEbTxKFgV2hI3SXLx3jS69FgiSsWAr/GvP5Je7erT+4Wb8BrvcfLSklvvLGmxTecy6R7375o6zPLjhZLFg1G+7ceo/rV68RYmA+m7FXz4ldz97ejLooubR/wN2TM0xKPFycsr/3Ds5dEKOwaDb80ldfYVpXlPszXLb89qEmdS12At2moZh4daAU9XKJYrBGWD4449nnn8N4S2hbTBI8kXdu3eLmU89iiIQ+ILEntK2S9oxQl+XY71fi8IZN05EqjzVqSz4E8kG3f2gv3bt7lz4ozyiFNLoVIso5STkpXa9XqvwoZH8AR9v2uIwaeO9VAj0/rzAojdqxFVEUmcckQipqokBVTzm8dBlEyctd29I0Dednp6guRU/ftUiSrLciIIzoxEBi/2a6PvwYIDqu5Z3j7r27WOfZn811Nr8okB6MUSQgpSyBmQbLXzX+kQzxDhK3NjsWDSMeOmamFZmI8gLsTukdYkCcz9Wrsk5dJpGkXn0KuvgQI69y6fK3c+PG42OgWq/XnJ+fsVhoUtB1/TZjl232Pvy2oih4/vnnee+999h85w/CT/xn8O23IBme/dHfxaf+/B/kbH7Gu+4e0VmefPomfeg5mv0Gyup/yL2Tv8Ae/4CzszMuLs4xiyVlUeDtZZK0WJOtk72FDEuHFJgd7HN2WpJ65ThsNhvuP/gRqupjrBdXWWdFPiXrRZzzdF3H+UUmU2E5OjxkNp3QtC0Xiwu8LyjKciR6DUl817XEGCkG6D9LDo+kLN25LFZLmqZjMpmwXq2o6gkmJtrPfJUf/d//ILFo+cyP/ft85m/9+5jOcPPxJ5jWf5Q33vq/IPZdbGVzG3+7ebz3GY7T1+Sy3v+smlIUbhRQAbQ3PNhPfoDo9/6qP7sHPvJ1Q1l4nDHMJhNWqxWHk5ouJMQpW/rByQlXL1+iWW80MUiCNY6U/gLwHxKkJHrlWJyfnTObT/C+5PLlq7z77jsIjtWqp+8DfVwyn02IoceYLGaVhausdXrQiSphlpmIGUKirCwXzYLJ/j77s32s0UpotVly+vCCj730ErcfPuT+nXvcvPoYYdGT9gJXbl
7HTGZcx7O695CTN29zsTjn7quv8PRHP8ZjLz6PQZA2IJ0gDqT02oIRQWziz/7PfwtdpfrhYiOf+7Y/yfU7n+Rbf/p/ga/LPNuu+3H/4JDnX3yJ1175pxwcHZLi/4gg77A4fYXTizMenJ0hxuML/wE3wBB6ArrmRUQ9LYzFJdRLIKmj3vHhka6ZJKxWaypfgLV4d7jtBn2Dn8WC0EclcX75jTd5cHLC8WTC8uyCgHI/1u+9x6WjQ0y25K7mE0qgMJ6XnniSh2dn3LtY0sV7LFY/greWPhiuX7/B5SefZN1u2KRIt1riXcEk9YTQc+/WbfYuH9EvV8yriU5sVcrfCefndP2Gy9OZjgmHhMSADZGj/T02TQPSM69r+nWTzw1D3weu3rjB/fOKvu0wCH2zoWmWOLuHddCngBclcQ4JwJBOxxj52ldf4eOf+CTr9WoUJwLyuKKiUW3XarFnLdO61CTSoOiQqH5CSGqN7I3Ro0FSRhGMvicRQh+0VZU/DYxRrkDSRGO2t8/+4THXH3tMxY5AORAhIEZ1aE5PHvLaK1/JZ+Y3+IL7VVwfOgEwkGH3hM8KfjH0Iwyjuvj6uBQTfd9R+JJpXSpUPIj3oMGlrKox6AqAVXKKVl1bWHf3pscUaUObGaJaWSaUmDhA1Vpx/E2aJtG2n86kI5tZ71e5eu0qq+WShw8f8vDBA0UQeDT4A1RVxWw6459+/vOYtqL44X+H+ve/wrW9Qz75t35ItWpCRBAOD444mM259+A+pS/Y25vTdP8Ws/k+b7zx55BMhLs4ucd8fkDhLaXzSGb/kpQAhPT4sqSezlmfq4pWCD2vfO2rSHqH2Kk/unGW1KfMJFaBjOVqwcH+PhYoi1K5Fmu10Cy8SjYP1bT6eveEoH4GZVlmZGVngeePQIQ8blPgnVNEpet49xP/iJ/7X/4fiaW+zl/8nf8xkPi2v/qHaZuWST1nUv0wm/BXCf3rui52nj/kNofDI7k/H2LkcP+Q6WzKxUULqEOZ2OG0Tzv/7EL/u6v0UR2A4U9FqeOT3jk2IdL0PSGASRFjDJtNgytUEMkaR+x7AjHnEj8H5ntpux7nCrquZ7FYcXho2T88YHJywvnZCd5biqICU6HiaiH7N1hICTEq2eys7iMsJIlZh10rxtlkyt7BPsb7PCZpsFZwRWBvf48bN16me+Fl2tRT2wo5X7N5sKG6VmGnnvJgxvqdBWVZcf2pJzl64gkQ7fdLEJw3UFmCUX16k9EeeT+VwugiGIypvDFYMTo3LpHDwyNiNDSbDZv1hrPFO5yfvkufEs6XiLVYUR2I3asLAVPWSB9UF8T5sR3hsMSQKH3JtK7pu475fM7Dhw+4cvkKvu/Zm72Nc8+Q0uTR9foNewmGf4RwmfsXH+P+xSLHIqHAsI6RAJTmmK5sKcqS0hc47/GblpuHx5wsFyTzi7R52RsMISWWTYuIoW9D1rRISEhIiEyrinu3bnHz5uM6gpoEcYbCO27fuU09nRO7BhMEFxKx7zh7+ADvCiTbGnddr5LZ3mWdEC3KLh0dcffeXUzSNZ76DU1bYkuHxdBnMq3fafkNn9Xde3d5vtX2cUpawMlYgGlLaW9vL3NHEvWAkiW9ZzGjStY6tQYH3VuiY8YGlYwXFLXu+8F+nhGVtsYgVgWn+lbjSUwqZue9o+36bIvsGSYOvlmvDz0FUPoKjMWXFXVVgCQSSQk8Viss7x2F12pyNp1nZT+Hy0IwMQrOF9STCT6TN1yeMT06OOTo8FCJaUHHh5zzj9gBV2Wl42SZlBa6LBUJgDKHS+8pCyj838daVdRr24627XDO45xnNpvz+OOP85EXXmA2VzObUVNnuDFGzTE2mw1WDI9vPkr8o9/N3v/7++k3qnPftx11UfLUE09xtH/ItSvXuPbYY9neds6k/h3s7f02MD63Qxq6fsPEe0zhRylLmzXujRi8L6mmW61rEeHs7Iw+tFSTijIfEJN6QlH6cQRzMpno+y9KvPNsNg1t0+ILn/tnWzRdJBuUWK/CIE5H/ow1owqvMWp7G0LEukGpS++fAPc/+jnWl+9sF4iBr3zPX86Pc3hXcHBwE29+N5bHdeRzZyRMsqlTiKpS1nUqWSyp5+BgLx8YSnhTa0T5Ov+w8+fd4P/1lvXWF329XnO+WGj/OvcqrbWsVxsuXb3EZtNlc6Q8vSC/iIhC+W2WNz07vSAmwXrD0fERzheUpWc6LbOWOwrJuuF58irNnhnDAagkJ8GYRFlNqKoZha3AenpnwMF0MuP61etMJiXW9pRV5MrejIPL++w//xjl9UPiakM4X1HMp1x56Vluvvg8s9mcajLDtAkTDTibdQDUHW78PJLj3/jr/wkmDbAqPPvq9/PSF383dhAKQt+P9TpCVtYlVT1jvVqz2qxpu56iLKjqisoVGFEBqF31RwDnC+UAJJj4ciQQWwsxBSRFJmVFUVZZd8JnuVolus4m7+HdB2WYv1EvQ0L4GeCrWdgmp7ICrQgXMXF7teLW+YLVck272tCFnj60eITDyYTSG4z8MxpRwTFM/iwMlKXHVwVlXWGLgqqqqOqaJJGj6Txzb7TNJm1H3DS8fesu1f6MsFwQFxeE9ZpuvWa53hBF+/WIUR0Pssqjd4jVaaC9g30K7zUxl0Rs1nR9S9cqmuBwqg6bA7Qm+tq+7buWhw8eAGSJ8jTuhUHBNISou1UySprPtbTj4ikp4fL6Kkq1Gt/aDzPqpNnc9rQWtRq2djxbU0racjZCWRaUVaFui87iDDSbtfo2fBNfH74FkOfvnUSKZHWURTQbcxgVjsHQdA3OWkJQpMBkEZpBXa5PCekDZC14dYUzOruZP+CYIiRwRUHakRJdrTaEQVserZ6dWIhRBTWS0KZena/SCu/fQNJTJNHHN52SDmMIGGMpq5qPfuxj3L17h/fefS/zDxSWCCGyWCypJhNi27FeLtm0Ha+/d5vCWfamU6griumMonIUpSY+Tegwgs7Yt8JTT/57nJ19hq7706TwLl3XkOQASYmu7TP0Z4kxYsQgKVAU5Q75TXUWlqs/xVM3/zP6hhywBfJzSMZEo6g9cxd6Jd8ZQ+U1MfN+awccQ6DrFNL33qsC3mjAZxS6S4LN/X/lDJts0KR9tfL+MX4zJUy2HI3D288hSVERCxwfHrNcrmi7/xll8acx5sEja2ogFKXcTzQGVlkd0dq7pGhUKQ2244Bft+of/rvrFJh/B1qDWeOUs2AgJuHdO/d46vEnKKyjbRsMcPfeA7792z/N6YNzumaTg79g7QNEfpwQfidJEmVZ0Lc9F+cXHF8+5uj4kK997TW+9tpbPPvMU5CEru/oupbD+aGqDdqMRaQ0mmANya0RMF4tdNqzNVWoSUmYTErEWyT0WF9R+qzSlt9u7DaYqsTPSux0QnfWsHm4YHJQ0/ZBD0hvkT4gAWxptfcq5lEaBfCxX/63EBv5md/w/2S6ucQP/dm/iU8lYjJUbwdtBpCsFVDWUzb336M3gvEebwusStzje/WtiO/TAbDWamXprCYWBqLN9pYSMJLY25vTd606Ee7vc3R0y
P17dzm6fIl6UmPdKfR7/3/PrG+ES8xfBdlgzC+T5AjDC7kFugfMFWET4Z3lBULiJp4Dgao0GOOYzmc4+5cwqQM/GRtbhfNUzufPU9uj1WRKaHqK6QTTVuO4tet6rFHiYNO1nPc99XSOrFZ0m4Z1LhRO1itiUdCs1lRFgXdbcajQqotrVVTM9o54eO8eJ2en+h77FgmtGhvqV3SMd7gHqP7DQCS8c+cO9WSiqqKo4+AwrqcWxUPC6VksV9khUFuVA4QvokTHsiqVYGstha81yeg71e8wDqxkaensrmqBrGKbQo/xVlvWuRmZRJUK1RXRcLK4+G9rqfx3cn3oBKAN2vOxOKJVFn6MgtBjXKE9/0zoiHHot4QMqWYr4Sz8k2LEGTDGq5d9FnoZFoE3BVESbddS7rCIi9LTO33JkpJ2e7MzYRphoETbd5nZ/I9yNfr9dH3ABD2MnFX96BAjqW157PHHiSFw//4DGA0s9LF1NYGioqhKjIWLpuGtB6c8e3PKO/dPODm74PGnn+f23TuaDFnHar2hLAusM+zPjzk6vMzde78HSX+cZrkgHFzBIITUUboKQTBWFygh4QqHLytCOwglCffv3Wb2mQnBOwrvspCNo+8iIUZOz88UNjM22y1HlbsFhnHE0Q7YKQvdeQ0QxtttMBAYSQCih8aQeVurutzL5YqP/tc/jK2Ez/3wH0N85Obnv4/f8v/5j8exT4lC2/WA4FyJsb8X6z4//o4Uk252n+1BxdB0HVihKkvKwtIbS4yAxJ2W7/vxuCGSfTD4j4/Is9h96LDWMd3bZ92esmk2+GIv950jm7ZlsVjy+M0bvPqV17ZTEGh1amLCGmibhqqqefjgjL29PXxZ8Oxzz/Dg4QM2bUPbtoSuJ6XI8d6x3seUKHKFkozoPUf3S+oiOEO76pn7mul8TuwifR9JNuGnNcaDk0CUMKIwiCBJ4V8slMclrvVsTi4IqxVulmeqM6KWkiJqqVe9B3GZQJVbDZ/4/A/xic//ECNiYZRcJYNKmxUI+lm9+pEf57UbP8H6C475P/5WTeSiEFKij3rQa9Lw6OcVuh5JUNQlURIhqj99CFGhamOYTKcU3tNZw2RSE5zloihYrlbUVY13fwP4Q//cz/sb6pItDwr+AcI/0K+bF0Ce0aBkHqMLT3J7taJP97ixmXF5b0pRFPSTE4QzQmxZnZ/lEV/oY8eiaRRBNbqWQtvhoqIENlft3hdIp86sbQycLs6Z7M0pbUEjBlfXlJJYNg2uKLNiK/Sxx+ARTPYUaIihAb5CisKlS5c5PTuDXLDJZgW1Y0MipoKZ9aoxMdwGdDTQW8vFxQVt2zKpJ7mYyX4oOfDHqAiAiMYGnUIQtU1GtrwtVMBsGBl01umZZizGQR+iOpEahwVCry61UdTQzqCeGCEEYoq5DTG0SfUM/GaG/+FXkABYAz5rtIeQ+5YZKo7Z/CNFta20RqFOYiKmYcokZZc/TR5sVdHHyLppCbFnUtXUVUWRYXEJkiH7bR9p6CkNY0vGKHN0vda5V+893lgmdT2yUKP8Y4xErPl+rHF53CaRUDnM2Pc0TcPTzzzDzZs3mf/yL8NyhfcFTz7zNLfv3+Xe7TvM96bMplOapmXddXzpjTe5WC75+Mc+zuH+IecnZxxdOqRrO1rb0vU9x5eOdcyryqxlC6FvlRFuS6aTKYUtALVPbfueTdsrgaqaIe06G/44zs7P6NqOw/kV/UrWTqgqx8VySbNRqeLVasWmabR9YkyuwrbQGajSWlF4bOHHzUNuq+gcb+ZY5YM8N9BIIVKWJfgNYg0f+ZEf4vjkKWwBV9/4FibdPhQGjJCMUBuVLm76DiP7pPitwKsKYRplClsRQvaUtwl6bfZRlSVdm/v9MuTnwz9jo4avGwTMjooggrVJhY1I9J1w7/59Oik4Xa6oqpoUE13fE6MmgR954Tlc5YktYAJ9jJT+NUTeRsxTxBizsxkszlccXd7nyrXLzPf3+OKXv8j1a9e5cvky69WGyXSGGRnRKTcr9B4JQBBMry2Qe2/f4rFLVzHOaEVtDN26oTvt8POKclphk7YQjDjESWZe5wmX6PBlSXVpn/XiHHve0vpT/NEMjyOtOux8gvHQdy2FKdTxdWyhaHDX/YVyMzA6XyGCEYtJkdc+8hP86A/8QRYH71HcP+TqH+mxP9bS9y1JYj4XHmWCD5ckofSF9m+Hcd6YiEGIGKzApFCy2nQ+06mTsmQ6nXJ2sWB/74DCl79GDuYvA7e+/rfkFeCVnMgdI1xlHWbcXv4ATbNhvVK31HP+Pk17G4mJbqMjvoMVs3GWLnTYvHed9WAT7brB0DI7PFAJ7oyeTeqar73xBkfzfZr1Bkk6jtunxMWmoYmB4NTMqYs9MvF4DE1MWJPo04Yu/Byb84grC1WFzRA+oSG1FsycYBKbvlFxJ9gmqyJEEm3bEEPIo7aiyZFTAqAzFnLLUZMGO/IPDNk0yfpRSM7mMy4hufK3OeFWbpe3BYjQBZ1kssZgrKKhUQIp6j4ufYkYQ7NpMkE5UtQl9WTC9szZPXu+Oa4PTwI0Js95avBXT3CnPc2k8pPWK/sz9D1iDaV1OO9VJjgLwRRO9Z9TiDoKZgxFVWPzSJ+1lq7v1QAOwey0YNq2harOJkSMs7MmqXObxERyev6TBG+tPqf7WQo/xxe/WeVyrdOAaKCczdT5TqAoa65dvQ7L1+j7ns1yxWNXrnL7vXd5971b40G+DCsMhmeffpoXnn2Ot954g+PjYxYr7eOVhSf2CRMS5w9PuLR/yC3n6KMQQkfsN1Teqs2lV1GaTdviY+TYl2BBJjWnK69z9kaduW7d+Y+4/un/F/1mgxFoQmCxOOXu3XsY59hsNmO/FlRmUzkTkhe6Ll5rLbZwmWmbJzKsMrORba+cpKQcl+G8weQn9ZHT1UMmkwnPfPF3qDWodfj8XGIMvlCy2fHhoY6JxhK4qUtOVIa28CUpKp/CSOYgJGE2m3F8fMzFxZ2cmGQ1svy+tptw1/xn53ofo807y7SqMUnXyuLkNptomFZPEPo5k2nN6uGaylhO7j5Enn2a4+NDbr93l6KweAySHoK5g8THAU8IPb4ouLi4YP9ghrHCR196iS996Qt85auvcHZ+xuPXb6qwDYI3VkeknBICUxOxRYGJQkgQNz2FGIrphBQTflKQJFLtz1T8pOtYPNgw2ZviSk0ZDVrhWxQRMwjJKDs6TArmh4cs3jslPXjI/OoR1fERNmlwd4XXwYSYsN5kFECfT0RRIkFUDVK2e+rh5df4qz/879DVKsPdXznj1n/0n/L4G5ewX/SkkNUEnRLHnHUMwDAovIqDYAwFug/77BpojKVwlsMDNc6RbNVssBwdHLFcKXmsrg3wV4Hf92GPr/+OrhNg+SEfd4IRQy9f4aSDdawQfgtN+tuaVKFk0NHSG0PfdPhCx2BjTKz6NYfVdZz9bZCMVv/WYUVTz/Pzc/oYuTSpMF3DZrWhj1pM9X3Ae09EKAp1Zd2fzYnWQekwqaTvlZTXrFeU3nP12lVu3boN5HG+0GKZYIwnpJwk
P3Kl7IKcWCwWXLl8lWa9UYEwbxTNSJnflX0n1Acg4qwiA+pxYJB+aw+uxl0yCpuFLIRkrVWpadH+/6ZtEchS8j5PrOUEKdc5VVUqYd3rSKHK1e/yjb65rg+dAJD7MwNhrCiKLNZm8SSc0UPFiKHrLL0kKl+oV7hkeD8ptKNWswpP12U5ZoFDX8cAURRG3p1d1yCl/Wnn/DYZyRWq22HWO6vQpS88USK9vI2EU5A5rigpnBt7PTErFXaxpw3acogpcrE4R0Qoi5I+rLQKNboAZ9MpL33kBegDN65dY7VZ03c9oWmwUZ//4uQhabPhqRuPsTi5TohXqYqKK2XB4aSi9h5XqDa4t3PN5KMQmpa+qLiN5Z2LB9zvNixD4M23vsS3fEK12NfNhvOLBU3bqMgHKulblBUY0ZFDr4tcLYR3wFhrVI8dTbK0Na0BX2IiSqSwLtv16qSBzYSaiLBq12yalj70XDo60sTPKcSn6IH2jK3A/sEeTdfw3r07JD4BvAjmFVxRACYzlPXwTwiFcwoTE8FEoNip9t7PARiycnb+a9/3d31Y13Ykp+/REQnNhvu33+Xq4SGxdyCRYCxnqwUPT045PN7nrbffofATSIFoDM79CMa8iMiBtrecyjAvFmv2D2ccXzrgxRde5Od/8ed5771bNOuWl198KY8UkttkCrfHdYfxmsI46yhdxfGlKwrZp4gUVr9vAtYbnKlwxhOaiIjFlWqYYkw+u6z+nI0Qupa0Oqe8dEj54hPQ9izuPCTExOR4nzI6TFUSQ/ZoMIwmXykja2SagAyjvEl17PvUjsF/vL3zlsmsIE08IeiYY0iaeLn3uQFaq/u4T5AshNDpGhDBOWWOl6X2db1zxBBp2w4M+MLTtI1yi0zzDY4CrIHbv6Kf0B26ImBYphXIX2ZobxlrKMuSEHuMUUnd6WTCZr3Uc0+Uc9GHZ0i9JuMx6nifQ7X6N5s1m6hjgmGzZtM2GFQgp+97OrQlIzFhBeqyIrUN0q5ZXzQqBxV19Dj1HfO9Pay7p2OtABKxoR3XlXmf+dewb42Bk5OHPPvMswrZ43ISsh2PbdueuqjUedSrj4l3ikBLysmqURJtaQv6PihHJbcTQBFhRRoUMfFFQUyR0pUkk3LbKfsxuDTuzxgTxoIbPcI/+B6+Wa4PnQAUXseonHM4Z7Yz+gYK75mWBcRI2wesgSrr9kvuo/Sh1xFC5xG2pI+YZX1FZNT1H9ia4gRpdhaRUdngsizzHHscoVTlJASS0QVC1geQqBa4pX8Va/821v8QvtSZ95hE+0BFlpo0ZuwB9X3Pe7dv8fJLH+XG2Smvvvo1BKEqCz75iY9z/fgS9C2hbamqkrKuidbjJlOd/S5LUor4S8eUZcX3fvrTGPsdKv/rMnIxsLkESJEUVHO7KB1FVXA9eR7zU+45+GrbcnZ2zt0HDzEiLBZLJCXme3usNxsldBcmj/15qrLKuv2qlKXs/p3D2JqRLUsSJfp0qpewaRr29+YURbXTExNiHzDGsFytVQlyPuO9e3e4desWicThwSGLiwUJ2N+bUXtPOZlyfHTM/uaAcOs28BmQtwldGJEGnY3XNoVxmhzqaxdiGiLRbuDP9+0D1f/7k4Dhy9piAYh9IJ8zbDYblqs1xnpiDDij0ODtd2/xsU99VL0vUsTn+2YwpPSPEPu7cMOcsfecn10w35/hneUjL7zAV175KovFuRL2jBnbYRKH3n+Gvts+t2FUPreY1hgxyp5uOvxejTFamogVXKmSxMloP9MaVXrTyYI0onHWWcqJw0igazYEEQ6ffYx2uWF1coa9fIl+reSqIcHWYC8ZIlXu+iOh21liF5mdXuXjv/CDfOHb/tL4rYMf/07Ke9+JuB9D+h6bERv1j39UChjQMyB1Wg3GPL6V5YML59XGOvVIVGEZiYIvPPvzGU3fU5YFIveBrwEf+cDzf2Ncp8AXfhU/l+/6jjazQcd/H3/yGR6ePMT6knUXuFY4Zntz1usNIQbKqsbwm5EUiWJIQUmVCR0DPDk5YRMTp+fnHKZEyFvq5PSEvp7iylJHMfuATYnCGrouUdkpD1YnrE1O1AW6psFXFXU9YbNSh0aD4EKHpMG+e1eNdbt3RYTlYqHk2ywS1rSNenJYS+gCXd9hRairipRRqZAnnnQfSh4rV1TWWoPgCFkVdRDtKcoyOw1anAdCbh8nHaU0RpEHnUDRIsZaMyYC9pGz5Jsr+MOvJAFwToNBdgKz1malJQPOsWw7vHGIU4KdzSTAZNX5K0TNCmOMWpl6rzOaVoVryqJgtVyw2qzBWLzzWNiBtAFjKaoKMISuHf0GJCWiYsUgCoVaa9TZzA4+AxHDKxj7l0B+mBQV2uxSoA+B9XLD2ekJs/NzQEPI5u4Dzibv8v2f+U6e3j+iaTdcuXGd4yt7dBcXJG9x/i8iaaPM1dpgnVEypBmUqdTgyCh5HnW7iznoM8LyJvfxAlkgqbAwKWkWG2a+46NFyRf7e7z19p/hxpV/m0ldU08ndG2rjnsDklEWVGWJLzxlVSliY80johv6/vT9SxL6tiOEnnXTstxsSCFgrWVvruQi5wpltTetSrTO9ogp0seeX/7857h2eMReUfDWK1/hnZNThdCMIkXWOeqy5uOf+DiHvgCeBWaadBhDENHEBxm1ETCG6WSmm1oyxPeB4D8kALtfgw8Ef9D37zxJevCWaA3GqUnJ+eKM/f19DPpZlUXJg4cPSQKHR4fcfvc2Vy9fJvbaL3TuK0T513HJkIgkY1iv1jTrhunehOvXr3Lz5k2+9CX1TLBWk109jHWtijGYwpO6BmsgZRGoqijBWGxQxMLVAoVDrJIyjFidpikET0XqAu26xxceV3rEBKL0SAwU0wnWe6oqqQT3ZsHsYB9feZqTM4r5FFvYcRRwIGFlpXSGlGfIvwbnyWm4zG//K3+c4Bpe+cjfZfKzL3Dlj/wwLOcY9+NMp3NCTJiuZ9VuPqAv4H2dWdlKFgzZt8Lm/aJVnjrZSQhEk1XZrLYrurbRpMosELmNMS98Awq0ROCv/Yp/SkP+B9+LGCBG+iDs7R9zulgSsPR9Nxqi1W6Gdb8bGyuFuSXgul6r/LIkdD2n50sOHnuckzvvUp+fIcYR+sh5H6hu7tH3OrElMWHRfn1Z1sQuYeuaFA0eD4VQFXt0SZhMp6xXS7QNIPRdg+DU90Xe3wLYXiEELhYX7B8cqoWx6FntrMYTkvrKrPo+a59oy7koSpxx40hzTOoWaZwbi8oBcU5RW6cJ6EJD4QrlT1mnyJtDBccwGGPpYkfl1ZZ+cAEN8V9eUv4b+frQCcBms2I62cNkI5m+b2k2HZtNQ1lV1NNsDiSCL1zmB4jCMjKM98loDdx1ighUvmS5XBJjUGvWzEI146beniDrTUOTZ0SdM9RljXdWJYgzwdAbnY+1Ajazu61TCK3wUPEalfxtxPwufepOKHxFeW0fblzj8jtvwauvMZlM+G3f95sQEU7P7vLYzes46+j7hvXZH8E7nQFPKG9BjMkwe4ZPTc4wM2kHDNb4XPVqy8Eap1B3rhQlCbZ
wSK+jLtYXGFPhQ8KbwBNi6JfvcfVjl+n7QNf1XJwvOF8tKYqSiZFRJtMY8iJ2mdm7PVgMqJtgFwmhp21bmqZltd4QJakdsHd6OJMr36hWsF0I7O3tY5ylXS34lhde4OnrV5H1mhduXOGXX3udL9+6zVmT+75JHQ7fevstXjw6BkD4n9Cnn8jmPDn4lyWx14BWlkJVV9STisUisO31DzLA728BvL8N8GjUiSFyfn5OXZcYIpcuHXNyApt1x3J5QZSAMU4PPCOs25Zbt+7w3HPP8MUvfIErly5BPhBJHYW5T0pXcahmhS0MJw/PmO3PKb3hU5/8JK+99vr4OkxGe2ISvGiigbeQ+5oi2eHOOkIMFM5BANMHKFTAyQja9zQWaXpi36holKtoNx3rixXVrMJVE5p2jTVeUQcShfc03QXpvGWyd4zd3yNtOtqwpppPsM7qyOMA/w+GQWabmKaYRp/2arXHv/mf/xl+5O/9KOeLh0Tnae2Gcqo5uDeGerrHPM2YNC0020mePrS0CBJFiX8iulfR4mC+N2c+mSrUnw/0FHuWq5V6jhSeK1cu87VXXwVOEdkA32jSwAKc/Sp+Lq/tnf+AVsxJhNXyjMvXHuP+wxP63tN1gRhVJCfxvRTlS1S5VRtjoF0t6A3YLvHg7kP6wjKrPOerhjIFpO2JbUd1/Tqm8EjXqUlXr4jU/fNzLh8eEduWdeiQ9B6q4BhJmbC3N59x+tCQslaHrhzlG3393bi97t+7z3z/gC702s7C0MZEVSh5uE86vte1zYiIxJAyGj2M1ioqLKhomqLUlr7vVZPEFUQShdGJgcFPwBqj3Cab6PpO+SdFkQniSSeATG47vP/z+Sa6PnQCMN/bo64mTGezLIiygSisp81Ipuh6NZYJwZCKksJ7ndmEDN3IWNWHoDKNsQ8gCeudjkcJ6j6VlMjR7RhCWAwSFEGorKcSNauwuaftrCrsGSOqTTC0LLwGQRV3AWMfAucYcx27N8vQv6jDVH6MiLAJnR5oXklfPSGjC1arSQZ0Oo1IvmDGSsoMFX+emoDMAzfk7DSbUVjlUljjsSnoY5JgGl3EnYrGUTj1g48ilLVnsVxz//SUvu+4euky00lNVZWjoqIxClPbaNgdaVkt1zy4fQ9B5+9DiKQQiH1Q6+aqpioVaRnGuAchIFLEOUPlPEU9oU6BttfRrk3bcW3/kMWmYXn3PkHSIGXA+fk5ZxiezHdNjYiCBgCjG5Z8h0LX4X3B/t6cxcV9jCkQtE+/hU4eVfz7YCtgiwTEmOg61ep3FqbVHH+l5OTigs2m4ez0Qp0pUyL1kWk15e6tuzzz9E2uX7/Oa6+/xosvvKCVSloh5qfA/j5ErNoLi2O9WNKsGybTips3n+CZZ57l9OQBdamHisFoTzTo6xabp1DyHLfJUqQuGaJVw6DQtPjKkYzV6j/qBEncdPSLDU1K2Moxnc2xvmJzvlFmdt/jJzVtr/fWGWXWdzHQri5w0z18Pac9WdI8XODLEj/x2DKPZOXW1LBehgo7iWBDIlmhCT1d20HSkbSeQIgLnDdMqgqTEiaq3sfu1fUdyTti0sTSOodJ+n5T6kdOECKkpB4KIkkP5xhx1qlOPAC/BHwL5FX1jXP9Eo9KUX+4a6z+H/mPZmHOWI7LgknpdR6+DSwXC3xVQDokyU2sTUSbHfNiZNmvaSXikuH0/IRyPiX2XW5Bwt6kYhkusM7RpzBWy9ZaEvBwvebzr34NEyIbYxH3d3AZXe2bNs/fK9mw73tFOPMrH7qbw3sZ/pgZDRhjOD87I3Y97VrXtTVWpYq7Duccba/j4s5aJYRaJU53bUc9meAyVC9oyyqGgDcW6yzeqy5MjGo53oeergtY4+i6VjlKxtCFPDKeMjcl9FhvR5EhFf7656Uwv/avD50AXLt0lSSJKMK9+/eIKVIUpfqX+wLjlOFbTypSUKlZFfTRDN8Zq8HRK5xT5z54xsX1g8w2ulgyITDidyrX46LkaDKhLByVV5KbdUpesk7hIzF5gQ0a02TCyNAiAMTcwZj/CvhhRPYZ4KvQq1TreIWtrbEGeA0+Ahq0h8fm/uVgS6Nz2Tngi2QeiSFlTom60FlsbcE7TJdwfaBZDvP74KcT1iYQ+jXWCYihMkI8ecDbb77Bxz71cU7feIc7t2/zxM3HqaqCSfbKtpkY2feRrmsIsePs7IKXNxsK4K233+IXLi64dHTM3v6ByiQXBS7P+VdVic/2nkPPDcms2hTxpYXGst5sKAuH7KADQRK+zDagOWsvqpqqqkcVPTD4stRqIvYMlNA+RXxRUBc66TGfzTDmNljV0t+hLTLok28/rYSaAMGgYTB+J3ru3T1nvl9z9eolxEbAUlU1xji6vmdSTwkRekkUeE5Pz1ksFrz80sv8lS98gevXrlHXk1xdvU5hvkpKL2OyAY61cHZySlVdw3nLpz/5SX72Z38WZ7Sn2HcJl7TyNcPcs3V4Z3WCxprtgY/ulxAi0idsYREdtSB1PeXxHsWsJq1amuWS1cMz6r0Z83oCzrBcPqSNFoISDeujOdZ7irChCxviOiHljOnRXMm2baB5uIBesKXH1h6fRYhsxv+NNVCoKONiseL87Izl+RIKi8QeIZAkUBhH0wfWm57lasmmadm9AtB3napf5qkDkyODznJrcdD3PXEkhLUqZ13qRFE9mTKpJ2yaDYa/A/y7yK+Az/yv4hqrc+CDwf6X+DCV4qPP8fW/qGeKyjBfig5rC6bTPeJigUjAMgGOcPY5nDV065bGOlIX2GzWlN7RSaIXw9xV2Cg8/8QTdPfu056d46uao70DpNcWb9d3eGdwxqnsc9J9VXtL5acQW/AWU2or2HnPs3tXdGdGDeJktcunbr8Dd99jXk/4vqdfIKw2NBdnrJolrYX7oeH04X3KyZRkrI4sJt3hbYhIjCTr6AhKeo06FWYtnK0WeKOFRFlW6kAYIy5pMWndoD6rg9QhBvqU6JOOqxurZ07XqlZJXU3YNC1t6olNUi5KUdB0jX4CZuzWflNdH3rXrBoVONk062wtq3CMsdo/mc9nFGXB5uyMMFjxigZmMajgTwjQ99g+C8GIJg3OQOULXFVQOtWm99ZQFyXT1Qp4G4CbNy7TVmVWjotYPJZsKQljBiomz5bmK+aevJFtALHmLvCfQvrfaq8aq8S4EfYmtyu2cKiSHj3whxF+GcxnwTxgYNLLzu4dx3XyHKs1KsEqGLwYTEp03YZ0EWCdCH2LLSy19cRNJJkOZ1q6tCE5jxiLk8Bh+kW++Et/jhvP/J+4ceMSv/yFwPlqw3NPzymtZvYh9DR9x9nFkvv3H3Ln3i3W6w1PbdbsAweHB1y9eonQCcvVSu+VVOzv71FXitxYY2natXIZvBow9SHQpYD3JXnGi5g1/bu25eHJKfv7+zmJM5ASzhdURcmknmLjFs1R1S8VfEoxESRqOyAVnK9XEIXprOb48iGnD5eIGSR1U77DQ/W/iwDkf96/UU0k9D1nJ2uME+rCURcFtVcjpYvFOQf7+ySNsYhV/ft79x7ykRee58rVq9rCeP
FFJAW83ZDiORDwzmfmPCxXK/ablumk4LHHrnG4f0QKCbHaq4zJk9+FHk6ZSCsiuLIkiuCzOZTYLOLTRqTS9UflkE5bZcw8zsHce/rlmvZiie8rOKgwAS4/dp103hLOl6xvr3BHU8rpnG51wsG8xFceyQJLdlYymUxIXSB1ARsS7ekSQhqna/o+0m5abfntTbl24wbl3pRms8qwqiGmlhSE6AqitcRMQN29RKKOAiZRElqI+MKrKVP0TKoJm6YhJeiTVq+HB4c0G0UcU0YBRqMx7pJnhv+VXB8W5BXOgCG5+YcoIXG4Plzf+Ov+nvd9MaHJf28Tb53dg9TQWk/fLoippun3qdwPQtsTm5bpdA8326Npziinhzy4dxtrBX+x5nEzZ39SEYJgJ1fZpBlFWeFlik0F4i2+VI18CZHQNNoes45y8lMk2+PqI2Q8yzQRNzMzIpyjEyDC3tkJoITwJw+u0oUzVrLggU2sYmQtsGobfDVVbxUhTy3pOhkmScQoAVl5VY7QqZhPEiUQN5sNYgyucIi1tDFCUp3/QXo6xkAbVJwrxahKtklo24bNeoVEoagqBk8B5wqMEfpOP+NvxuAPv4IEoNlsWK1WpDxDachsfeeZT6ZUzmOicHm+hzWW9XKZ9ZqtVspB8KbAlcq69y6PqFntxYxsaVEfZzGiQaHYHiBtH+jNEFANSMyQemYtG20zGFW30R5m/t+gbDYQFFXsZI0xX8OYF0d0YBfuGXnn47cGeL/A8hkwnwF+Eh33eV0fbTQI2aESN4bByc4IEJX8lHpVVnRFgduDuEo0Fxcs798jLpYkDEU5pZheoe83JFqwhpl4iuWSL/2zL/I93/sdPPH4Y7zx1ru8fTDl8qUrhBg5OXnI/YcPOD+/IITE/mzGY9euU7/6CqxWTOsJN65eZ9N2tK06b9WTmvlsTlWWOGtpu471Zs1gvDFYcSojVycNDvb3MRendG3DxXLJcrPm8qXLeh9y73g6mXLz8SeUfLO8GO9s36u9ZwyRGCJVVeGtx2Apq5LQdqQkXL12FWMKTh+sSGIBHR9UlTI169DZQ6txQOSRz1Bfy2oMuquzE+bXbhBjYtNuiAhN07FaLqnqCSGqsqIznrt37/P000/x8Y99jJ/8iZ/kyukpl4+PlcgXX6dwH8PEGRghEXF9ZLNcM50cUBSOZ555Fuc81jidq1emnSaMkqF/UPvT2Yyq0ITI5jE8iyrnud5hKkd0gp84wnJD6WtS5UAs3k7BOfqmpbt9d9QFYOIpmLOP0F6sWN1vsDMPhx6bIjEGuiyx6pzDlAZXFRjrqGUCCKLS8xQCRR+Ys3Xy3JtOaFbnYJTLkZIeyjFCUZRMvSJSu1dVVKiocJ44cAZvUQOZTErt8rx2TJGQ/RJiTsy7ttXWX1HQdh0gGPkywic/7FH2L7z++ef828CO9wU/D9z/Vf2Or1v58+iqHb6vhYeQsJwbCKcnfOYzn+HWrfcoYuJo+vuZzi6xN51ycHBIiWV9/w5Xr12BaLCV4f5bb1FZT42ni1Hlmg1MZnMVaYuJIB04R7BhRDyVwK3JZ9c3QKc8L2tHLZYBPd3+L79oAykXgpKE0AWIgo3qgmiswURD10faGIhRRX+6TEAOsR/Hg5UnE7NfiN4oMYr3pRBVIMlZisIR+6gW0jHp1JnV0eVms9HEWdjKcksikfVSvBIDUx9U16aLOSRsC8LBBvmb6frQCUDlHX5aE7qOuiqpfEnhNJBbp4E7hUjpvc5a1vUYgH2We1Q2thkXiEZYxcSTgASdCqDPPXXR0anxSmkcG4uQiSYK8RvJFfpOoNfPTjCj0q1Boo5LqSVkwtgfzR/pizsAc+40Z/KijigxutMikLJCHea3YM05mPew9u+B5BlpkZGYqH83eSQnm6QUpc7cn11wducefdNST2smR1ew125QlRX9KtKdr3Dtmr45pZF/jbNgKd1THM1mLNctzz3/PG++9Ravvv4mX3nlVUJQ0YzSe/YmE2azPWbTKfO9Of4N/biLomA+32cyDWoH7D1lWVGXFc56RGC9WY+z7qHvcGWFNZY4juPpHS1RYlhoOq5duTpCZYIwzSJL9aTWls55BH4WeIcYnOqDxzQmaBIiSQyF8dl7okeMcP3GdTbNW6wWHcZWCG5sLxjnkORGLgUSIQ3EweEaOAEqLRwMFNblylZbEIv1inoyJcWIdwrbn5ycsVwseOKJJ5jvzXnjrbe4fPmyEv/MF0npN2LtTIOtLhgWiwUHR3v40nPp0tGWoS5sxZi8HVn1iEKnpdNZZ4w+j0jMI60R1wmhEPBgKotLBV3TU0wrKByJhEsl1lsu3nmP6fQQVg22LkkuEVcNhRXiJnD77gOq4znGRAosVamMas2ZTVZkzKN7RhN0RBAjVFNFO1YXWjGJ2Y6omSg4MvqWIhKDti3ed2AWAt6oNjwJTDb7MdbSSeDSpUu4gRSL0MaetmmYVHX+bDxFVeILtzUKk58B80nMIyFUhv9//etDneOfZUAfNfg//DA/9KGur/fr5evFFxk+GxXIKp1juVrwmceeoeIzHH/sN1NVh2DBRXjtb/4N7ndrnnnxYxjnqCYlj129wfToMtNiTuwbpA3ErldENkT93IZubBQiMY+watIsUZAuYCTouWcdwfSQfVyAzOXRz1GMKvs98l4lkWyW4RVNARMQY4+3wmpxQVHVdF1Pn3lfEgJLtEhIMZBykklS22GTX7QkHa9VPQlyfMkGUuRWmy8pZqUmlIUKVoU+UFiHGcqDoiQaHT8kBHVXlEBI3U5U+Oa6PnQCcGAcyZfYshqKXJCkpLWoxBDvDCIB7/WA1rlKCFmgwYrBRF3MdqCLCPSIuqZJUrTA6ok0GPMMlxkqcEnj+JLCj4OQjI4AanzK5g8oJA458xurRLIz1BL46xh+EGOefcS/XHv3MhKZFH1WtMECZGIj7CNpnxSfQuQtDH8biUuMlZHwIuTkpYu0i3PCyQVpsSKEyPzwgOKxY4Kz9J1Q+gldsBDBpn8PfMKZU6oIzgtlHzhKQNNz+fIV9vf3eXDyEIulrmpmkwmzSU1VVcznc2azGbPpRI180LaEKz221zZLUXis81leWPuAq+UK6wu1a5VBkNgSraWTRFF4zs42rO6f8PInPsrp2RJbqcFLYS3OOJ587CZY1YAwZok1Pw/8pPIt4otUVZ3HF4XQqfuZ8wUpJtq+xXlHu2lYhQZblEALtsS4SpdFGqDWnLpJAglAx676nKIFKNNfhNPTc44ODxWiTkJp1eZ4b29PLaVR0mPoA7fu3OXFF1/g6aef5XOf/yXeffcWH3nySdq2QexfBPMfgICzCUJHvxG6psMXJbN5TXfREkWn6gdN/SFJctZp0pO0+sXlyZAYcNYTXCT1QUcIN2DrPNI5KbUfGjQRtd6RaqB04B29MWxOLpheOoS6xDQdYbHCG0tYbkibRKwKMAknuo5VA2Ar3SrGauWZEQkJkZOT+7RdR1VV1JOa0lj2ixqsYV5bYrqEpAVBhJD3+KNBWceJyzwpFLseg/aaoxEK45m4Qom/Yqiw2GiJb
YfzNc562vUKKTtmZU1bDGJA6lCIKfO+1QJD076hfae8G21ZaoAw+XsDsqdyutktzhqwHjjJj6nAPKZrQ9TdzuY9MTjdjcyU/Fw+ox+Del1ejGw2rZprJUMy2pfusnWyy9ZbzjqKuqKeTqjKUi2YxWEXa+4sNjz29A1s+d246RGpD7gm8PZP/RPapue5T3wLcRNob5+QHBTTmr0r14jOU7KPhIDERN/2mKRwv7QdZGKytC0uCalXczQvXyTKl7SgsZlyJ4J1ynHCujx1FLM/iyH2+jvYhgqcLfCuwFNiU09phIen57TrRn1DBq5Viqq6agxkMp76yQxTVpbJ/gFVWakeCcotC0F1ZmLUwsViFFHIfKbOWMSBWIeYkBELHduWvoc+MJ9MKApP365p1uucjAz1v2Ix30zXh5cCRucijTbHx5lym/WVAUYN8Rhz1idYGapzGYO3ijeQD06Lk6xpPmykHHgHSHC4Ygik7Aeg43UaWHWELo6vLf9qhvG6oZpImXE+MvJVlxIVW/kzwH+oVVp+gtiHIU0ZDwzrjB4woGOOsv1dIg7scxj+NyB/H2feI6bXMCKEKvHut90lXKw5+uuR4iwRiRADD++cY+57iuIFiunjNNPnMP43ql66aUh9RzIRaxeUIkRr+KV/+kt86viIQwxPXr3OxWrJ/nyPwhdMJxPqsmQ+nzOdTqjLijKLY+i7UUUvAO8LrXi9BaO+DheLJWIMZVFQFF6nJwRVD0yGyjrapsNPa/7JW69xsVhhK8fTz9wkrDYkDIfHR3QxcOnwmKb9Km37J2maW2h/1JEQmq5VfYkYM+xoafuOUjxFqTO76u3dM6kqlnKG9EtIDeAGmwC2vf/d4L/dqELSQJN0GqNZLFl7T7KCs2oO1YWeZtMwm06VD2JUee7tN9/mxRc/wuNPPMarr7/O3Xv3uH50zLQqsXFBad6icM/jMRRi6aOwOLmgmk4oK88qrhBbE0h5+qWnKGsGE540SKaKEgSTI4s0Cd56MKqU6LpIbANuUiLeZMVGg/O5z146+qbH1lPmx5eI987pThbYoz3MdELRwZt3Xmf/iWvMigldl7ClR0zCpJCDoYqyYOyo02ByVRr6lqrwlC6LZfXCwfSAsrfoUSCQ/hCGP6133nkcMG/fgU0zfhbPXr3JlXKi5MjC4HyBdZ5kEt5Yruwfq5+Is/QpQNZRGNppV4GiKrl06TIhhjHBNuwD/7qeSUYVBdXGehAds5llnoNxHjHUZCA/xppRN8M6C/bfxfDHwVw8krjlKuKDh6Q8+ucR/TEynhHDz4qgHIsYISY1uokRGRS7rSKmGKPaKqlHTi9o3nyHvfMFdu95ivkV+vQF4tk5d/7ZF6iuznns25/DTHtIibBydOcL/L4hTl/TBD5FklVytptmUTJyIA8R+p66DcimI3Yd0gdCfw9rSpwIBJV6d8ZCDHn0WcWqdDoqItFsizn0xlnnoZ5gqhJZKqRvjcq5Hx4f03QR69Wxc7NegSQm9RRfVSRUPttanTYLVqWLuyZRlXNC37NZNxR1RcQgxtN2+TV5T9d02JRG5VicoSo80QoxBnyh8vPExGqxVmTu61zfbMEffiUJgM1QVA7QknvBknu9Bv2ztUZHKobEf1BxijEH8ywT69y2Ny86wqEkjwEbQHv7ceemi5rHkDN3J0rwE6u2koOOuR5a29emnuvZkzofrpo0DCY5BkgYfhbVq8+/HhgU+8bEJGmGKqL9pwFcjkGzVpMJSsZ+F8Fs8PbLGPkcn/uDP81Xf997ADzz3Yd81//1JYyo17rY34arp1j3IiYeEvtIv2lJQVXdxGmFnjqtxMUYLlbnfOUrr/DkE09QYLl2fJm6qikLHeObTCom9YS6KrXiNzuQXFKDoNLm9s3YExM2mw1N01HVFWWhhMyyKlktV2CzfnvOmufTGRsMv3DrTV56/AnqsuTuwxPavudjL73Mm2+9ze07/4j5/j9UIlKRKzRj1PTJgHdGpUBFxysnfkIfg5I8k5CiHhJt02gSJgGJgx+8zdXIUGmmfBjtokYWW8xUYyL1kHUhJHQ48lqxCfrA+uKciS8w1o4TEOfn59y9dZfHrt9gbzalX6159+23+JYXXmRSFJT+sxj7ss6sJyito1lvCF1POSup5xVN11JaHfELbUsxUQdIPdcSlXWQcn8S7b0aUVOtwmhrwDiwMRJXG4xXaN5YhxgLRl0KTYzUh8eUdUk4nhEznG8qB5emXJ89h/WqxW+aHlwNOG1hZB6NQls5sxoIABa8q9S5MgmSZZs/813fqZWsy7W+uQXmrp4RBowR/I/9KGQyGMAnvvMzyPXrYyw0g3hL3nDDJzfsUXIbcRyRy38/EhkfYwDkGTBPM/Sk9fXIWJQ8GrMf6ViT3Yvyd9jKMYeAgsjd9vGScUX1yR1Pigw6MsZ5MxCKZXxvJk8EjU6L+T0M91gkw+dJgIiETFjuVRDJTCvipSOm0QHvENr/gs17JzTv3ebo5hXKgwMSv0TqLM4Z7MxQ1BErqpKXrIrfWC9ZHQ/EqR+DSwnxYIoEZSTVmpxYsplYOtbX0wXog/b3Y4Q+YnpVMZXciRsypV0vAGMsvYkYW+BF9fU0qXMYV5JMpMfQhZ6Iy4TtyHJxjpjBFM6Oo7PERFWWeudNVqO1WlgEYyjLihQCVSbXpj4QUgCTaNcbxGdTuJTomy1p+9fb9aETgEE2UZX2tOcz7AMV3ct93IguftGNGjrdPMaoba/NXgImk/mGzWKGwC+ylVCVYWvlK+UxvSS6/8jSMDZ3aHKgHqB+m+G+4bUlq0YlOqM/HDDa99QF8FOIPAtc0+dLaezvqje89pgGVTibx6SGdR5BtQfKQpnNUkP3nfzC/+oneeV3b13B3vh9Z9h5z3f/3/5wPkeukCjURKnfQBuQkHInRNsoiIr8FDHSOXAiFKWjCz0XTcPxwTFlpQIZZVUxqSqKwme5zNw2ybcyZrELW3pl8+bPo2s7FhcXKuVcFFS1ssPbrqPtO3XoS1HlZvPndXB4wOm9livzfWwU7l+cc+fBA95bLFitN8zmX2ZaPgAYxZ1EhNB2OpvrHTGErNfgWS9WGGcJoccZlwlKQt8scHbo7evasybiMXnM1CjEaGvqouQ4dNAsKZzno4/fJHY9pIBIwHqrXhC55VEWRYb3YG9vn7qsAZNHIQNm1bJ3+TLf+8lvIZyeY+6dcz14zH6trnwYojWqPWEsEhOriyXF5JDpdMLZ+oxZtDpBgNA/ONOkUsBjcNMKyYpjElOOvzlhc7rWIxGTjaPIehlKasoWqQYKaylmE4SE36/HfQlCsobaT3QvWENZTnS0MgJZi10Xx5B+j30+ZYzlO89QmAKTejKaOA1ugY8E0q/TAhieditLvQ3EMnB22PJ5tkOf8r7nkozw2fzaVLxF21Xkit1oMB0ygPGZ8t3NTydDYN557iEeJFRYzIiBZEhG8SRloO0UK+QESrbvMSXJAjcZukbPxWSMjoSiZ5lLAt6TnIoxSdKDVdHSoIgINaUriE0gvn6HzTu3KGd7+HXH0WOP6xrsEnbi
9Twc2hjZUleSmkclb1Q1DzeiqyZ/tqnrISiRE29xVQnOUWaHQAQkJHyMxD6S+h7ZdEjTYdqgEsR90PHpzPka93totYgpC1appygtfewJYghdTwSccUzrmg5DExq6EIibjcaJvX0MCRsTfS5AUuworAcH02LC1JWQAt4aVqsFQQKb01MlBAqkwTVThBQGqOXX9/XhE4CgMKHdqYYFwSWjlUw+0AzKpOyzqY+aORRqWGHNaKMac9Ae2gd2gMWQDMsboijsP1wpqtf8bvthgCiFbCWJGRNzSUKQQMzb1BnlKOhfNfgra1nnV42PSPxx4DeAfILQKMvYDkePNSRrMaWnNHaE6qzLVUwfME1He75iuWqQ8yVGDKfTE8TvLDYLi2uBtLmCc1oF2RDJWmxYcRRZwSrYiJiIYInGEZwe4IUrmVYB073C3GwwqeR4cowg1PUEZ7WXaONwGILhLeAhhZTscYkJNVaUa5D6RL9aUaSO2cEes2mnQk5tIJxfMEvQnm2YVjUFnnazJvSBmwcrrtWOa4drTPc2V4ozuuOWaAPFXJhUX+PK/hWMCMddBO5SWMd3HD1GjAnv7agToW5e2mbBqABTQuVo5bk8/YFhmEFyVj3PtRq2OAveWLz1+Ne+DF/8BaZlyQ986jNgrI4vOZtDfVJnvgyzsjPalHVJc9Kkh2d86x5PzS5h/B59W9KfLin3pkT3Din9NJjfpHKlApUxdKuG9XrD3myOcZZm3VNXqo9gnAYDa3MCm0efDLvFqPJacE5HE3NwMTkCD6Onw9+HMnNAEEhJOZE5jxgT3l1Svhv28U6g3o3/5P3I0L4jV+kZScukxSQa3pIZwj6jpPP74//2ylDdGJzzRs7PP7zf8UzIiMLw6OHw1h1TgByNPzPub5EsajQknjndGF9W/vcO8jDcxy1K+D8lmT8+PofPjxnUvLd3SEeJR2IakrUOVOUi5sLJJCHZoGls1qNVbSiTb4nFRDBJz4EUDHHZEM4fcn5+zvrhGdWspFkvuXT9Ch5HjJKllA2kiHV+mzi5rMiakpLogroKMhRiZK8Qp22WkFrVg8jnWiq9rqe8cBRdKfBRp29SjHruRdGWQdsRLtbExWaMr0aEtG4x3ZIywZX5Jc7bJWXf402kjT1lUWtCEFVrhjwivH98ldX5OWnTYY2lzXP/KSW8GArbUXiH6Rva9TlNsybEQEga9IvMDRuSzO1nLWOyOnAPdmvNXy/Xh04AbBr2s26sYX+PdzQpjN+HqNC2s9jCYrzLoGyCXhnfGDPa/BqTYfyYGGqHISkYEIHhkpSQPrLFblEYR9IomEHG4HbRBJtHsEIeP7HWYZzNdrnaLxwgPP3dX0B4SaGuphsNTcz4GEtvhZSCWnR2gdT0xC5BEAqrs/SxrnFYbv7Yd3L/t3yBONOZUtM5bvz5T9Lcfpc+KmO6X64Qayn296DpsF2P8xNctUc1P6C6+TgHRyVXq1KhOBFs/RYm/JgmTNZkoyW0v5mz9aH6NyKY9JPACYV8hEn/Kehz7y+v/1kSDryDtUFWQidK7KmzsuAEIW0SzlgKEbDwG57WKjrEz5FS4iM3Hc85i8Fj5BOIfHT8DMvcC7bGcHl2vAPh5kM5i0NZY7efey7iXLY1xhk1xJGEtbnlkg96k0vTXgJG4jhz30rA4DT5w5CGRFayB+gANSdGONrkdRiNgLVq7oTQOwOzGtcHzLrHH0wJvENhFxg5QBB1Ugwdy9MFs+mM6cGM9fKEutJxVzH/DdbooWSGSknIxlnb/pkmbjZPraTRuhlUdlqNsOwIlWPUSMhhstvjbkWae/qYbTDXD4MBUQHy3P723ueomfeF2dl32cxqt4rmPAe8MVP4wCU7cPv2+0OWsn2cye9xQAkNQ3qTUcHhPQvAMZjv3q6ZIdDLkCSNb+b9v2Z87LAH2E2GEopqDH4Jg6nU8JryPd9tU2qlnPK6TYxmCAOaIaLjeQlt8QXtt/dNi910tE1LaFZ0zZLlxSl921Lg8OWU+splrjz+JP7qPpt3H+DwhNhiirxfQkDKauxUD3mQIjeKqNqo/B+JSV9aPg8xYLyjmE/yhIA+hqjqeeMeIbc2rCBWf8ZUiv37VGBkir+yT+oCVXMf7usZE41A7SnqfWgc7759TucLSAmfIIZEFIvDYQswsaXEkpqGvUlFWC+o64nuc+kIXUPs1Ya+E8meFdoSLET1A0LU1MyJgDXaCtj5nHfSyQ+sv0fW7r8oORh+5tdo8vChEwCJejCZnQMA0Vn9FCJIGnUBsIB3+UCTIT3WnzNm3AgWVQBMKW4PfQNB8rmUk4rhSl1ACnWLGsV/dtj+YPTAdGY8FK3z46iKc6Vm5llaMpEwSVEJayyh7wlNC7yBREfz4Pk8suf1YHQaCFNMuCA460miTmXGa8+pazfErqVpLqBviSTmf+5ZXjz73XzpL/2XAHzqf/eDHP2pj3JXbiOuoMIx3T/iyosvUV+/RpCoKm4RlbssPK5QUlzswSblQnTWY012vgr9KGIkWbwoyaCNkOf3830cWh8Jnb0d1P4sjNrsMaRsZuPVcMQYkMySBmIOXqoVExQFKh2daKVmpUf4HNGATQaThLizS8RluHjwnTcKjUrKIL8MkO9wkBs9yDJ/xFpPHAp4ch469BSNAe+HN4utSwbbWzF50Q/z6UNlvLt+hteDguPRWIIKVFIUlngwg16IbcIFsMXbYE4RDpBMOJsYaDYdoe0p64Jl7QjG4L3HcIDw17FOA/EYIwbUYTcQWksUMM6MTgj5pY7cFMmVIyipEVE2xIAOWJEss73TUBtRg/y7x3+bDx54w0PzuhFJj557Y5D9wJH6qJsq+dgd4/RYrj/yKsbPPcm4LhnOkkceORQJucDYfbad3Ges1PNfzO79GR67+5p23s9urjIUAJIywS2m7PKYxscMPCd9yYI1TluEMULbkZoWWfZ0ixX9asHm7ITYrAmhwRAIsWfVtQQRDi5d4/j4GvXhERzsU8xmxEI5GykJ1jp6ZygimmB4fSPa5swvPLdEzYgiZe4TYEQLKvXCyHcjJ7yuKvXY3klGJSdleoanrPyXuVfD/RQw3oO3SKV70M8nXPlNnyIsWr72+tf4yp3bxBipkpBMwpjEVIS5BWk2kAIVCY8hNhudWDCBbrPQ10CkSEo+T2JIxhCt4HKPKuSYMCpMWpU0Vw2BvAfGHNc8ghLtLNTth7779/c/5uv86K+l68O3ACQp3CSi8/gxju5myuTNpiU+Q8+ZeDeQQfTxIEnHPZSIppaOGF1YyQwxYZsMDJK+wKijLxaM8wwe5ioHrJnsQDzVUcS8EbN+QArau0/DYked6IyoGIakhIR8kJhbVHtT3QRiICZMH+mbRhOFlFRmNzT0KSqHwTr6rsFUlVrpFhVdjCTvOf7S7+U3/44fpPIlJ28v+FL7VabOMusjT1x9kmuf+ATuYI/eJHxVYHyNcQUGwadEt2lJ6w20SQU6AGdarFUxJJN0bMYMab9RRnPKErrG7fQrVShAP/zEmKSp97tC7qOqLmQCzqDvrZHRpGyf7JUVbsVkxy6
jnAp0gxpJ4PWQsMNssDHIvNQ2TyZSjvvN7bz2XJ16p60db+3OnjSISYjd1oZjhWgNZhCQMgbqAh1xT48Qy8Zqf6ea3OlAa4y0JnfI80HnNaHgoEYa9Q4whQfzl7H8BxhKIGHFUiWh2zT4yjOZTejaiDcFYp7C2H0ii+3nBTlJtux8RT/nQY4aMpqlwkcwHMZmrEi3ZDkzHm5RJCtd8kiVPOwpO3BhxoRr+wCT2w3jGbfzeiUrbA7XNvQxrhdN0LeXDL9m+D0i42O3j9lJRMbnHIhaZufmbFeDon85pTDbpMrkmfMxHuYEQF9C2r7oHMRsEq2Os0Kl8oB+K46/livibe9Yd0sORFEZ9CYkUq9z9qw7wmpNWq8J7ZrV+oKmW9PHDX0+O002VLWi+iBYz9HVJzi8fhO7fwBVCbXHlQUyqKp25LM44uoSWfXbfZASkkzmDQz305DF7rfrfeRGGEUH+0gyynEySRDpdM+4IYFw2MKpnbXRhF5QhFD795YUIqEL9E2DC5GUEb/+Ysnrf+Pv8GB5znlsuGEcM2+JBp4QhywuMok70pIg6mh4P2RcXpBosCkRSBixDEwPb4x+LX8sAUh5isWgMWUoJpwxeS+YRzQXBpRpZ4HrNz+Ym24v874//xpNAn4FLQAh9p3OJGclKCX05Yqw67OMqc0CJ3qIj2xeTD6Ad7L3wU249LmK244XDoew2YHXXFUS8/SAy5mtqo/pM6q5jjJMQ4gYM1QRqjwW8vMpgSrD4yFozw1dKKnPs9Bpw+b+L9FurrGWxEW7oU2qdd9LYi2JPkNLzhU8efMml44v06yXPHh4ivMFZVlhAF8WpPkR3UUkCbSNUBm46h2Hbsbh7AgxDkPE4EibHgkNqU+krlMr0LbDi8E7S3J5rMnvEc0x2HukDOWNVU/uXWt1I+PhCPre6aLyvfL3GPgUuXSyO5oJI/vZbIOC2iwPwTSPVGXjJ5NbLslYjPEIetBRbrX6bV2CCLYq8ueeI4PZBmQ7VLeQSZ87dWbOL80OZD0kBppFbl28jM9T4DujXyO0PAQGfWMM08bja8nfNcYgTssIMykwoSJKm01vBLEJYz0OP5rpVCbRrzfE+YxyUrFqLrQiN5cQ8/3A32KwL5W8nvMntE1pxkpdxteIGWrk/OqGqKZp107gzJVQlqMeq/jx2kHPjIxRc2w1DD30HU9fYduig+1mfn9Fvj0gd3/f9vsiYOXRpMGMz6Xvxw6poRhE4uiXYKIuipQTIGMY++0qE64olsSkcuVJdeVVoEhjITFhUspEyKRM95jog5oYpT4Q+4iLIPE+Qd5WU69czIC21STEnT3aan4cemLsaWJgEzvW3Zpkogrs2IS3BsRRuGEE2lNOZsyObrB37Rq+rAneQ1VgS69+C4XPrb2kCZI3mD5BVRLO1/jSK9KTuVQD2VnMMC0lJIkjnymmpFr7Ien4X9dD12M6QbqO1Pbq3LjZ0HUtbd+y7tUVsI+RaBKNN5iiJFnH3bNTVeyLiT3p+Zg7ZBI2+oGGQLG+4IYXHitqUkpZG8MRSIQkBBN1QNhYgk0jiVwkK1KLoLycqEi0ANkGXoxW+GLzWhU9700mLw7PYzIaNixR1XPRnZTNDDFkXs5wPrDl2yAwjJQ/cv0aDf7wK0gA2uV6vClFVapcYsxWu8ONy8wWkw0hyN7e5IClR7Jse785gx+scIegpTCsqlGlXT/mPqlOs7X6gWVC+AAraizTvFxH9fJRnrQP5NBxPbI9rzLgLanI2XzhiE1+la7h7vwLvP6woo0q1LFblQFaySYwEnjvwQn7l6/h6xl+0nB2dsY+qLtZiJw/uM/pZsXlw2O65Ya5EY6NZ+InpMw291JpJS+STynlNljn8HW9U+Fk8yS5ijXPYOSeVi2xZ+xnjoEtE7Z2zmFjLRReK8sMLI9VMDwykraF4nNKthOI2cYMBmRggIkxCqc/Uo7tWmu6QZQFBlbWmKDkJMAYuyWv7bynoYIfYv32hY/d50fi3FAd5axirJqHJ/1gAj+G/XEiZfiqVkUOqQusCKbT8VUN4v8AzL+m70ugcAX9plP1zLoGq0qKzlowz2DsU4i8s+UBDL95t6oe8qL8O9JuhzdH05Eu8Uhw33n/j1TwO0lU3jvjn3NCOI6mwSNV9vj34f0OCFtGI4Zo/shn8v6zMsko0TDyT3Y+g5Q/GZNPZMnoRbIGoox7Gou2WzAQUEZ610PbI32WlgUluKaYxWXyLwn57xgNgl1Q8ZiolWQMal9sBWLbkdoHWHumxU9OJGIIuk9SIsYOk9TLoiWySR1t7OhSYkMAbymwFMlho8M4KCipbM20mlBM96iPLmH2poTC0RdWETgn4GSUqVX+gGSRHDB9R7E/oS8cUpakTKglRpIRbOEBbV/5otR21qajv1jDssN0PbJcqw3wpqFbr1h0a+43Sw30JFJtkNKzDAGKEluXOCkREtF7mqYDiZyvVgiBmTHccAV7pR+nR/QD1WCcMiUvoW3EgBDNcJQLUSDkr0vez4Pd+yD3nvL6HIimQyEw8GTscBblMz8NqysJfvh5UcbAgCkqt2M8FXLSIONeHt7HiByM2yK3V3b0ah5pk3+DXx/eQsvrDKbLojGjkUkUfFko29ToYWBydhn7kPvIeV4bsx25czbfTIXVDOiojTWjTa4xZuyvAWrgkGFj5Ry4kV27nf21RK+kFGXZWjonhEzgMaUHp4YRvdHeqEJ9yjrfd/nDM4a+qmmdoQ2ZLMZQnbB7giIirFYXnJ+dUM/3OD4+ZnlxwXJxQVtXGO/ou56qcCwWC+z6gqveU1R7mHKOlAWmj0hIGa7T/p44QyqEkJR/oXorSpJT4rC2HWw2yhlC8xj0c0WAHR6v91FyG0fy+zF5I43osDHjWKbNQTyNXdMtScqQRxTH35q3zkAsYggwOVD4R6vyAYIXQbP6/Lu38Twzj/Nft0EsP/OwM3d+RgOlTmsMjzXZa2Ik3Y0benjd26p/u9fz82eNgeFeYfP9kkKJiE6PEOst9K9i+D6M0Tl/k2BiPCd37nHpiRuUdcWmadjbm5M4xpgfAvOfICweZec/8jZzxWLe//5l/ILJB+wjicxwSG5/YPyujP/dfmsMwebRx74/6x0Oxp2/MSb1w0say3rZuaN6KdSbMoRrxseNyTp5CANBJHNzUtAgX7g8LaIja2mxIJ63uPb3ENItTIZ3jcvcoigQNYlOOVgTEzYJRrTqj32vI3kh6KSTCKnrciUoSAyYcI2YniWln9dkQtT9MkquQCURU6CLPWsCrQTlXFj17vCZe1G6gmldU9gprqwpygl+b4rMpqRioq2kwqg8cuGVXMcQXPRexMFeHYO0febnWNV1sNDXjqIscnKjxGsE+tMzlm+8S9yscTESNiuavuGiX3OWOpYpsfaGNCk4vHGNS1cvU0xqwGuS4nzWNzE4q59sSInTi1Pefe9dHj7QerowBo+hyy3eYZ1oIE8M+nsiGuz1/tmcEDAGZ4NC+zE/Lg1JRFYl3Vn+OsIoaeQDxeE3CqNQFEOCkFEiNy5TXfdD403RJF2LOo
yUtgnvuCeHLTFw4raFxK+160MnAEVdqmGI0cBqrFUbVJ97y1Grcz3Q9aaNHgDkgx3ADwxvk8VZQD+pfAxHhpVAzBn5cMUU6UPQ6tFZWlBWsbUkC0ll3fLiURMISUIICil1KSKhp/a1WvMaS1FUeRQwjs59480pVC/emC4vuOE4HYLDNhcIMfLlr73C/t4BN28+AdZwcnEOC4OzHus9NgWCCFecpTQeV82o9o8Q6wihx3b92EeXAQGwJhOJBm5CRi8g8yOGlgAZXh6qZd0No2WHQgf656wylkZnPRlVu1TP2+UEYIC/DCqLmm+OHbLuvA6GKnUbRsbQQPZTkEEwQm8g4rcz4/q+zPjfcSvlinT79Xzl4D5WnobxtSoyYLJ0aH4Sq4jUmKDk72wDYLZy3sYjhtul72fMnDCSlLxa6uFgCnTdeEdql6TmJ0F+J8mAzWYmFWpoUk9qTtcbPVkSiJlgzLPAP92+33/ulV+J5oePHEbDOxoqI7Yf9RjqBxh0FPkZ7ufwRLLt/RtgebDkvSduY5Pl+a88twPPM6IA4+oXck9Yn2wI4DvdmfGSmLIngr6WAWAYYHuDZFlkg82COCaKcnPubejWHf1igQ8xfz7qKWFtO66VGKNyM4SstKjBXzUWUtbbCBhrtKjI4l0pBPXrSFlwK58XQSJBWoQNEEcCsULZkd4kQurp0QS+NBYnBm88hVVx36qcUhY13pXYosROJ8RJTawKbFFqcux04gOnxONk89/zTRwKaklJFexcRdsH/KQg9QFflkjhEOtUHjppqy/ceUD7zl36ZkOQnovQcNpuuCDQmYTUFdMrx3znSx+FwhLFkLqOddvSdi33Vyt9HQkmkwkRbUme3rvP/sEBTz3xDG++8RYmJpwRgrU440fxMUERiySZfC3aqx+4K9FoiyJq+59goBPFupIu8DxRkTLSth3NHtfYsGHzGrLj+hdFAsy24Ng5hhQxweTpgnGBb9FO0WkHMnIgZiDuDvyRYWN8oDHwa+L60AnAqFeeROFqpxAyGPq+1w1sDN47tUiVTJBKkSS9VqFWF+92FkN5AkEELNk62OgIU6nM+1W7lRFd79csnVMYJ0YkB8ZEnlsd0QZ1DhwOq8GeUkhZ717GbNFZQ2VLoo2EFLfZnQghvoO1l4BDHvl4Zcgbd/pBBowkFotTXn19TVkUOJttYGMkxU5H9WxBLCe8u7mg7jrKqiCmyCQz++kDKc/kwrY3bguf+96GITRrkCsQlCRmJd/fAabVF4vRPHZLdMk98nH8eDjHJcNuOSAYQyZz5sx4eM4x+m8DwuCbMIyajV4MQyX+vgrXZQnSvM+2lXnO1kdkImfZZvxl+WcGGH8H0h4TAnhfsrHlHmxV53LCKRl2Fk1mLNtAaobRwxx1TR4Lk6zipmznPL3gDL6cEtJtbLiDyNUtuStBt1wxuVwzmU1ou5aqqhRVkd+ZP8df+uCmG+6zDP3/4V/yyMH3CGlu5x7pTRmqpZxIDX1+Gdbu9r4agCS0dcuP/Z4f52svv4aNlt/6I9/Hd/z0t+SnewQ/yPt++Ou2HTE+5H0wqASdGx8PzPwYESHkz9X2idi1hNUGVg0sW0wXx8kTZ0EGnkIM2LRBUp+lXhXRSyFgEzoim0fzUtBEwEokRtFErO0UHdT+ISkFDUaIztSTaCXQpw1iVkSJ472MJIIoUdAagxWLNY6Zq6iNx5c1ZVFhfIEra3w9xZYloXRY78E6egcxF0rJCXgIqYM2EIMmsnvz2RgEjXH6PqqCh6sF3Nmwd7jP2bu3MKVXNJLERhKkyGaz5uL0lLBp6B0jByIaA9Mp80nN3nzG3t4eFxfnBITeaNHSbRoNhCHR9I36cxS6kaMk9o+OWK9XnF6cK79AEiWOwigfaOz1IMQUiLJFAFKu/mPOAKOofkWUPBaOJgVBJFf4upYF1Vsgn38pCTptsV3jduf804Av+bzU88LtTMOMAnZjNgHKMtcfNnZ7dm5jvSCyc/b/Gr5+BWOA6IEVVZPZGNWND33EigqwOGNITdC+Tsp609ZjnPY+BYMUFsljeilX712KY9BxXmV9rckGQ2whAEmGwpfKFHcej6Pte5JEtCmosrLOadUcQqBpG5x3OAFJ2n+11tH3fVbxUzJfispqTzuwlXVnOPcQHdvauRdf509j1SjQblq6pnvk8JNccXUpcrZakqxwuz3n5uUj5NIe/ck5XdviTZXPeUVKtDeWIA0tgEGsTaExJIDpFRrLwcJltq9C1jr3vjtNoe9OGAhlQybg8phnYqc63Na/2/4rYLJ3w/jNoYVgtyiAHRMFM96D4UpDz9uasQp8JAkYM4Pt38fAMz6nGV/DGO929vB4DZnOozkEoP07yxDoyXQEM77f4UaozoB+JhIFnGDxWY9AEalgDMXBmu7kAU6u6oEtOkrYNx0hBKq6YrNeU1Lme+sR8zKGf8pgujTeh/xWk2zfFwzaCWoks61+ZCRMAqMY1vaOZ+h+OMTy/3a/P5xyf+V//Dd484W39HNyiZ/6Hf8Q28O3/pNPDgthJEiR150Zxt8yhKvfyqz5nSv1QRXnhr2RlGBs+oA0DWnZqpDMusFZTUCjMVshHUlK+MvkX7WRbYmpyzwYhalNEmJU3o7EgDE62qoJck+KQuoFkZa//yf+v5w9c4/n/vx38cKf+E0QIYgQrWpNaEvue4DXsVZVLWPKxjcOGhK901actyUrZzgz0IrQhQ3LdkGzSopQZo2OlImHfeiJJOLO5xdE72UCnn/uOb7zM99BihHrdAorFAUPlx1vL+9TxgmXKsvDi4eEtuWgrPCzmvOLcy5WS6K19FVBtb/HwXTKcrNgbzpjsVoxPz5gVtQ4Y2maltXFknI6pzeJkwd3CH3LxfKCru+wxlFXNRfnFyxWSyQje+v1ipQtva21KmBEIpiwDf+igb0n5cCf/8mfa8gFWowZ/DX6eJX43u5D5RTpmlU9uV3FAxl2dN7qOUBbOxIiyQWSJbd30DMviSplZoqFJoQ7svJp3DN8IKH9tX59eA5AH8e+vXWO6LTfXk1qkjVskjJKQ9KbGzEY43DeqSa51WEq61S5LUWdn9enDhTOqzuayLgwXIp4v32JyuSPpCB477jo1kiMeOspSkUeQMfFktMkZDKdYo2j2aypCq+Wk02rS8ZoH0uDv1Z+u7B433fqFPVIafUvvoZHvZ8AUhYlKfaazaZI7Rw+bPjyZ/8+Vz7ySS4/dZOEOuPZPpIk5OCcD+ZcxQ9BYaigrP0+JC0w7g6Ye/q4HOwGEp7bDdRoIJPCbx8DmvwwxFYNsFsofneDDYF/2GgMkX68b2Q0BjHZillNWNyOh/jQOx+1JXISkL/IID42JjYZ2dnyFIaEQSuAAWLYJihDiB86BttplN3HadxLPEJkG37QbB801BNpUHdTuAUrWffCavUnNZjpingRwLj8mQaaVUs8Lzg+vqQe5TGq4iGCyLMYvgPMz24Jffk9DuTYQctseyAOSZoejuost12FmoBZcinFwNTdJrjbmzl+cjmwP7j2qO1tV/Us5hfKFhdNW4YJk6S5U
P6cMsIgKcvPCjY9mgCYTY873xD6DtY9seno2k4TgC7oPTVQlp4eIYYeMyQJXp87SG4J9Fne1jbEZJAY8AMXwSjaYIzOug+EP3JFR4p0R0t++o/9WV79gZ8DCw8/8Rald1z7U99DHw3JFLShp5FIm2YsUmJpGw1YvTLZe0m0KdJKnoAZAs/OkfGhQ4bZrjTNe5V/dL5aUU1rjEBoO4z13HpwnzYJzWrNpeNLFFePuffW29w9W1Kfe+qqwhQ1GKEwDuqCdxfndG3PKgjtZsXpek3oI0ShTy1WhLquCCKcX5yrI2FRElOg7zqW1lIWBX3XZXE3oSyKMULqWZBIKVs8j2fgUMjojH6fEZY+5fl8FA1I+YxTboXei5SnkywwOMgNJnAumztJ/lmbIJmBKA1j359caOSiU0g4LGIMHUKbE0nZ0UoZN/8HPrxfpwnAg6SKUM47vNc5U2NUqCfkAzQl9W222UylKDLPUoSuD0QSpaB8gRHWtBRliTWofLBR1qX32mIwO4FUUE8CUMlYj6eNAynGUbiSVbOhE11uTdMgRg9bLFSuZL1ea0aP9nRSiKOkaUxqLDNeA2T8L5H1DT3bj7zwPBIDb739Dpv1hnVIlFVJQWT9xltcdJHZE9egKMZD2iA7ColDBZz/lgOwYDDu94I9B34eY7+EMWfAlsA3RupxTQ8M7q0YkD5+CPz5ADVZSGg3IRhzglyBZ/jZmoEsl0ZUYWhWgCZb8kjzbeBbbF+fcivStl1gDFs4TkZeAztPM/xrGFEcUpRHNqrZwnXG8Mi42U5o377WRz5AdA1u87CxbaKHz/b5RVQwqTj+HJv1y0g7IabIJnQ0oScuStLBIb4q6bqe2leIiSCOJM+A/DKYlcLqDAnJDhSwk5+4vEbCwEUZR/J1vdq8H8c3jWAHn6R8D8bkapiBwmAk8YmffZmf+W0/P97n/ZM5T7x6PVfYSYN60uTEWK1mU/YwMAOaFlF3u3Znigfo37tPur9CLKQUcFhMH0anOuO1Sg4xYKO+xxTUxS1tYhaiyT1ho7DusE6DQLSGLkVaCbToaFk0ijD2JDqJtCmwipEH3/ULvPN7fnZ8beKEf/xH/wJ7f+I52vOsYCq7yNfTWPNq5ufskMKGNbOTmP+q4oQwrlNB22TnF+fceXCHO/fOAEsIG8pywte++iUK73n88ce5c/8ui8U5753ep2kaXE7Io0QlbWNVDz9/zEuGXroWAHrGan9+1awZ9lCIYXRI1Dwy0bTtqNMiQNfvOjICYkhOxavep+4ACCEncBEyCVARDzFD6zHvQYE0uq0KDpN1YgwGl9FKwWZewCBz7RCMcTn/MkSjdvQptxMCKjQ3FHtbivL2aNl+At/814cXAipLvPfZ69tQ+QxhGug3a6Z1gXhLDNB1Pb70eK/ezF2nC8l7T9v1OdArHF4UBbVRjXRnzNh3DSHQxIjv2vE1xBARl1GDkHLCEVWpTRLr5TlVWWHFsWk3GUpKxBRxxtG23cim1/6rIZeoeadvFc4QrT6EnwWuAnv/Uje6aVo+/vJHmU1mnC0uWN5/SNes2TMwJdE/PGWTDP7yEdZ7FZzBjIS9AbK3dqhALaOOvTXAMdb9doSHGLMcg/ajR1ROHqwaM6UB4xoSg+H5h2rZ5BEXthX6kElvMWm9f1txpa3gkB029SMHgV7Jbr/86KYb+nk7JZQ+mR4SaTuWuAUAzfbw3G0NjM+4s6VzNT1WakMyILvkovz8SZSbkpPY4RmHVyZuyxFQKePt2KC/+lnufPXbSDGysAEbBHNyytGVS9SzyTZA5zl/w0cQfg/w58HozPb4dmRYntv7EXPCZMSMOhjbA9fkJDe/4jRoCOrXxWTxFNCJnSG7yVDp9/6d7wCT+Jnf+ou44PiBP/3beeJr1zUHSSAMaI7uQYtTD4coSNDgL1HUFOZ9LQDvHbaqSBKJPXQpYOYTYunBGkIMGR1xUHhWqxV7R5eIhSe4QQVSE85f/vKXeO3NZ0npNQ08MZGMTs30I0tfD3uxKOyfEzURQZrF7rYYFgKrvieNC3S3JvwukCt5nckjCdmwwP5VhQ1jeqz9WzTNhs9+9rNcuvYEMUTu3XmbqqrZbFb0vkAk0jYb5vM5fafI5ujCZ7JLqYOi9LRti7XDS89rYUQ808573cJQfd8TRjXWnBjsoDpiBOv0nB+KEqJqFQxtODEDU5/cAjAaiMlTAMP4SCY/awKra3to/WANBQrTG9HXYyRLhOd2ZLKWNkU6QVEG0VHwIOPE+E64H+/09r38Ogn6u9eHTgDq2WSESeuipiwLhnGyrm3oQ0/tC5wvx5O52WwwxrDqGsqqIvWRi+WSvfke9aRWkQaB0Ae896MIUBLJlsGGsthpAYiqCnYhEkLE5cCtHvYXOOuwAiEqAdAZy6Qs6L2aTHirTNsUIzH0OFfkjNLSx6BWqTt9au88VfWApVEN+V8NEjD8yL279+hefJmjw0OOji/RXrnB4tWvslmcUcUORyKkiGl7JS+hozEqujQE+8EpMWvT5U1jciY/Bj/LOMqnlbUdK1cAjPIwVOMbBsLMrkNbxs3GFoQxZiQBjnPbuhvHNoIMpY8dqvkxVObn263Kh//ICFJoHcL2NTDWpbnazCFup8TaHcPRSjjl97zzu2R8FsZTeng/ucLQ58oEwJ2T3ORKKmdCoxSswFhBqZa6Bk9rPRGhqu8xu36Zd/9/7P15tGZpdtYH/t7hnPNNd4wbc2ZGTqpSlWrQUAgkQNBtMJheTLbpNoIGjLGN3WYGg21Y7gV0A73sbi/c2LCMWUCDAQkDmhAFCIEkVEhVUs1ZVZmVU0TGeOf7DWd4p/5jv+e790bkFJFjpXKvlRlx4977fec7w/vu/exnP88LVwlaXkPXnu3tHR6ZXMnwvEJHSTmCimj1KDGtstTU72ffUcsqTeXPnGD5/BitCGQSqeoTIfK0TkHUKyIGk0WwaBNGIWYvIeTzkJfHBDokvueHfh3f9clfTYyB4bQSH/gYSBp2jg5pFofgPc558B8nhc/jw028c7i2pWtaXPR8+HCXSyeehx955rPciDHrtSgwil/1q38Na+vrOZdV+BAwRUm9qKnbEXE8Ymf/gNXBhPFwiDWGQODAKq5NV4hpTuJnUfzY8o4h93T7VFBluPkkaZcf9PAXgD8EVMA+qN8Gaf4/3H2b5uv9u0jpyZO356vHWgN/4x/CL7sKP/IB+L/9OphVr+MXIaUG13U0TUsKkbZZsLZ+lsnqBvu7O0KoDiIstn94yMbGWuaohOM2dT4VPoQsHawxRtDZuwWeXvYDnUiOl0ho33ZD7kNjDIOqwi88RNn0pfsXTq2XSeWNPyl8yrP+MZL65wC11EAxfRKuNDY/yyYlIgGjDT4aopIEotOaRfB0QUzffIzcJVT9Wmf6dV2P92q87gRAxYQxmTTnG2bzI+q2JTQNrXMMRiPicISPnhhlgXGdo2lqOtdRz6bE1mHLCru+gUqJpq5pnXi7V2Up1plaMRgMsNqyMlll/US2GVKidaJ5b7SWvg2KEAI295IXbUPPdYopMWuafMNm
yU6kijUKQtcSnJMbuShwzuEy4hBjZH9/n7aVr09uEg/SEejahkW9oFIGn5GI6swZZtMDVlKicBGcB++JxhC7TKopclXZowG54tc9ApArY3UiEVA6q9H1CUBvtZw3+KQQFnXe/Pt+2XK5vEfAPVff/SKQWz79pt3XnqIgJ1WC8AlSFtq4N/te6nTnqnMpJnXX+/YMceED9DV4Ov48mpyg5NeT3X+5tgl9wC5/Tzb+/oDSsX9C7uum3J+UESUZU0s+4LyoW6Z8/7Vty9aFsxTWcu3qVRZ1TSRQFhXnz5yD0LC2dsjOaExdL3BZM2BvZ4/zFy9hC83scIp2X2FU6eW8s7HfS0w/QQyepmkoTEFZFpnwJhMeMUSKqkRrhW861KQS50TD8j6I9IjGOkr/W/l+iITOExYLWDhsUaKGBcFo6dcrvUwctYdBSKTgaZsG1zqSTtzav8NP/MxPEWKHNkqMWNIn8fFfi2AVwrI2pcWTeCi0pxKAvXbObkj01WRVVVzf3ub23gEPP/QwRweHrEwmXHv2GrosmAwGNPMDQkx8/c51dndvE2LgiSce5fDoUORriagUSAh8DWl5I+V0bnkzqLx5xQR0Cv4rZCd5GPhBSD8K0MCJ+5b+VdPTCBp4L6J1T5xZwF/6EfiNX5Ovf8fnwWn4Q78Wpq8nCXgGlGI0mZCUYf/wgPlcph201ayunGExq6mbBdPplJvXry/HsPvjlfu6Pw+yhpRlSYwN4WQV/zqOZrmZpxPnJT+/ZVHIBJhSIgmcSXQn3lyS5jwCKByxRMi9fHEv7rE3vRTyEXl4Q5ciTUrihhoiXZK13eVE/MRR5g9/QpPiFXMB9XL/+AsuXv8UAJrZbEazmONdh/N5zE4rtLHUdUPnhc4RvSM5R/RepDKVQhcl2BKf4M7tW6KolUS2U2nFPC/ctiwZjCdUg4rZbEbXueUx7B7scwSycIdwrPYF2NLQe4h7JxX/srpTksEWpiIohbUF3rVsrK+yPhkzHAxoW8edW7dZNDJ2mFKiabt7qv4H2fyVEtbwfDplsLaB1kLUq8YrHGmDU4EqeFEoywIl0nfWQow0WjwWjMk+CDpv9DkhyEmBkMBU/r5a3vynFsH+gE7I4vbnZ5nxn6jOlz/fj77l6jPq4/55DzDLU5uO1biQY9LLef3j+l5l06j+MLU+Hn3sj3lpObusaNIyIRDxpp6tmxMNLz1q7xzlrKYEfOd5/ivP0LqOzjsRFwGRie06fNMRvCc4gZ7b4GmDwy8VGYX1n5SmLCpGRUFhDLPZnF/2q385W2e3ePbai/z8z/08ITquPPI4/4df/Mtp5nsMzU/z0IXfxDMvPoeJorrYdh13bt/hoUcvojHMdr+fYsWji5IYAqb81Wj160Vjw7W4NmEZYKwW1TkfRBtVFcK76VqMWsVNW1JpKAelmLsYsxzySCmSQkIZjRkW2GKVOHSEeYf2MROsIinJs6qsjODqnEQqbcBEjpopP//U5zEVlGpEilCaW4T4EiEO8TFidBagiVBqfaIhJBFSzEiLPGNt2/JvfuZTKKV55OErrKysMB6P+cpXv8ba+jrEkCd5KnZ3dsVS2Ro++7nPQ3CZy9CnnP2d8wqR7maH5L/9qXt+8N5fBeAngF/K60oAHt+H/8uXT//bf/RZ+PO/7BUTgNNb0r8S4yhrmR/NqCpBTGezI4yWYiUlMUWz9lgn5O5PsGwF5udmMa/fvG0vJgKB3YN9UIo2RaK2DOwA7TluTamE0wkXhDTpOUb7tDagZd4/KGhhubE7H/AoXO7fL1E/XvUqn/7mK/7g+5s/3EcCsH3zJXzbEZAxs15eE20gOekrlh3GWmLdEH0n/XylKEYjge87hwqBmMKpfmaSuTaZFFCO+cE+85TZ+Sd23NneniQKpGUfS1uNthbvBbIvCvEnsEajTUFZVTRdi7aa8WQVqzU+ye9bpZl2kblrMUazdu4Cg71daBuU1ow2tnDtnMXs+yD93lMV7P2EdEoiTb2A9Y2lTaUzmtYaklcYAj46tAso5VFWzDVC3YgJR1YGNMbkMchcqRkr7odRTGOU/g0yg2tOz75LsvAocAetvxVjvjtXy7HvAqDUdZT6pCQZve66Ygn/91WzXtb1HL8Hiqiy+AoKHSIxeRm9jA7nHGEhEqspRvZeuoEPYTmFEWIkhkTnHM7LWGP0ga5zBO8ExvRBkrvOUbcNTdeyiO54cegTvpT4jq7jVwDzesEP/NMfzdLJ8XgD6H+WjDAkMoyvjtsf5AQJzcraOmOfsEkxmFQMVle4eWuPixcv8cijV/jiU0+hFoqqXGFlPCF2ARe/xmDwVR4+/yjPvvg8ykp7Zmd7l4sXzzNaGdIe/k4a/5eptMxeh/gTGFNgzC+lsJZF09J5x8AUMjqW/dJV0+GNRk8GuRKPlN4yv7lPTJ7BhXV0ZVEelI/oLhJsIpWKZDRxtcIUBl17QuskqcgLr1ctaIUphMfTuY7b+zt89qnP4kOHNQXOh6yKdx2lbufRO1HU9MGDClhll1M1fRglJjb5xiGRZIRMeZ5/8evojCyFCEeH+9iyYGvrDNt3buA7h1ZD2nlO6vF5k9lFNue3Ou5JpV85vIaFhdEJEuS8gKBf8VdOv7KoadbzBdYazpzZZDJZ4fbNAbdv3WB2JIXK0eGhaPDTJ/F3vYoStDCEcE8x86ZGJtjdDB0hGJ5cOYNeALMaowwjOyKlZjn82hJpYqSLkS4muiiETZl4eXUI/31g/82L1+8FUM8Fpi1slj2VXksKIcs0GpRzhK4jpSgVSEIqWOeJ0YtxCkpc7qxGa5tlYzXWysLgQ6SyJYWxKJWYRAd7+wBsXrhElV0A+3YUiqUN8bAaLMlcbecgyQJu21YWKx8y/CTogEfhkBFFWe57ow+5gVzMyuvqiBNDWA90omOM7O3tcenCJRIJ1zmUVsyM5atty3lTsBEdY99SGMmPjbWQtOiUa402itD7w2up6EBRFAXloMKrSKKUc2N0tj1O0u83hhTXAY93qzT1MOs0aFTuX4f4ON79J7SLRo5P9Qu0p8sEo2o4wLmALQtSjBRW4Mlnn3+OEOVzzZsFH3/8IS6sr/KPP/N3SPFHiHHOx4Pn1wB10/DXv//vLs8zS4JC/q//awJhnaWlTsTrvl/znwloXLfsYfaox0kt+1MLY58MINoSCYHGm6ZGjzSpg4mZsFKNqY8WLOYLLl+6yLnNs+x7h1b/PugVhsOv0fkaW1xlY+PDbBxusH2wjSoK2rZhe3uH8w9dQFVnCc1F2nCHgTaY5An+GsbMUXaAsZaubrHK0LdoYvSEUcFgdYKpDC4FilFFszvFKE21topRluQVbt7iZg3NYkFhLdVogBlXYC26KIhB4bqAKQ29oIpKwu5P1hCVYXt/jy9+7YvMFkcEI3wIYy06PE3n/5FQRbKOhvSFZVkJPtxTmfabv1i5Cp9l69wlYnTMpocMBhNIieFwhNaJojBs7+xw+eGHuX79OsooTEzZwnsLGKCUJ6XF6743HjwScB145LV/9LMX4Hf+ZmkDnFvA9RX43b8Rnl9/fW+
lJGE92N/j3NYWvutAKYrBgHIwolnMgMTR0dHx0b3M8xGzE+rbEQnwUXFjUXOnuc4QuAjUMfJs2xESHAXHPPolObN/1JdHmJ/79wH6tydevw6AEg39pDVeGbAKijL3pqUK13mOP+WZWB0URVkQYsRajcqqTUL4s4IQADEEIlKBFClhUZSmxFQV5YkEQEYKBT4XAmLK8JEsWLPFghA8KWvkn7Sb1UpIdT4E2RwzkU2HbDikFSZvmgAkCCHhXTxxJ76xW7JtW1wQpTG0bK66GnJ9f5+bzYJyto/Z0VhjjoleqYfIjwlrJ2eNU4IPfvCbuXTxIgrw3hGcxwfRUJjPF6yvr1KUJZvBUyLkzN2dbYw2lLbAFBrnHJ0L1HXN4dFMRieBEDwhOHTuH468p3Oe0WiELQoUMs3x0vYdWufpmQSHwfORJx5h/JXv4M7ep0hxj76ZI22DEzCqUqAEudFKLQmNSilpJ0WPaHLTF/r5fnjlK3IKBXylzf7VfulExBQJbUsqh4Qki5cuC6JV3Nk/5NGHL/DoIw+zf/PzmOiI/j8gpn8N4SVS/DSRT/DQpUvMFnMWvhEUYG+PM+fPMJysc1R/DzF8Hzo5qsKC/hIhfStJfQBTWGIm29miAIWI01QW19VgRzKpUBmKtSEEUGVJqJ2058qCYqIphgMW+4fMD6fYtmVQDUhliaoM5XqFsSaP1+UEOESsKZnuTvn61ee4c3ib5B1OK7S2GB2o7D9G5WTbeKCElB0pU9547j7fSimZEsjI02C0wtbZ8yxmR5w7d562Fee+zY11dg8P8F3LxsYZ7ty+LegXiqqyVIMxzeIX0bbnSenmK1/TNzUC8E+B3/M6flbB3/8Wuaee3IcvnYN/+uTrfJ+nIM1xTiTIt+/cZjiaU45GTGczysGAZjHPP/tu2yZlU+9V/ABcirzQzjghKXaaXHn61+GBsdb3437j9ZMAyyH9TFJKQABtFLosZeFQwipPMZC0weREoGtaUAqrSkJmosakpa9KyBtaylaohqXtZwq4rqX1xxwA1zmBMBNoa0SWlbhkMac8n4zOSUVegGIMFMayHN8hYYyma7tspmFIIeGiw7tjyC70csIK+cBLZf0Hi7qumc3nopGdkoxKlrKJJgVt9DKT7T3p9BYJZJS6J/KltKzwg0pSzcVAUQzBiD1p5zqq8RA7HJx6oJQWnkBUSnTMnQi3tK6j7rrsbhWlN5611FdWVqiqiqIs8yxvZFgWjIYDXFyjrCo63+UCWnFnf8Z44zyXL17g9u6vQ+u/hkoL+lFLlSt6ed6zpkF2C9NWXMxS0sfVS7+RqP5/6YH4GPcXmd2ghGjquo6V9U3hBygYFpbp/hFcOsejTzzKT//MT9PEZ4gJmvqjGPMBfPFLKYstjDI89vAVnr32PG3wTA+PWMwWTEZjtH0MHT6Cd18UpUqlIfwTFJdRjFDGEpPc/8bIRMFzX/pZHjm/RfnQN0FpCSpihxXT7UPi3iHNbM54fZXJ2U1MaQmtpxoNcF1H17b41lNUFba1mMKANeIhX1jhG1QlISV+/os/x7Wbz+NCl0+7FQ18/2MEf4cs4E9Mka5xKGukuo8J3/nTwkP9KYXlhMj5ixfRWnPx3Dl0YZg3Dt9JojzZWMM3HRfObnHjpZfILjRcu/oczrX4rr4vVOgdif/9Wx7gl74O1CzvcyXjkZW1qBCoZ1Puosu+i+JYB+FeID/jfa+O8L8fb2PcBwIglXFv3iMjKJoYW3RVoI2hW9TiyDUoREQCsEasLFMez4oIuS6miNHiMFWVllIXKIT1mYzGA6SIJiwPweRRwZikbyhwaECRMBiRFdWiUhYz2Ug6uJkIlcljETHl6XygKEqpNKMwvY9nqqELnspqlGqB7wN+6xs62SEG6kWNVkbmoa1hZTii74UqddyXhruej776PbHg9f3V/phjSllQSWZ3fQisrIzoR976R9IYw6CsRH0rS6r64Gmalq5zdK6jbRqkD6kYT1YoBkMZ5ckvU9dz1tZWiEqxurLKpYsXeeaZmchuqsSd3R0ODo545Ikn+crzN2ia/4Tk/yL9wrWcquhNoJLPSYESO1MkMZSFJC3P0XJ1edsiTyioSNcusOU5uV+CpGg+eI5mcy5cvMS5SxcJQe7N0XBI02q6dp2ytBBbRsMDrlw6x1efeRFdWm7dvsMHPvAkhZ2geYQYv0rbOQpdUJhdiH8Vwn8BOYnVKVHXLeXQ0uzvcrOZET8EzZmOwWKD9b1HKQcVofGE8ZDx2oogYiGyOJxSDYeMVyaUTUHTCIeCrsMag9FWkuKuYzZfMG3mPH39Wa7efI6BNWgvkrxED2qOSi/Rdh1oLejeQGM6m73iM8svG4adOpux15jQ2NIyHowZD0d0oWGxd4S2lqIcYK2lqiw78wV7u/usb5zBFgVFYbl16yWODg6A7m28D/pwwBGw+pa/08nxO9e1bN++hVYJ79pT00hvVKzsrYqTSN1JvO5deKi/YOP1TwH4liXmnIk6WKnaowsycwyAIvqwdGMLMUr/GSEwyThIT06xGGOzbr/I8arI0mY1kmTOOIfPimoq9w8LpWWe3xpxGVRZKbBnsZ/E0PO+ehIKNtaijcZojY8BlwlW+QeI0aNNmVNWqQYf2AJCsdyUC2vkcymByqQJ9hqv/Crfmi/mggR4h3cdbSv9+qIoRMUrxlM6/IkkhhtJnNO8D3RdR+uEXBeCsIu1MVSDAcPReDnqo7SmUx3OOeq6IYTIoKo4f/4Czz73nAiCJEX0ka8883U+9pFvYW1csVhUwKPA11FKsbW5tVy4RCpauAw+eIwt6NqO2WyOczIZ8U5Gr5yYgiPqhC4LaXFlbYDdg0PW1lb54JMf4Ma1Q0K8zer4IcrK0jkvyaieE+OPcGbtES5eCLx0a4v9vX3qpsVUlhh+Mab4Im37IqHpSGVCxUM0T1Hqx1A6oDRUlQjwfPibP8rXz36a7/+9/1d2Lj/N+avfyr/zv/1PnO8+Shg7JoMVUEIkjSFS2JLoI12UTdMYiw8OdMIFx/7RAdev32B6NMUYy9qZDabTA6wuaFwHRAiJaFp0+hF8fJqglejyh0QwUBhDQoleQJ7wufu2zZxKCmN4+JFHOZjOmbcNiUDbOSprWFMaFyOaRDUcMTua0nQNXVMzGA5wreOdi9vAzwK/6i18jxPP6on16vbtW3kthaWDF7wrN/+TIUvvKQzyru++H+9UvDIl9a5QSaMxgBGCX1GgjCFZJYQV3yFwlUDxIQX5XhYb6WF3AGMVZVUIqKClzyy9Tb2EuBVKzDtOScMISS8FmR0NIWYHNy2woetyj10SAZ8JiiEl2rah61q5EY0hJZEpDjHivceoxHhosfb0RjmfLTgpD9xTAe87krQlFvMZxlqarsV5hynEu1s9yIOQk5qjoyMOZ1Nm8wUxJgbVgKIo2VhfpzAGrcwJe1yWFqkxiFlLCjJe1tQNvmshRKwVPfHRYMCwqijLEqUNMSbKsqAoChZzSTwWi5r1lTWqslpiFynC889fJaG5dPECmhG2EDMZpTSjlTXKyQoUFclalC
mJEYwdkJLCBQ8qYkx/tvXS4O/137VvXvSS1vTa4v3kQ4wcHszoXMfly5fZ2mzouucJMVKUBVVV5gV6HXiS6P8Nl85+hvW1f0LXPcP1l24wGA9xwaHtr8JqSwhR7KHjPpbvx6qvYm0Si1yrwSTqR+/wqT/+59m5/DQAtx/5HD/6O/4zpqOXUNbg5zXtoibULRZFVZT4znGwf8jRdMrRbIoPjrbrWMwXTI+mRBKD0YgzZ84wHAywxqJUFHInoq4W05QufOEY1dMRR6BtHI330rLRusd5Xr6NZSTx39/ZpaoKxpMJXV2zvrrKaDAiRM9sfsTu/j4HB4eEEFnULWjLweGUSw8/cgJHjsA/f+tvgLctXkTx3PGXJzmxeZLl7Nmz2Yn1GzXSif/ej3cyXj8CkHuEyyc6P4C6y5t3YWX8C51nlpf4FMkIPG20obAFWudNKEmPMCUYmipr8itROIuRwmi0O75JurZDVwM0GmsLRNgTuhMwJhrKrH2dtEIVBRaFthU+errkibk9USoIMRG1bJAxRTF7AflMqjc9iSjlSKkBqge/bRXs7+9x/uJlTNtn8id62w/4wiGjHsaKyyEJNjfXl4RBlLqHU9Drm/sU6bxj3tT4KOz+mCSJGFQDBmWJ0QqNTB0ka9BRYVsrZknZ3nk0GnHu3Fmef2G+XPxv3rrJl778BT7+8Y/y1a99ndR9DHieGD/HS1fzItcDLqeOrhfu7KcCNEsERmVtANRyxv3tiH6CfH9/l0E1ZjAY0XQNZWE5ms45ODpi68wG+lFNO5tydHiEc5HhcERRGIbDITp9Fyk9j2+e4vFLlms7n+TmzR/mocv/I0F5gruCKTcYdbcoksPoCHGXZG8TzZOC5IQEKVKX++w9+typY7x96fN0gxpTt5Q+4vIImEqi9qescE+61mGLghClHde0DT4GSltgraYqCnyKuBBwKaCNXo5sqvTXl7ZLvX5ATJEYhOGveovWlP08FCJwdXwi0Vrm1vf2d1k/e4HFYsb62ipaZenunIC2IRCNomkdtqiYzWe4xYzRaADpUeCXZBTpxbflHjiOFmkFFG/qq8oScEhR1qysbrG7uwNJvA50Juy6rmM+m94zXvl+vB8PEq8fAQgBnIMui/zk/5QRpnFAi6xj7v/FGOmco6/8kwIXZMxPCEKOQmuGZcmgKEgh0rUtbVPjgyNlL+7CHh9iUVWEmGijZ9Ys6FpH13VLlnfnOultth1JZhPp6kYgxKYmtC3ucIafzghdKyp/IZCCl40+xMxxkIexMoaVyQplWUF6Afj0GzrZKSamsyOOjvYpygEpJIqyEpj+DSTDMgZpZIJBKapBhfdeZu+jeCGctvLVeUJCfqZuapq6zkTKhCks5aBiMKhEZCQEQhDUYDY7Ek1xYzC2WL7G9HDK5YuXTjj+CQT86U9/lhs3rvNt3/YtGGWA3whMcitIUrjYC7moPCViLOPBiDPra1w6f4bHLp/jySuXefjyeTbXVxiUMiL69rYGJEWZHh5wdHSY2yhBJk9i4Mb1O5SFZTIeUJU/htY1o9GI6dEhs9lc+uWphKip5zUDZXj8wha4O7z44h/A2h8ius9g1a+nGmmshWbvUEZsq39Figdo70iuQTmPPZyw9ewHTh3hpRc+QdGMGE5GzOuaiBhO9cZXRVUymkww1tI2HU0e92zblma+oNAGYwTliQmqgZBHQxC0yOirKF2TlBJ7by1mUsaKpHb/NQiCZ8riXrhMgTEa5zpsWbK6voY2hv3DQ/b2D7izs8O8aXA+4tqW6c427XSfxcEdXDtDaVjMp5LYUnLs0fB2xqeBtyrpyNNB2sho82iMVuLdsXbmDLoocM6/z5N/P96UuA874JZcB4rITGb/hhhRIaKMle/HSFC5RWVEurHAYLVd9tijixS2IPiAi9nYQZtlP1xaDRrX+VMcgOgD3sdl5edVElvZKDBljOJEFqS0x9UdwTmSSpiqEh1xO84VrcI5T5dCNpmB0Lb4VgQ2lNKMRhOqQQnB0dTzZZH+IMX6kviaYH9vjyuPbTJrW8qyxGYZ4tf9WicIQADT2RStFHXrlu/jvFs6KlprKcri1ARBCCKq4zpHWzfEIDasQhAcMCgKrBbXLWMNprCQep8F2QD6kxBioBqUPHbuMT77hc8zX8yXfIumdfzET32Kxx69woXhEKYz4JeT+MfL82KsYmU8YnN9jbPrq5y7eIHR6grj0ZCBUdA1zBYts7phb3+Xvf19btw5Ynv/6G1iFGWiIrI8T48OmE6nDAYVMQTKsuDqtZt87CPfjCkUtrA459jYXMEY8UxfLGY4bdDpY9TNpxh3jrIq+fYPfZTdo33a/Z9kY/AzWP1hUtuwe3uH0WSELiwpeLT7cWL7awhKPO+rq2f4Ff+vP8G/+GN/lv0nn2Pl89/Er/jrf4Hh/jqz5ojpwSHzG7cYrKwwXlthfX2VFCPDQYUicRSmIsjlA/W8pjDFclQ2poBWluBllLRrOkjPkOIPkbTDZBXKEAMq82dCj+FoQc4MohHSSy2fPJc+yymDYjoV7YLxeMx8vhClOxVxRx3z+RTftidkXTXFYMD29AC4snzF98pW2Lf1XdeyfecO1WDIxtYZdm97YlIUVUU1HKLzs/te+dzvxzsX9yEFnB2atSWh0EqjDUSl0UUpM/0hilaAkmrXWBEv8UkEZwplskNUJHZ9vzmhrEVpMQApqwGkiOscvu1o/Un4UCrXoiiIPhJcTdu4JSycctXp27lwA8qSta11UoKyzNKpwTMZjTm4vU07b+hUZOPcBcYra7h6jqplvjaEwPYtYd3GrslQYz6Ol1Hceu3zdxz7e7s8+sSTlIOKzjnUfTa1737ruq65cfMmZzfOMBhUpBhwQZwSNWqJDCxJQzHinKd1jkXT4HK/OsaINlYctkIk6QDaMr1wjX/z3/5hAL7tz/wZVl58EqM0TbPI6osFqyurOOc4d/Ysz784P3GskUXd8dWvPceYBDiM+hKPn11lPLJsrA7YXFtlMlllMJiQ7JB569jb3uOp7V12d/domgbnfO47B5RKRDEHuK/z9uBxks+caNsFR0cHVIMLKC1iQU3bcjhdMBmWpMMaCDjvGY9HlEXB4eEhC9cRw0U6Z5g1C4quo24bVlYnpDLxj37fT3D42D/hmz55gV/yyY9g0PjFAk3CpU+B9qTw79C4jq5pWX36Ct/zB/8k4ZKmCOfhpQ3iRsfAFoT1NeqmY7IyIcbAzp1dVlZXqGODVorxygQWM/YO9iW505qiKKiqAUeLmTxjIRCCiD113UuEeAgIOTT1fhAZXTBKZ46PiDYVxpCUJjV3J7aJ6D0ohW87Xnzma9nUUuHz2OlePc+GS3dL90JwjsnqGtMDI0mmZJpv/S1wT/wQ8J8jLkJvUqQp8E+XY7JKawbjFTbOK7qmZff2DmVVMJ9NH2gNej/ej7vj9SMAyzm0gFImj49pKIQJrWMg1AuIIbcBgLJiMJkILGgUruuIbYdOnuA7dIy5+u1kfD+KUntKMoPuuu4Ue72Z7eG1wTdi0ep9t5wTPzkKs+x4B89+W
8somRLCllKKI2vwbbs02blz7VnRO0+RKz2snCJhcYA/Nc26BzSkNHjt0/UdN2C9gWur8PTW8vwpZBrg1o3rrG9uoVEMR0MWi9nrvxT3XBpF27aMRyMRS9GKoRaQ1GQHsJinLyAbaXhJADrvxaAjBIxWlEWBMkLC08Ywf/xZfurP/6e4FVEc+6n/4ffw3f/lX2btmW+mtobpbMrZM1tUgwHXn3+Oi+cv8NL166cRDSX94ZiOgH9ISFe5cVhiphp9e47We9Kbdg6XyCZP6QTjUkiS/RlMeRSwb3vcD3ryRiMlhfOO2WzKyuo6mxsbLGZTCmu4eeMmH/7gE5RVSdv8NdruT2EGlcz2o0Q5M4HrfhvR/S06nfAhcr15ic/8vs9y47u3QcHu7zrClJZv+v+dwx0uCMEx2Vhnsn6bEPcI3mLRhOhZ2TtPu93i18YcdDMuVBfp6oZbt2/TuA69v0/TNoxWJsRpQhkjG60PHO4fkKI4blprGRQV88UieyAIsuNci+YaZfUpOmfwyROJSx2HmCI+xDwgJIqeUYuUb/D+nj1KK0k0uq4F8kgbp1OskJ/Bk+TYhKiKrqxtsnbmDOfPfxM3rk2Yzw9PEYXfvjjkdGryJoSKkKYobSirAdVoQkJRlgNQmqQV073dZfvs/Xg/3mjcRwIA0Hs1I8QsBcm7pW68KS3RReEGqERwHe20hX6eNfis+gf96h688ARIEe883jWn3/HEfR68x+N5uThlPZn/H4PnhIwA/TBfd0JcqHeWS7kVcfKxSvf8y88D34ZYh71K/FvPwV/7AXjkEH7mskiAPnXu1LHu7e6wurZBUVZU1RusIlLizJlN8VBXUPbjf6RMbgzZ/Us+iw8ychWSOMzFIGN4g2ogM+sqEaL83FPf+z8vN3+AMKz50u/8i3znH/+LbK5vcuvWrexImHjx2os8fOUKZ86c4fbt28fXJAHMkarpeQC6zp0gQZ6evDiWBe6x3+Mr0fc+FWCNZlhZjkIvd/rWLIonsBP5OkFwwiEJXqrkwaDixo1bPP7Yw9iBhcOOtm2weVTOlgWuk3vd+5LF/GHGq9dZWZlw+6N3uPErt0+94b/+3qdY+V8ihzeOOLe1yZo24F6ka3+OFD6BSkaMekpLWXtefOoZ1GSNF51lejTl9s4dqtGQQf7v6GhKs7OLttLjTyGKNHAmeRbaiFmSAlOUNK7D+yC6/OGzKCPtNjy4IG6cKEWIaUn88yqigiQ53vnsDZXuOZne+1OX6viKnj7Pd1/NlCJFYVnMF6SywYdASp/j1EP+jRz5A6+urbGxdY7p4YIYNYdHM5SWZKl3qHw/3o83I+4jAeh7eYroO050tQlukQVdyAzt44XSd83dL7QM0Z9/+x7eV3pu3tTn6TtuwP+aN3+AX3wdvu/74Xv+Q9gbLd9rPl9weLDPww9decMPdD+S1rSNeH5bQy/+E1NcIh99xJDoQhAxpRiASDUYU5QlkN3hlBALiepEJQ4kiD6hjWI0HLCyMmFne1vaAGtrjMdjLl+6xM7uzglVxQD8PeDq8TGke5f9ZbzMCTn+l16LQXzjqyJ7Q3BaR+HNJIbdsxEBbbMgpcDhbEoIjmJQMlvU3N7e4cLWGaqqxqSfxJpfR4pQFCWh9CituX4djH6EonueorAsE52TBx3g689fZTOtkVzk4HDG0bSmXfwwK2uXMfqCEHAxTEMkaUOoHddfuo6LfumkeOOmJGhlUUoyGBNVWVJUg2wdrIgJOu/pvMPFhNGiNogCqxVN+ALEKLodymZ1SEnUUjhhqay0IAQpQZSC9uUert5v4+4rpZTCWiv8nigeECfvhRgDXVtjrebmzofy2vI53rkE4IeA3/Imvp5woOaLGn94RDNtBIHUClfPqY8OM/v/7ac9vh/vzbhPBACOn+jjBVxEUo5/IqODvzBjo4Yrh6f/7cPbUN6tiCaaADKl98YgzN66OPojsRqOkcFgQGFthnL9KTewkKII/ywalE5ZYz3zNkpLigLzNk3Nk3/yj7L4S7eZfuSrAKy/8EG++y/8f5isTEgEzp47w3gxoqkbnnj0MVCKMxsbjIYjjtwRUvl/Hyc3/3tRlvv6tMe/k6Asjs2MJN7APOV9RNd1LI4OAcW8Ffe8UTngxo07PHzxInZgUN0+TSOqba7uiCFQWcOZzQ2Gw/MoM6ZpO5586glm/3vLz/6GnyOUAbZh83dpLugzVKOKa7duc3D4DNoanrhyheHkr9K5P8igGhED7O4viOWAwg4xKWKTzNmTLZaPhjt0D9dUasjk2pWsQinfN9oQfcB1jqbr8ERKLSfXWENssnpm8hiTNRm1KCHCsceCApQXHRDZx3Lb5q7LcdIFtByNmUxWaBY1RVVA5veMhiOapmE6n1IfTaFvCSiYHh4yHhe4buNdsMi8mR4EucWYoKoqykGBb1qme7cpB6Wgp8upl3f6c78L4p7H/O157t9r8QAJwOk4fcqP2dLv3fgqcJlXnKC8tgafuQifOLE4fPJJmJf3/Khs0InV1RVu3nhwOU+VV9mNjXWApV2z91Kd+SxxvFQVyxWbOMAJa9tmRcRjy9mIMYahWuET/81/z/X//G/hfeCb/95/TOUnUKQlEWk8GtG2jq6TqYKV0YTHH32Uz33hX6PUD71Jc9r3PuApJVSKyz7x270I7O1tM1nfoK5rqqJEJ9g/hLpt0aWlmbYMS0VdL9BRrndVVZjCkvgwWj9L538etej48N/9AL7xLM4uWPyDI9SnZrSjhp0b+0zrlmQTk9Kwvb3LfD7l4UtfQunvZmdnD5csk9V1dBfFMTIEtDZYYwmbU178L/8yu7/2p7HTCd/+p/40Wz/7i7KmQADTO3QqxuMRyhpcJ1LaKJVhdtHqELQuT0P0egxJznsiW3RnJENpJX4dXsSmTl6zPgIJWw1ZqYZUg5KmbfFdx6LumC1qQpTpoBR6+WhIKTBfzDFGE/x7BPpfxt8HYHZ0QNk2BOeIKVDP+skksijZe3uFfbk4LXfcu06apWrp+/Fg8SazZ34hXIhP8aqQ49e24D/8TfCVLfn6R5+E//jXw/TePn+MIrJS1/WJ+fn7j5QS3stscFUUFFYIjW3XsljUdM7ReXfK56CHfkMQqWZrpUfdtWKk61wgRliZrLDJZb71r/xX/KL/9U+ib6/gncP7QIiJEBN1XeO8o2ka2rZDa8Pjjz1CWf4wKT39wJ/rVaOfRl1+orvOyVvzrsdvrxQ+OBbzKckLt8WHwHQ25+DgkKosQV2jLF9k/cwGa1ubDFYmUBhCiqxMJoyq72E83JINE8XH/9FH+a6/8p185/PfwXi4zkGd2Kk7DIahGdAuOqbNHBcCdw6/j88+8w/YXrSsnT2PMQW2qiiKAdaWVNWAwbjky3/mv2P31/40AH5lxhf+xJ9n5zs/A0oJc18bCmsoi4KyqiiKiuA9k8kE5x1KFyj1K/OGHyGJcmSMUQrzSN6Zkeo/n3xllUiFvwpabbRBaY0pLIu6wTtHVQ5ERbEqqaoSre96AQVFaZmsrLwLkPAZb1Qb5HSo5Z9dtjDv/1XlSYl7
zscvkLh7k9doHnroMuvr69L2NOpUm/P1xP3+/Hsx3jACoF6GvPMGFPPfG/Gl89LzL4NU/oenpwb6+25/f5/CGBbzmRCj5Lvcz/alMkve5bngGEUuOSA2rcKb15zUzJExwN75MGF0IBQRtJDz3HzB6sYGq5MJZVGSVFq69ZVlwXg8JqTIoq6ZTWdMZzNGozF121KaQhQDQ8SHGw98Cu+Nu7EmRWk0g0GFKLPBSRmatzoFSHnT29u5g63G4qJoFeuTFW7evMO5s1uUwyMSO8T0ASBQWsPnv/AFLj90mdF4RFM/jEoFhTFCpjPSyhhPVvjWT/xiQhv4yrNfYX/3Dkf1nIG1aFty7XCGnz3PY5e/nYtbD2H0gOBCNuOCqipxoSPFxJ3vOL1BNRfuEB/bZfLUiMIWmSMSiJ2jsCVH8xnD0VCSm2w5q81jCEe2h/7JlKATG35foSm5J7VRxzf6K4SKkXoxRxUFxICKgTolOucZjoYkl1jcxREqqwExJBb1sV7HO1cAdsCbeY/DMglQ8uQuSbCp3wR/4a6rRVFw5swZUgjMFjP29/cYjUYkJoQQmU3nr/0iJ+J95OBNaQG8XPX1/ollZ/yK3+rvO+89Tz/9Nb7927+df/kvf1y8Cu5XXyAvEHVT07QtFAmVEgHxSYiZTOW8W/ZfU5KJCpW/0EqJHwOJycqElfMrsoD3Cn2o7LsgsO7u/h77+/u4LBQzGA7oFZRtWXA0nfGZn/80Mfx7KPU33qQH7e5NPbG2MmHzzCYv3JqBejvsge89pq5twBTYFDk8mrK5ssbVGzf5yMc+hCoN0d2kMoHWK5rpnJVqyNb5c3TzhSjlNY8T/adRhaVtG7xLWGNQ2ar5iUefxF15jN07t7h16zrJGC6sneFDT3wTo6qjrbfx/nF0WaFXJhAi08UMk8Qz4OxnP8Ht7/rp5REP75xn5eZDWG0AseH2XtT8QoxUZcVgWHJ0dCjOmr2BV0zH/fu7G/vkdoxSJA1FKdLaPvt2vFJ0iwUbG1sErfExojPxNJhIYQz7ezPxqzgR1lhWN38Rt291eV98r6w11xB54fQyHKrT51spsLYQ1O89uolppZbiZTFGEZxTMBoNWV2ZLAum23fuMJ/NxXnyDZ4LlaWzw10Olu9Wt8U3I95wAvALLxLwb4Bf/sZfKSVu3HiJJ554jIsXL/HCC88/oL5HwjnH4eEhR8CgqoRUlWScThz/jlsAzjvazmGMFqJWimysrjEaDzFaqo6YLZWttXRdoG5aFk3DdDalKAxlWTEZFxhr8cGjlWHWzYkDGQfcP9gHNknpCcTf/A2frfynQiSDNZcfucLlR7b43FeuQ3onrGHlGgbXUShF7SMH0ymd6zicThmUJV/98vdx+aGPsnX547Rt4My5syKEAxRlAeY3Mjv6DM1iAYhWRhMjRWmpygEuOaJrObN5nvFoDVsUlIMK5yMzP6cwP4e2V4gBfOtkvM9YkhLHwk/8uf+Wn//Df46bv/JfYqcTvuX/8ceYfOZjzMsanUmCSufNOsp4X/CBUTngzNom17avEbOVtkr5lloS/1RO/FLGqRPK9Hr+6Z7N++5QwOHBHmowYjAcUhYGUmKIxrfdsh11MurFHNd9LPNYvoZoc7yTcQ24jvCC3kj8NL298Ws3tO4f7n7T4y0G2WISJ1ilxCROa433geeee57haCwj3vkAnAvczyRIv8aeVEZVWomoVezHcY4/oKBMb92Y8TsZ7ycA9x0JeIo3IwEAuRE/85nPnPr6fqJfBwaDinPnzjMoK3a3vsbP/an/GoDH/+h/g33uLF1wy5HLSKL1HUNdcu7sWc5sbGKNJcVAcMIJKMoBUSV2d/e4dvUlYoqMJxM21taoqpLhQNoa3jvaVloHZzbWuXnnJi+8+DQxzYH/DZi+0VN08tPSN52KouDhK4/RhVqqg14YsIdM35bIUxWuw7sWbS117VAGrl69wXd8/CMcugWf/qc/xMc/WnPl3BW21taJMaGtFcW8YoXNjd+KCz/EYl5jjaVzjqPplP29PSaTsXxmpSmrAT4EurYjmgJrDCl9BWu+D21+OxqwZYnznoRMe1RHG3z8v/8TfPPf/D3gLKPnHsGnRFfX8pqFYVhVcm8kUCkSvUDOZ9Y3uXrnuhhk9RDPEgXoT0Fakv6UEXVQaSsZYlKk9PK6HSD3oTGG9Q3xAzjY26GrG0pbsljMSNm0a0k6zDD4sfDTS0gf/p2MHWCbN54AvHqc3KxSSnTd/SW8D1LFvurv3JOT5BvijVTKd1U/MmoqWiUKMZH6wAe/maIo+drXvpo1ZJa//DIH9QqH3t/LCaw1BEAXBShNCoHgJblQypBi4F49yvdOvJ8AvOORWCwWD/7b+b7s2o7Z4SE3HnmGr/5//wRhRfphT/2NP8CV3/6nKb9+LF5kjeHC1haXz10QVTbvWLQNTdPStDVHBwccHBwQgmcwHLG5ucnKZIXCGgprsYXFe09hS5SxDMcFqJobNz/Nl5/6QUL8F/BWPTQqApqyKji7uc5TT98RJbpTb/d2MqUVKUXaZkY5XqduPsTFC49z/frP8O0fi5y/dJ6vPXWNz3/mZ/ly8WW+6bEn+OCHvpm11TVI4FqH1eepFyWuPUSnxGhQMRgUNG0rBEvnadoWLbsrPiWCicTCErQhcQNrd9H6DG1W3+s3TKUNk/oswxe2RLxnlKvHJL4P1mqic0yGlcDw1tC2HQYYlANISkZ8Y18NqSX/YQm/K1BGZ10BJIE05ZJD8GpR13PCzZuQIk29gAQN/X5yDPC/VyHY1xcnVE4fcCPXWqG1XRKGX8/v3P0+r/TebxgiVwqjZJanV2ddCrQBaBFLr4YDVtbX+dLnP4/PRnMPFlnOWikoS3HM1KI/kfriP/+RgiQJsXP3tAbeC/F+AvAeic51DEZDvvJH/uZy8weIa3Pu/JG/zeO//08uPcQ31tbwFy8SYqTuGjrXcfPOHW7fukPXtVSlZW11lTObm6ytrZEiohaYEsHLaBLK0Pmao9lt9g/+Odvbd9je/gES9YlE/s3fiHsEejyqGBSJ3e07GaI++RNvZ8gbd3XN6voVDuZXqOvvQlefY39vn0sXL7E2+RFU/R0MxxOefe7rvHj9KhfPX+CRiw+zNl5hbf0RJuPvpLP/iq5tUcagjaYqB0uPhUXdyJ+Lmq7raFxN10nPvNE1ZfGjGPPvi59DZvdrYyiyU6RSeVFVMvlBEvRGnLsSRE/UinZR03jHeDhkUTcnRv1eAVzJC2ZM4qRZFAVaGxazOsOprx7BOeq+oj9x6e7ZT07dSu+2ZOBrwDcDr0Mi/GXjJQRJeKV48M2/h8+N0QyHQ1JKTKezJZJwd8vxJIFb5fbQcDQUknGMwjuIaYlIlGVJzKTiB4lqMKAaDuS+USLrXdc13ntsWTAcVHJ8MTKfL/jyF7+Ac22moZwkm7/yeTHGiBx66qdXEuPxCJ+gjRFrCkGXMsqvstlV7zSptUHZ9H4C8H70sQt8BvjEO30gy5jPFxxMp8cL9gnlvqqo2FzfWI4aNk3Di9euifXqYo5INWtWViZ
cunCO0WggD402Wc89EYIYOo1GI9CK7Z1dnn/xf2Fn53N07utCEju5Udz1tzcnjuuChy5fpHYdu7t7eQXTvGWow+uI4AOlGmH1B6nrmtHw13H1pZ/iYx/5EOtnNtl/wfPQxjloHN5HuumMp3a+QADWNzf50AcPOLc1YjQe0XYtznWQZLFSiFtiSomV8SiPzHlc8NRNS103zOefxdpHKcsPMxgMsUZG8LRWKCXXTmvxiUiFLP5jNQAU0Qca5ziYHopw0HzGcDjgYHYki2A8R+LjwOflw75MXqeA0lpKW9I0HdGFU/fg6wmVRMb6zNYWO7u7p2DuEx3Zd5j5/3LxFeDf5sETgGu8egIAGxsblKVld3f/tSv4E9enrCqstcxmM5z3lGVFNRhAJrwVRYHzXtQ0M89Da3GFTCThZuRNXiF22LPZDLRiMBiydeYM0+mUg/39+073lVKMJxOGKyu0XYcCxqMhw8lkaWVe2QKtFF3wnBlPhOCchNjsnWN6eIQPXj70K94UCWsNKCsOs9FRL2qUtQxzIhBD30bUMmoZk4xhJkXwEfMGxdrerfF+AvBA0fFaD+wbiQchAjrXsbu9y+U/+Eeo/+Yfp/uQKO+VT13h2/6ff5ZZ3SyFU27fucNXjo4YVBVra2usra4wWVnBGosBWtcQYsR1HjMyMgdgCxbNDjd3vsjVa3+a6XRG8DWok8Ykird8BDQptIJv+dCHOZy1TOf1CbTh5BL0drQBjt/DaM0v+tjH+NTnxszbjlmzxc3tho8beOTRy8xv/gOs/b9TmYLVwlIvWmxKpBCYHx3w4z+eGI9+lm/6plUuXbwksrvaovDo7DjoOyfmUWWJHggfYG1llbppWCxqQvoBXLtJ267nmf5S3CCNoioqCmvR1uSqUOFjIAGu7TiqFxhjcb6j1JrZYsat7duZxT8ANo/hlx75V2o5+2+0xhpN03a4xglTQyvUXXK+L3f6loW/1gxGI6rhYGny1f/gMQT+aymKc3Tda8PY75UwRhOCY3f3SPwZUGKDntKxGRrSelldW2M0HqOUEjSvqtDasnX2nCg4xiTjn4j3SoiREMIy4bfGEPPrJsT/IYRAWVbEFNjY3GI4GoMCY+2plPu1nra7EQzxldEYY7FG0IWuc7IOaZkkSSGB1Whr8EkmUozRomFRFIwmK7im5eBgf4lS3B0hRBaL5hT5L4QIviGFgB5WaG2ICjRK1sl8n2st+hfysu89Eab3E4B3YTxIdVMUBWfPbRJax5P/6X/Lzd//t0kJRn/2N/DVnedIJL4jyKK5ubHBBx95GINiUFZUZSGbTHZnjLHA+8BwPCapxN7+T3Lr1ktsb/9zuu6Zuw729Bdv5ePRa04MhwPW1ja49qUv0HVt1r0/ydI9dpF7a+O4T3l2bZ2z5bfz2KVLfOHZZwl+yP7+L+Zovs+lS4/wYnHAdH+PSw9dZuXMBi988SskoymGFSsb69RHa1j93bz0tX/EnavXmUXH5fMXWV1dJWrF6nhCWVZoFD46WCo7KlaHA1aGQzofMOaQ1n2A1stEgNZatP6Dl6oKqfIKa5deABZDVYk1tTEFpox86ekvCjp0+uMuQ2gECa011mqKUjaD0GWIWCOjma82CJB6DZG8uWezn9nR9K4q9/iNjal46KGHuHr1y3h/68Ev3ZseXwV+CW+kBTUcDkV8KSc8J5n+84WMCK+urjIcDLFFQd3WHO4fUFUDRsMBVVVhB0OMkV5/ORhgs9qowOVZFjxFjFay4Vt59qMPYuqVjqXB+ykOHwJWGbQSCelqraILnhhTHtF7veS745/rP5sCdE4jCmMkqUlR2nr5eJXz2MJg0UKeTeJFkRI4J66S49UVXNOxWMzv8pro3+X4/U/qKXjnIESK4ZAAJB/Q5JZWQuzrE+JN8R6M9xOA90hYa3no4iUMivl8i43/7o+JytqKxo8D2hgG0yPoOsqyZDwckZLojltrKctiCbsdzRdsb2/TuaeYzz/F0fRnifGdGbNbhgKVEkkpNlbHqFTz/ItXuXBune3tI1xMRI6TgLdj++8rGq01H378g1j1KxmFa6wUJbooWSw2ODwouLh1wOMf+RbanXXqesHixTmTjTVCiPjasbd3SFVaLj3yH2CKfwO+I0496XDO7t4RN6Z7pMKwtrnJ2njCuTNnmIzGDIdDVIIuehn/S4ku/nN0ckzK7xFiVa70DVrY/86JSZT3WGMpy1KqrSBjV7u37/DS9kvcnr08wtXLTvejaLYwFKWlMIpF3WWUSWSC1Wt2ZE5/MyU4ODg81hTIEwYnf8z7wK1bt4nxNvAM756q7CeQBOB+4wClvojWhksPXWaxqKmqCu8F+ZGQ89G0DaPhmOFoxKJpiVYzQTEaDlkdTwTGV4oQI13bYpSQdmN2bwwhgBZ/EK0UWktfO8UAWkSo5JIlkoJAxDWeoixEYisnnMHLPe+TW2qJ3G+cTAZkMz++p1Cynvngs5ZJIASVfU58HskToSmhB0hvXxcF1WBIWy8Ewr8bm3jFWyVJ0pDfO8aUE2zQmDx2/PaUFG93vJ8APHD4/N+75BTmjUhlKHxQDUnIuFCCbDgkmbCxhmpQgTJorTiazlgs5mzvbLOzs4v3PldgXyKln3rnZ44hs85BEZkMK7avfp39nV2qgSxs6UTlpR5MTOH+DykBSqOUoXDgU+Dg1i2qGNk7OIA04M72d/DIxU8xcz/Owf6P8OQHfge7O9sEH3j4yhX2nr/BzHcUVcXRomFl7T/Cmu8XU6bW0yzmnKkqvrJ3g9t3brJWDvmaVmyubWInI85M1lhbX2NjdU36mQqcu4l3iaYV58aEwiWPBoZVuYR5F3XD4uCQo/mUnb1dSoQwtjVa4bCeMg/1y5L+ejl6YxTaiD142wZc7e85P3ffOeJcebwB3A0Ly30nb3Dyeyf/Xi/qvFWpB9TNeCsiIYqU98sDWJDSDYbjFZTSDAYDbFlirF1ydqw1hBAYDAbEmJgvakxZoINlsrrKsKxIKExGdUyMVGVBf3589n0whaFA4X0UJ1ASKUUWizkrowm900PrOkpbUPQQfwKlNSFJday1WMGXVmzHZ8bcd9+yX1P6al9pjescKQmJVYGIkyktKAbH0ws98qSibNydlySFpLCALQraeiHoYO9Z0V+il710Ke/9UmCY0pKUIgZPDAmzXP/eFTfamxrvkt3rGzE+AzyJsH/f+Wjalms3rjOoBkyPjmQR0Spv5gFb2CWLdX//gC99+cscHU1p2/a0oljflz2x4L6REaQ3L9Ry0Q9ovvSV5ziaNzDvrYV75qPKFcLbcUgJYzQrozHDyTfz1Ge+yEOXHuGhyYAf/7lPE4uS7TszXHiUhy6fo70aMC6xurLGwfY2t65d4+KVh3AvvkTsPIc3d1FqwZmHwQxKgtaU5QbjlTHd9ktsmpKL61uUtmB3b5+t8+fY39vjzvXrxLbDTsacvXCByWgbzSYx/TJJ/HwgEFgED1OZpKwGFfVixosvPM8TH/0wyTt07Uijip3thrXRmEVXs6RX9BVXrtQwClVIj9R3nqb2S5Le8o9075
IpVb0YucQY772fllXay/SLtRbPAzsgMSB4+7pG2t6eaIB/AHzv/f+qUmxubi4nKApboIoyf7aEDwmN9PyNhsIkDApdVLJh5pcJMWao/ngzTjGbg6WIjwLlW61xeVOMCcaTCUZZFAkfIqURi21jhDOyBGKyDHTM1bUkEEne5D7XhR7lKcoSVO67awVJCJ5aawxW+AhLQalESgoXI4USvpGxljKPtfog6IApLCZWDKzBNS2R8BpbdyJ5j8mEw5B1NHS+T73rUDG+9fymdyDeTwDeI9G2LZ/93OcE6otxOf4DieAFGv8O79kADo8OuXl0+PLPbJ8sn+iTLb/1DpdaPQftuavXIUWOl74e+k+nfvYtef/8t8FgSIyRYjTk0rmLTDZ/M7t3HEdHh7jdbSaFZaoS27u7tO2/zXDy4ww3dtjbeZqmGbF1/iyH+wfMk8MOK+qu5soTj3PrhmM0+Bix+TSL2YxFbLhx8yWC91x54km2ts6yvrEOX/gKg6qiWl/HbGyQUkSVJdVwiPcdV699EsKQQXWeqixEXGg2Z+3Ceer9KVsb64zKgoc2z2DrDhPArI65fusWrqlZnYy4dXBiwTt1QuVcy1y5ouvcCSJm/n46fcZOnccTojb3nGAtLH9rrLhlSjaaq87LDKqPoNQqKa0QwzreO6bTw1M2w+9cPNgxFEXBYCAjetoYEYrSQlRLKYr+g9ZLbQTp0SesVqAsWoEPiS4bgikkYYgpZllpnSWdE57QI/nH8H3SRBXlvGuD0pYQAyGGpfKj1oYYYo+74FOQyR8lMr0Sr78dI1C/tCt0iLLxRkECVBCEwlojs/lREhqtNFGBSVreGwgpLKt8lc9fSglbFRhVYYylbUU2Ogb3MvwAOeTg5HtKa2mRIElF6PUoUnzPbf7wfgLwnorg/VIQ89Wcw/sFwFpBBV55Y/8oSj1NSs+/6cd6/9E/fmnpRX8s/3lyAuA0sezNPYL+7RI+BqIP+NkcP1owvXqTSl/gwsVLTOsj9KDk8zevczhfcDRr2Vz7LRSrfw53dMTZC9/EcDikKEs0hplKnHvoMkf1nNHoPCY8SkyfpihLRqbg2p3bgGJr6zy+66hnNefPnmU0GHP9zh7FsGSwssLKxga7N29TasNjH1hjaL7AnVu/kXPnLvLiteuMi0AZA94maDtms5q1cxfQ1lDf2WacIsZ5zHCIsorCGFwIGWH5dkhfQ+kbJN1XggrXeegNgnro/sS5ujsR00bGEVMSxcmYIhqFKazIHFdl3vANpbWkbE2trcH7h0j6IkprktdgNAR91zu+k5GQJ+/1jowF4AeYTCaUZUHTdoDYcQ8Hg9wuUTKOttyQZQLAalm6e5nmkKTNF6KMx+ns8uhPPN/BRxG/6Z+kGLPAUyQoud4xRrxzGCtW4SFFYn5NpaRHn0KiKIQorPLEgIAOr/8a9E9qP/8fohcba2RTCjHSuUhMiaKQRDBl50kZyUtoa3A+4rzPqpiSBPQtLh8iwRgoLEZrjDd0bZNdLU8IDmVkS85HZg5oA0ovpwDeq/F+AvCGYhd5iB/cyvetilcp7oUDUFqslj6h8562qV/mNyakdLeN8XGV95Ytu1nk45VePXH3A5le4e8vH8bmauZBEY0EvusdCBXr6w+zsrbF9p1Dri7moBKT1TU+8S3fwk9+7nPs7O5z8exjjMaX2dmPrKyvc+v6dbSLTMohXV0zm07RQdEtarr5I6jqYTavHHH0wovMXEMbHc898zXOX7iITZr54REhJXRhaX1gdv02e7v7aJ1QK6scvnCN+vCrDDc+zrReRTcNA2PZu32HwfqErYvnufrSdXYO9lAxMa6Gwg4fVAQF4+FEKq6llO8EdEXSoIxo9vvGH0sDnzj3J4D7e66GHQwpjBG7ZC2VW1VYUkxSARoNUfq9dQhorShsAZmfkFLCB4dOd7BGs+gaeNcs0F8Hfgb4rtf9G0rtMR6dh1x5KqOYHk0xRlPZYjmX7vJGbrQmuIAz4bj1pcQe1yDJvdVGqnIlRDqrBfZXMRGyuY4ioW0h5DmTe+4xkFQUQmDKkHdCJMK9p25qhuMRoKgPZtgsCd7/3H1F7isURSHQu1aYINMLMQSsQVxFY8yGQIkUgwj6RCB/DmsKQQN8xGqLT+EUIhG8B5MTDmMoRxOSd6QgPCfF6Q1egCtNUhq0ECH1O9r6fGvjvalu8LbFP0N6f++OUAi0VpYFVVVSFPYV/MMTtiiwhaWsSqqyvA+i3zHU/pY9EuktfXWZbX+jD7QSYlShDatrv4JgH2Z1a4u1M2e4+MRjjM6sc3F1k43hiOn0COe3GKz9H1kZvMiNp58FJ33UWdfw8Acex03nzA6PGG6ts3npIjF+iOdfuM6dvV3mrsUAs6ahbhrsoOT8449QR894awNdFszmc2aHUw73pzSHM5RSDIoC638QlRKNa7j4+MOMBgMKFKtbG4QUiG2Hn85RMTFYGVNWMiHiQ8CY4tjRNyGbfZTzl3y8a/N/fTGoZFyth1rlnhXlQqM0Oil5WVugrQU0PpxUmlN07QwffxQfvAgmvavi/s5JURSMxiO6rqMoSlKCcRZ98ikSlRDTVK7QtVagoe5a5osa5+OSte6Dx3sh0pVFASFiM/nXp0gyCk0UG+2yYFBVS4Jh5xwxiuBXSpqYJOmQzVRRlSWTyQStTVaWzPP03ssUwX1Gv96EEOi8o3WdsPlJgvAAJHlGYkr4vj2QBb+WVXxKDKoBtiiWzyT5d3suUIpQlRUoSXJUYdFFQVGWgji9zDU0WskUS14revLqey3eRwDeQ2GMoRoMGAwHkKDrOurmhFhPT/zxka5xVGuDLLiRyS6vQag6De2q10gDTkLy6a5/e5VfQUOKxyDAEt7P8TLchJfVLX+VzxFf5hhO6wiqJaR592svP0GS4xtUA1bHY+b7h8xnc7TRbDx0gTu3btFO51xe22A6mzKra1T1MYrRT5LmjnKwhl0bcfv6DZpbt1nbWOPwaM5i54DaKhbuQ1y59Djzboa7E1ktSqLVoDXzoym7O7t476jKgqapuXj5AlsPXWbn5m3md3bZeOQyrHr2r99g/84n2Tz3y5lPp3Rtx8Zkwvb2DoOyYjioaNoW7zzNomZrc5OD2YKdvV1sYUnNCYK3Ussk4PXGy10Hm+fUI1LFtq5DKRn3wojqm1IG1c/Cp77l4NAESivGLa5rSP7dKM963Ip6rSgKkaH1nScp6TOXVgScUhD6WjCJ0Dm5R1VCGYtN+RlMAa3l55XW2fVTCHHWWiHqxSgogeo5HSnP+2tsKnAhoDL3IATZ8I01OecTwmEMUV43yfjoaDTCx/CG5XG1VihjaFth7IeYCM5jlKbtPJGYtfrzf4hrn7EK8j2TYh5n7J9eLehAL4Ht2napaGqtES8N5yEIomAKuxT/MUbaU7FvnaQoiBf3ddt/w8T7CcB7KGxRsLG5iQ+eru1wwYvsJ4mmbiBn6gnoupa6NgwHI4y1jMYjjg6PXvnFl5sxGb7TYNSy+ZEU4gHv/ZLg01d4sScdZLEeBaiiPJEepOXipLQhhXC8YOXfF
XGZvBmklF83Lj8PuT/Yx8kGRT4Ulj98Kg9RoBK9C6hUGGrZZrgbKEj9ZyNhrMEWI4pindu7O3RNw6gY8fTPfZ7h2oRqssK3fuyjfObZpyltwd6hYtH+e2yeL1lMHeHOIYWHYlTQ1Q0f+sTH+dqnP8+KrWimC158docb27vAbyT5f41ygRQi0/mMYTng3EOXWTt/hvJ6QTevUa6jLDSDi2cxWhGLgvOPX6DQn2Nnf5PB+BNcfvgR0rym3Z8xXF+jbWtu7+9B7Ugh0K6tcmc2ZePsFgf19ATzv7/uJ8/da8fdPxqCp24C1ljpLyP9XkOibRq00cQkrG2tNb1HO4AmYE1Cp4ixkuDGuy/QOx4/DjwGXHwdP/t3mKwMxb0xQq2CJOOASuTNSPrvPR8ixYBSiUIbtIGkAlolYhQHRlVYsVEOAl+LnHcghpjH6UApsQCvj6YC/+dKG6XQRhCBpqnleRM6HJUtAAiuJcQoaESGz2Ww4wFg8oQgOzHhOkeMQSr5lG2lXcDnNWs0HEmSGMIyUUjJZ4vqIG29KHyA4ENuJUghYY3oIfReBn1NoYzGp0hpC2xR4jtHWVjqukbrnCjBe9IDoI/3E4A3HE8D3/ZOH4RsmH2P1Ht88IxGI8qyZDqb0RdwyxU5QT2rURhGI0tRVVlR75UeYo0yhlRolBeNbFUW8gASKWxB8I7CVku2bvSiONYv8soH8FJh6L46ARGsSYnSWtooi6BOoK3GhUBRFNIXzg+/ypWoznBhzHPO0TtUPk4VwomERwNRTkAP7ZGkB60VWsuolNGatpPP1m86KchmpLQWElKuWOQQAslf4mD7AzxyZcTWpXMc7U2ptw9Yfegsze4BuvaUbaSuG3QI+DZSnf0qpvhu4nRB6Aqq8YS9+R1uPv085y5dABLucI/5/LdQ+3PE9EF0ecDqyou0TcOZ8+cIdcfuS7cI84bt/V1sYbj2hS+yeWaLrYvn8bMaFwJnrjzMzRevEfzTPPzEb2Z2MKNebagP5xze2oFhwaMPPcLRbMpkdZUXXryKKQtmsxnDosQoTSDk68x9bf4vFz2SEKOI0yihXGOAojAoYwixV8ITcxhjgWSJXEYb0WjXMYqZ0bsuHK9VK/bYgLaewXiECxGVNMloWu/QSYSbQteircjhBhWW96jATxmQiQmfvCSuSuW2AZIwh4BrPTElQVWck/NcllhrsEVBCiItHEPAeUm8vQ9yblOSMT1tmXctnesyYS+QbIGLgWEpHKH73fxPKh2mFKUy9/IaZVUSXGBQVZnLIARnbYucGEoh0BcFWmnarsN5j3fSAinKbHEdIsYaFrM5RVY8dd6JRDUyMdF1HbYsUFnKWvU8CS14w4N8vm+UeD8BeMPxL3lXJABJ/ABSDAzKiknWA18sFmilGE/G6OkcTmSzKSW6tqMajijLisFgSL2ouXeVlxJaMn3NoMzysvkntRa/gD6JCFk6tA0d3jmiThTlUPqR3i17dcF7qaKtlQdca8rC4uuGlBKFrVBa1MCUkhUv9Q+nkZ6oQtoXbdPQxYjJvt5iYZgny4whJUmOVKa9JBVQ1qBVYmANo6pk1nXowmB1gVJCSvNRqiWiymdCi0a5nECGwyFoResju7sHECKT82dQ2sipbjwjCm7f2aFSghoc3PoWRusJvTnBBsf0xm2SVUy3d5m2DaurY1zbUrea2n8EcMzCd3Pgn2arqmjbDqUTR82cdttz5sJ5th69xJXDOaCYHh2x2J+iBxUHX3+JjWrASH+RgxtXmU8D649eYn8+A2soi5LxeMz1GzcxRYkxhm5RUw4qLp49x0EzZTqfvXm3aZKx1GQyySrPp0eE3d4thYDkP61FylYzorC/WNpCWuG6TmDcd8sAwH1Ef7ij8RhtOlLI/e2YUDnRTdl8pm1abJ7GUEoJQqDBliU6S/Z2XYcx0itHqyzytKCscjIefM5/I7osIEWa1lEVJYNBJdV3kI0/IMz7lJI8rylRtw3G6twjE6TBeU9Q0GShsQcPeZ5JUBQlkUTXycSBUaL+Z7TBJ9FECDHkBFyOLYZI61pi/l7IXgH90F5MMlKoErRtQ1q294TsqJWSNkNGnkxpSUsUgWPU6z0a7ycAOV5pHXlnxW/uL2JKhCQZ7aQs8c7RNC2DqiQqjZotgJ6AI1WEdx1ds0CbMUVZ5gTgZPyfgBeBRhaelBgNRgQSJgWUEj3xpXSp1oTUkWKiNIZojVR6KiMHIA9ujFS2JOSKn7JA6YJSJ8ygotSFqHJlJrO04+SYkxdmsLZSEfWzwNYIqScphfai1CjtgVwuGUCDVpYUxWAEFcFaFi4Sk8Xkiivm8TMZm+qJRcs1UNjSJEyqmO4fUp3bQmvNwfYufjRi46GLlOsrmMGAs26DGzduMC5LVooxhYembqjsGGVLVi6OGa1NCK1nf28fXRSkbobi79KFX4kxmkFZcX13n4PDA86M17BGM69rVlc3MYtDXvzZG1wYr9PuH7J+/iyh0CL2s3PA6sUtzHiAtt9PtfHbOTo4YjadcvHKZa4/9yLuaMbqeEyMkWaxoG1rUvQUGyvL/rE8Ar8Z+B+RKvfBwrUtXkHyMsqnMv+EvOmkEGTTNxplDUUy6BBJ0eYedUdlFU3TLK/Juy++H/h9vPp0kKMoC+p2lufzOwhyXzsfiCpRVRUFsuFrnXGFmHAu0HSeZESNT6FIeQSwbRpc9DJGicGgUaVdemUYYyiMpQ0BnyLzRY0GjNUUWIqMKrjUUii5DoNM/PPK03UtRDGYUj5hShnlfLB1UhJ6HyKd66iqSvxItMkdw2MvgpCFeDQiY926ThBLFF1eQ0xR5EIlEn3/7EPygWJQyUir97kVKeOnMZtZ+a4TG25lBPFSx3bA0vF8NXT0GzfeTwByvNKl/UbZ/IG8oXt8jGzvbKOUZjgYUFhNOAG52aLApERwAvPVdY0uZQbbZPev45jQp0chBKqqyCYxiqzPgVcttrDUbYcdlOJBYC3aZOhfITO5vXOZUqyMRgIFp4guDMGJVK1RArH3VY/PzfkYpI8poiQ69ykbdCGiJSnEnOW3y8qoj9hrh4cM52nhMMQE2lrqIItLaa18ntDKRlRI35XcWpFmQiIFl8ekCrT9rQKVu0T0geFoTGoct7/4DHEywJ/dpEueS9UK3iqGayuk1rFWDqjnC+ZtzeWLZ5nVC9pZjTucc/PgEIY/zCK9ROQ2xHPENGRQfi+PX/wpDm/voKuSJx6+wnRe8+LXv87GxhnSGDavXGRne4fLDz3McHWFxeqYamPCbP+Q2D2PGv8opftNpLpj9+otVnSFLxS1hf07dzAJBkZsUq/v3GHRNHfdC29scEhptTQoUogVcYiRpAXCTkYWZatAE7Fak5LC50rMKEPXPY1/17H/T8aU18pMrPnXVNUurQsURjbolGHnwki1naKnLKxwa0LAKmlZUZaZ+OaJnRMZ3Xg8JjgYTiiMhSjFQGlLUtIoo9AIymK0RiWdEQVN450oBnp5H1MI/J7yvLxU6AUx5YmMBGVZ0AuM3+86uYT/Q6RrGnzu25dFQVIpz/QLLyhk
qeDWdTLvHwMKaJsOU9glmkhuCYQuLDUUUggkq2mXY7sC+6MU0cflZUpEFJGkA6gk5zmxRAFSvJcQ/F6I9xOAZTwoltgAXwI+8uYezgNETJH5fL4kR5XVgKKqwAvbddl7zD7gPTM2OE9bN5TjCcbaVya9pETXimKW9OaOGdptjfAQjKIcDImxJwbmResueV5lNHZYgYGYZ3J950lBEVTC4XEx4COolCiMzZMHEd87l5HwTqBBgkCgSiuI6gQ5TIEtsi1tHl3TisJI4qB8QufNxeGXSYbSWrQClEYlJYtDX5X0r61hf/6TdBv/Z3zTcGt6wGA84vLlh7jz0g20VlilYNbQWcNotEqcN+xv79JkI59hUXHt2RdoM5t5YktU83UW4RY3918Afhr4TSzqmoHuONw/YPXMJqtrq1x/6TqjwZhz586xMlnFlpY7O3ewyuCblq52DAZi9lMohfaJ/f2vMdv9NwyKhzhczDh/6SKhbZjv7+F0YuPcGdz2Di54DhZTIvGuJ+ONLYDeO7wSkRWiMLejS8cJgFIknYTAmgoCQcbRQgm0WKMJ3Q8eV2dv6GjeubCFRVtLmTSlLWiaRpKjPOrXk9cUck+bZDL6JOiZQVGWJclaQkyYfG+a3EpoXSOJsNF0XSc8C2VPcC/y2dMKHyQhiKFX3xMCog+CRPSUBp3HNTGSDHsnGgNi66vui5Ap0s4C8WulheehhHCXNIDYfocQadpG1gtrcc5RFAXWaDrlCN4LcdCYpVhRURQopfBtJ2JTISzHBlMms6jYCwFBfxdJ4hFOjLeqLKHu5Wy9jwC8l+NBL24LfI13QwKQYqJtW/kkSuGiZ75YMBmN5cHNWbfWmqosCIMBrhbIv60bCmWwhZUF42Ue5pQSzrnj6vquMTmVwHcOW1SElMCIAldEoHRtDbRypoMKKMTFaz6bERoPyaJjBCuv2YVASnl6wGTjkBiIwWOLMn/mSCKSory/SvJvqj9+Bcoa2WycsItREMmWoxGUNvTpvl7irbk6iIIWaKOXn7cXETLKMu3+MZ+/pjgz+OWcX1lna3MLrGb93Bl0EovTtc11kjG0iwW1axlvrKFiZP3MJqUtcM+8wOrmGVRRMBnPuPP5v8d08ZIsqOoZVHoapT5AWT2JD9c5mD3FxSsPs3o0ZX44ZWPjAnY84Nq165AC3byl7hxXzl7iYHqEZQtMwbyZooYHHE7/Bhtn/jAba+eZdQ2q6Rh46KJiOBigraUjoI3tLzPLk/kGo/dy916uG0qqweiyKIsCVVhZwL341UcAH1GIGl4PDb97l+MA/DDwm175R5TCWoNKQqqTf9O55WKwxmKtJQYvM/pIQqC1Qie1TFLpk0ygaVpCCBQ6uwAi51tT0HaNQOsIRK61RqOzUmCkNMK+d5mDEUk5WZZkIAQ5713bLiXGU4qkJM/qg0xjqMxzkHtAUD2l01LYKSC+HqaQ+9AaI8ZGub9flqWQILtOJIVR2Z66EA5RVYnyH4nCFrjgUFa+75oWvdQL0sIJMCo/55CUWZoxyZoiVknvtXg/AXi1UDpffRn9UlqqlqVc5LswqqoSdq+WTSuGSNu2AqHnDLYwllE1EF1toG1qUow0dU05Gsrvvdp89YnK/5RhC5C8Z3E0pRgM8J2XQi8/sqGXjE2J+d4RiQMRFNFG4PnoiYXOSmZCNoJsDGItLnhQCW2qPB8OppBFMCipoE1pCL4jdifVvRLKaJQu8hSBkSXEaGH6Kk0MTmA+jvvSQs7KZir9NY8py4RKoqFpWIQfYHpYs7/4Lq7t7LC+MsHNa86trnH2wnkeeuJRuhDoFjVjW6HLkmm94OrVa1ilKQeW/cNDZttfYTD420x3rnFUH8oIVJoDN4jpCXwcEMOYrfU1Dm5uMxmMZeGO8OxTX+Pyo49gypKdW3dY2drg9vyArm15+oc/SeM9jz16BZMGeHNAiDfYvtpy9tJF1h+6xPUXrmLryGz3gFIbQmVpj7aXleKbdb9HH4n6WMhFRi/VUtsdBSpKyyr0iaVSGPW78c5nW5dvhLGsO6/4nSWZzxiCcxhtUaUWaV2Ox/VCCIQgCdDSGjgKCz6CECa1yuY/0l4pbIFvHYUtpYL2AW0Ug2qAc1JdayVOd1ppdAJblMtWTFGWWVAoUBZSlofg8/H45TqotMoGOcetxfuNGCPOeUJOIGJMWA1kiF5GHpUYHBnhElXDkkU9B62XNsLOOcZjscc+ODoSxMjLOt0TIz3ZUyGIkiSpH/U9XsuQAaDl+uadh5SwWqONwXXtyxZG38jxCyAB6BnFkLeol/+ZbLahlhCvQme5UrlZFMYokneEtn3bjv5+oywLTFHSeYd3HdFHgvd4a0RoBXKv0DIZDElFyXSqWSwW+ODRzglz+NQie/oBf80+WIr4plnCuigw1i7H8gC0LSAFSQ6MkWkAZeiCF3lSFEoLVEqQMb9CKZK1+BiWz6GooEV6H/EYIyidJWXD8uhT7CuOTOZRxw89RHQkbzgiR+vzfDImV2hJzEiUFtKS0jI7bcnJg/3HHPrEzH+CPWpc59hxNauzQ27u7TEZj7l47hxdXdMdzenqBetnzzA7OIAYOX+xZX74/UzOjHHTgtaDUnLOUvxXoL+V2pV0xcN08SqmronGc+Gxh1lZXWVjssrWEw9x66vPszlaoaxKrt94CRMUK2urDGLk2o3rbG6doRhU3LzzP3N29S9QRcULT3+dJjqwhhBhZW2V5154hnm9uPvCvv4b8RUihkBI/USF6LvHfi4bpP8a/PFbJUkM/NLw6RslZgh59so930lpH2vv5Dn6JJC6kvHGlGRsTYG0nrTCaIXFYBFUzwW5F3UmDMqzJS0RHwKmsMvR2KSg6QTajykRvWx7Rhs61y2rd2t0bg0ASXxCOhfk2dJKiojMvxFxIeEgFDajZw8SShFiwDlHgmw/LKZAIUaic1grSEhKitZ5gpJx3BCC/H6IFNVQiJN1LR1+BcpIj19afzJ11CsqQkTrRAy5vssjp0pp4QVkNFAJNEGIXlDEb6j77/XFuzoBUP3M68v2Xu7uAKq7vpWO/82IitTS4S7fFGi5wGT2etKA89C5PD8mIyhKy9iJ1poAhLa7671fBJ4FnniTPvmDR9t2+KYRFmx/3vLGeMryV8HAFHhjWN1Yp3Me13a4rpXzfur0/mrgH93HUeReWogoZZYU5h66FVnTitY7Ih4VwadAVDGjCGIUE5wnBC9e5lqqgIRwFmJPyiH/bJAHVsVM9EmnDmc5iZBiImTCkyiJiZyorAZZKa2vSsmCQ1Gc0mICHYVEZI2MPvpOqgQVAT6JJ+Db70QZw2FoOZg13Jgesj4YMsPzwUcfp9k/QjU11hgm4xGHe08xe/7vA4d0bFIHR5dE55+gCEqhkvx9lh6n8j+PUlPOrq2xfesOs+mM+e1dVs9sUATFZDTkzo2bFFFhtcH4yOpkgmo6zk3WaNqGqVlw9rFnuX79A3gizbzGpchoOGRjsiX3ioLek+H4/78a+JFXvfonTVZetmKKJzuvHKNJ9I9ufJm19htt8T1CWoP3JgCoHVA
vENI5gg9ZfU/Wu+g9TdOKip026JTQhSGphIvChyD3zvvpid5MSUVJVlVftPQ8HaIYaCmoikJafAgPJ6Uko39eNO+jFxXGLnhB4YjC0UgyJhiTR2klQk5ZNXC51N7nJVJZeMga4TeEGHEZiUtAWRSStCRJWESwSGGLgkIphqMh0+kU7z0RRZcFjwplMEVB65xM+2Qk1FgRn5JiwBzLfwYhoaZ4UoGUJcpHyujlK93P38DxLkoATlbpEoPhkJWigIMD+QmlESOo/ubW2T60vwmlz5xg2a9dijpkr2etkFEQcm+rvyEy2Kn6TTMlYtOhqgKbHzSjxSTi3vvgCDh4607NfYTWmtIURGOI2SlLqSSbcDhGAFARpY1YaybRyvadjO8ZKz0x8f9WiLLZg0RPqon9/rp8/7aZy7nsiTgJkskPYBQ9+OA7ucTa4L1D54WKEPIoHnnuWWVSIHIP5D+XoVRGCny+h1QmOma0QCuUyeTHXC2QEsoKkhC7ToAMJWNIVluRbY0RXVpiyFVrbIBPgvsxjP7dhHAOInQmsuPn7H3li7xw6zrf852/hBUVuXXtJVy4xdr6PyAs9nFEFsFx5BvRwXci8yrGx38H9H/BrGtZ7NYcDo5QgwHMGw739nnygx/g9s3bnL98kelsymA+Z21rjbMPX2K6u8eZzU0AuqZhf3+fc2e3eO6Zv8Ha1m9hY+NXcv3ZF1k/s4lLkaZzfPCD38xnv/TzS0JoBuOBR3m11V4pJXK1WmGUQblXl5e+5455zfW15hujBfAqkWTWfz5fLCvq5fqDwhhLQISxVEw457FlX4jIPaiNsN+ryi6vhs5aGTHm+zivW0Uhm7Uit9P6RDrbDQucn4gZ3ZJfE2QtZFMg6a1b6rpZCuTYosjJ2oNtisbkaSHnZBzQOXwQASDvgiT1ud3Rto0IgiFregieerEgxoAPHpEQFx0F17R0eTJIA0ErMCYTAKUI7CcmUpb8VbklmHxGn5IkPUql4xHk92C8ixKA/gSrZWXeNC3TE3PpyVjQ8pDolFDWLGEdGS2SXrNW0rPtNaRN3mhQOekzPXNVy1hXihTWSg+uz46VohxUlIMStFSjRitMWdDO3r03g7EFpixIqQAfMc7hQndqvXbO0bYC9RslUqDj8QilYDadZrlNTXpTFlpBW9LJDAAh9Kne8S8nbynD+Yns4hU9KSkCoCKE0GUlPr0k++VRA3SG9yU5S8t8UqGOdQqUFtjVaLQVj3PpsWZDFdX7zh8TG1NK0obIxCujxWfd9xyJDCIJQhBQqSWpDu/+KujvBfWIqLylRFKwvb/Lj/7Uj/PoxjnOjY84d/mfYZKhHG0ye/E6N168SudbDBqvDS75/B5T0C9AeJiU/l0Ou/83N/b3eGR1gyrfl0HDnd1ttNFUgwHjlTGVLZgXhjQsMWtj5geHpIGlVYG92R77B/+K9dVLrG2cp64XBKMZr51lbTLmzPUX2d7dfhnGvfxNG5M1l/KiqhS2EOY5KeFq9xbU7f+Md0uy/dqxgxQHq3f9u8JaIToaLT115xwaI1UvEPNYnDIK5cG3jrIqUGSoPHMDklJSoJBkPl9ld+aUiEn4BDGJAFWKgaZtKWxBjDI9I6NyGYFAU+T2gfAHRITLB09ZliitGA0HNF2HIon9eG7ZPciIXA+xJ+RZLjJML9M3iRATBDAxZFOzIifvUeTOO9H7t9bifZRzZjXKFiinUEosu5UxIjyVAC9tDawSISlAwIDcQulHhnO7WCmTyY4PznV4N8c7mgCoTLjoZy1BxD8w2Y4xRHTSkGRT1lYWF5M3rqiPF99M4hRoTIkphsqwr9Ea53yG8xX9LLPOF1pnediitJTOg5PfnaytorXCO08xGIiMZPdulB89jhQjZVngnaMaVayYFbq2kd7g4aGgAFrhSTRti07ZaawsGWkRWHFZFOPNC5WZ9sc2ncpkCVGESESSCihCbtFILz4h/yjZuMB0SedxviQgkMzk65wEhmN28fLtpZWTEGMPsU0V/QGfBFVQmQciHAExA1FGRqKsLWQkKvjcp0Xuz4wKavKoo5H3VRGUciTzQyj961E8Lp9LKUxZUnvP87u3qdNPsNBXofUQErd2b7MXW9r8oVM2RRLP9QbST6H1b4OYSN4zDx3XZodsVCOOVp7i6PfOMZ3h2/7ShzCDiqvPvsjNqzcIKdI1HbPZEbu7OxilOLqzw3BlhfOXDqgXewxGj3PYNpSTCXY0YlANuHTxMrv7u9lo5fhc9u0xDBRVKWRJIGWxFN8FYgiv0Lp7kPhFwMu5tr3b42ngFqcTAIfiZ9BKklXnPNpk0mkkq09Ky0sqeWlzuQg6yTPZO+eZ/HUIJhMGs4ANIvhTNzVVJesWSVj0oZ/k0TLXrxV0ncMaS1VVONcRVC83nI65NnkDNNYytEZEvZQCa2iVunvK93VFX6z1LSOjDVUhQl4qKQKS0KCi2EeDcHOQZxIjSYjRBl1anNZZCdCgTYHzHYWRAi+gsKXFDkqcc3R5min1haT3S5I0sESVT40TP0if410eb3ECoO7963HjL+ef+lhbRGlAk2KGdJUm2SSas4AxVqoOdcJgBslcZSNIouTUs1XVcUUoPtKiNGatprCliM34LveyhAF+6nIrTV3XjIYj6f+nRDUYMTdHLzMr/+PA48DGm3j+7j9iCIyHI2LpGJQVIL0y7z1mNoMQ8M4xPTiElBgMBpRlSWkl+7ZVies11t+U+/04yz95EyTlIck8eFJKbDd7Ambffk75mvQ94yj9UHEu83LfaEkutJYEQCst0CIRejSH3FdWZBJfkHHCLhGdp58SsMaQgjDURchI51E05H4yYjhirMn9bRlT0iCSwUb8DWwhnBFja1T8EZL6bTRuS9oXPlIMLG34HM9d/zGuXW9lkUbIRq2Rxa/QBhcdhbLiORANIVwDPkvU3wrxN+D9D9Hpku2LR0z/yh7deQcRwsTzoT91BWcUem2EcZFnnn0ONKytrjEaDNjb2RXpVa3R409y6J5gtLYGi0el2wAAK3VJREFURUHXtCwiPHTxYZ594Vmms6PjBr1RS5JZTIG2bpZJgQKii6ioj6/9m7JgPgEUb8LrvBvCg3oWbbbwQRJNjRQ7Rksbqh9P7cdNUZIYJAdFHoWTpDib1WQToRgDISaR1u7XgYzeaCvFS1SaquiXfclgjZHn0PtADEnm5rUCbbA5KQ5L5z9ps6oeKeuzYF5bKOfuu0H0SCxFWeRnPhEzkuv7zkKG3zvnMNmxMEXR5JDuQ8ytfBE0InN7Uoyi9ZECsfNoZSmshdSLk0lbQKeEUZqkLT765XEuI7FMqt68WZh3T7yFCcAJJoU0n/qSf1nh9cIf6JwIJMk6Vb/Y5PGWPkIUVmvUx4zVIhNInAu4GNAhK8pZkytEIaxoY6TKVCmLwETZUIxFlEe1QHAxwGKej1wzHk+E8OJ9Zp6SwaK7Y8Y73ps8Qfbrk5fWOVwQkwyXuQ/Ry5hMAhYxoqxF+4BWmuFwTFc3x7PJb/iY1HKBOG2r2xMpMpFIISTMlAk6yPdSQhT5JB0nKdlcU0
7uVHaMiyktx4ZEL6AvSRLEgNZIb1pF6espJQupEvQnhoAi5RaAzihCoiyLXPUndF5YNSqzhsF5EQvqCVnKWIiREAMmanyaoou/ztD8Lky4jHMdxn+eqP4h1Vhj0pBGg40i7FIphc5SqwMMSglq1TpP3bQkWrmPi02CisQicfDJbdJWr9YC13/DNrMvBzb/p7Ok2ZRKGczaKlZBKAp2mwY/GeFCYjGbsrlZsrryg5TlHxBVxi7inacaDdna3GI6PRKko7LSSw3SPrGmWPJpU4wi0Zwr1r6qez9uIQnMaUTNh4DyAner3gArq3B6L1a+spkb2dyyIZZzDlsWJJ29P8gCQHkjLwqByVMIuaWWaF2LtnK9fNtBkCRZZU8LkxPbLmvgx3gCFcvs9xSPr6nOI3sY2Zxlqua1++T3fDfJeQCOkdkoqKAtClDCYZJxbJkOIMnnDTk7MHkqCK3BZz0CBbEf2bMFFJbgPPP9I/ksIEizNSTn8S4sC4mUdQZOFaqZpNS3md9Lcd8JwOks7lV6Ilk8JfUVYO7dGiMXI/VwVb7uRVFSZIMHZQxRiRCIdseQe1UNCJm4BwhhL4qhSCJRFQUomXPXGY4U9riYQ/SGFiIm0o/S6LyAIfruJxjKiUSIkKJDMl8rqnTW0ob7Ize9LZFEaW1/d5eubeg6d2IjPAbgc2Kdv0hZLEO87QdliRuNmE6PjivxBz4ggdW1MtKfOXnOesg/SwX30xg63zcxpdyu0GhboJTGdy7DkgpTlKhe2yhKLzSlfC/087/Lj3hscBJ8S1Im0w4kIZQEIBGSB2tF+ycFmRtWiqosM2ogXBGlhDyE1phCREdSygthThYKW0hSA+BbrPn7mPI3U1RTnPvhjFTI+FZBAp2E/BiD9CuVFnFS1ydiglrFeJUYP4ayZ0jxm2ia5+/daBWUg4qVtTWM0RRaCFsmL2DzX7rN9JuPmNxe4ZEfu0JVDiiriDEvYO2HiSaSypKVlRWefPIDvHD1ebkRgkxQEAKoQOj1EqJUjScP4/3Nv49/AXwCGC3/xWSRnyZ4QXsQ0ZmkZL2SaZheFhex2NZahJl0wuWWjPOesizoIVQfIzoeM/6jd8uHXaVsGlQIfybEiM0tq54MeIyECYcqZIKczi00EPEhrTTaRpquFeEceC0DxHtCQd+/wzsnBVombHeuE62DhHCHkkzbyGaf+QbI5yKjKDF0UkxGkQ1ORhA60yv/qR5zzq0Nl9DJ5PVR5Zbiy32IjF32PLN4nx/0XR6vPwFQWUUKyGbVUj3lnqlEfzNEIWVak5ne8h2lM0HIQ8/Y10bj2xZtoRoOlqMfKVdXw66FPI9cFZY6gUFY3b13dPJxudHLsXKs7kb/AImeuNx4Ip6h9DH/IOeFpyvfZeZnli0HbTTGFtCe1Eg/+QvHrYl3IkIITI+OXpOQIxm3VCBd3cgssLVYpRmPV5jXC9nk4oNkASfQn5iIOqM6J8a9elRIReFgJJR0ALTM42dQkf7uSbnPbrKMrFIKpSNGIUz9lPAxw6kISbSPmLP4mOeqIb+OThlFkPvFhy4jKMfjScYYPCnzAUSyViuFxuKTLJB9O0hIXbJIGCNtgkLZfBoO0OoH8X4BBlRQMhrpQ55C0OKoaEv6zEsFmQ03xmYY15C4Sudm+HgZo76JEJ+l+P1D3N9aLMeazGcKyr83ZDIaidqcUqKzoDU7377Nc//11+m2OuzUMhgOePTHHqMwLab4J2i9QkqXGQ6HjAYDNjc2+pNITAHSGvBdkD4FpKVIzNsT74Wk4odPAaEC7weSR8aO6SH5E5tcgq7t0KWMxvUS3gRBarTq23eF1C8hMs9ugCvjMdEHdG81rDWE1HdbM9kwI2BKrIKjD5gkD8bxY5vXyCQyzVYbiqLEB3efyd5pom5/b6v8zEcNOsuWWxTaiL9JyP8lo7FFJcWdEc5AG8U9VOfnM2UmpI6JkD0AcsafhX5YmgQdo5BpeXzHcbKISC/T9v3Gj9edAKjBQPqofT9U576fEk9vW4hYg2878B6MQlmBMIlJYFEvmsxaKzAFtuhHqCK+dbgq955CRBkr7k0nEi7nPFHLmIyQvhUuBhmHKUq66FAhyghZ7PKNJZudwVAYIySbEORG8blSVJLEBBWWN7PwnDJ6kfoZetHQN8Urnba/DvzRB7oQb2a85uYPyx5YymxjXze02qAnmlE1ZGPzDEeHB4TOcYoY+/qOQH6hH8bPEJ06cVwpBLAsq32lwJb2eFSnZ98rRYidIEpKS6soAkpIgronCob+INU9YzvWFGBVHonM2nYxkq0B5XC1hiRTI/10giAGHq+OpU5NnghJKRGzSiB58RKiYcLYIlcLMmOtlfgOOC+M+hgjZVUuYUeIfZeEkEI2apEZflVJhV0YQ/AOHyLW/F1C+gPAQKY9fiCx8Z+dY/aHDlALzYXffoWqtcStRDkcYnJls9hc8Lk///PEUr72K54v/MHPsXprhXNfvYDSh2huoLgkglJGMZ/P803T93dL4MzrvRHe5Pgy8Ll36L3fSOxzjADclLVSQXFCnjslRIlOH1tWx36TqwZLA5yua+U+CyJO41OEFDFK0846VkZjtLGsjCeEFFk0DYW1x9B+TOhc7MacfMcsvKO0wjc1g2pAUsKPkedJ1uymEY+RqhpkMa54rOj4uuOYWJhSwPsuF5LQ1DXJWlH5CxFrjAhtxUi5skKMompqtKgEhhBZxEDyPit9StIegieoJOcyCclQ1BQNKXoZI1729O8+9pdLBPrjfS8koKfjdScAyfejYbLpL4k/RqOVwFEpgjKGoiqWGgsocfhCQ3Ry0xRFsdRcViqRhkNSFKEL34mwBaFb9vX7iEqg3rptRL4yCtvYWEvTNUsiWELmvHU+tp7R6XuyX3aTS0qdHhk0WmDfPnTWhs5iMTGIzeQrb4TvjraA0blNcs/4lkSiF+VJIpJENhBpGoqqwhcFg9GIlOBgd+f4l+4rEiLJJatNiscPkILcc+nbLwGMya59veKeEHmCzxWGiih6TXAh7KSY6IK0iHqXuZSv08mH1ee+PUlGm+jPTRJCqbiwZUvaDLfGBG22DlUIDyEk6ZP297/KOuhyGjUpRJx3Ykl8Am0AQQRi6AguEIl4r0haFm6dUYWUk1GlRfbUOycoiCaPbYkyYiSi1LOgPoHvvopRX2f9n59h9Z9sCoKhEm3oaJqazTMbaGSWHK2Wm//yKhWid5ASQoBUP0pVfFQU3rTi6rVr9MnQOxPngfX898h9Y83vivh+4A8uvyoGlcy0J4/3Dhcio2qI1tA50e1IqFxU5bUnRrzrJIGtjv0qykImVEiylrkYGZUVoXNZJU/aZ0qD1RbnPD4KwmrI1thJuDRaa5wLtG0naGBRLM22QowMhgNB4lI6LgTjA6CdeVlQKGm9Zc+BclDRdF7aSUrTBo/Ocr9dEF6EMTKW1yPFPobM+jdSPColRNQTRUCEvL6kE0qv+TiWCdgrJQLv7bgPDkA6Top07qeoY2MUYkChMYXGp0hpjPSz+gXZZ8arMSgUrulkg85jf2iFc9mSkZQrU
/An7q+u6WgzyQskcyUmfJBqv7CWlElhy4XZ2iVbNsUkY2J5sylslV3yVIZslwl5bi8F4S1kAmPsOoi5Mn0Xh61KqbD7atpLj+/UPZ0EYk4pyLRFBFJgdnBIjJHheExZVSIJ+sDIV7b07MtbpZYPX2/VqXqXwpiWTH0fQxZfQjL43L9LKpK0wakgTPyUCEmuk465RxeC3DsnBGyiF2Z13u2FypQSKUDPYktIJXXM9g9LDmoySkhHIeV7PxJzAhCDnOM2w4lKydioTmIspPKCZExOTlOHVko04Ktjh7c+mZBWhLQFUj4AWcCzUVHuY2r1aSLfSiq+h1LfoixKVGlwTUPjHSkl6kUtwihRyF3VUcVj//Bxnv/Nzy2v0MM/9Qhbt7YYDKps1APj0U+j1b/LdDbPi6haTtm8/evi48CFt/tN37JQmevSdaJGaY1F+GiGyHFlmhIYXWTimyBAShXHhLvEksxsc+Xro5BfW+/wvqOoSozWBB8oCpsJdlkTAEmGdcrt0CwOlJKMBYYQMUbIriGkjNTldTdIu9AHsQa+79ww9eursPt9CLRe2neShIp1Mc4TYqCsKjrvZGwZMUNynSMGD1oTgsidJ4TQmHwg5Umm3t68p6Sm/hosYU1O/HniAO+6Zu/F6h/uJwEwWuBZpGonJXQSqCbmxVb3/UaOWdnBx3wB8kmOiaZp8viGjGH0PvF5lxZYKc95p/9/e+cXelmW3fXP2nufc+79/aq6qqtrupvOiM1M5g+SKDqZiBETTcCAiJCAmLyIT/ooqI8B34P45oMvhhBQGQRBiMJIGKMMJiqK6Jgxhokm3T09U13dXb8/995zzt57+bDWPvdWdVX1r7qrZ6a77oJq+vf73XPuuffss/faa33/yP7mlHmm+g59LhOI6zUnBxu6WlWj/bUyvoobWID1hkJg6AeGYbCHy+kh1VHk9hmFYbWyBzQEshrIMMj+dQ/PGrccAn6+H5G6RLdeGXe2VrY706aX8SEuf9X/E6yEhsA0jqRhQENcEA0fdPgr7BePg82bqu1aanPdC+oCUIDarqAh/W1n7IuvWjITYrKxVx3Ep4Wq854tcDCmlsXdgYfLmNK2uMnS58S50tS2G/fJ0W2E1Y+z5Kldo3hry8RWKjbp4CqGfUrM3usPMVGrMS5qzkbDk0ATptJi/gjjWEipM6e3gJWFbfYCLdQ6o2FE0quUEphzZjX0rNZrQu6oU2aqhV2eef7aNaJX3J7/p1+iWyVe+zOvcftbt/nzv/JTpKkjnpr+fEqRkL7BnBN37/5pPvuZz/HO2V1ef/N165YUvX8IHf5/a68s3+3TjIpZb38cQ7Fr/01iPEfDTRP+6XtSbwJkuRRKzY6zCGYQhFsCq9+XaoJBVghx1kBVM8qJzelPiSJIA30WK99P84RRajFTrYBVDObZ2FOxMwVQEdPGKEol0iXTZCk+HwcHJRor5kBe/Qrx4DxSq9mX52y9farRuFMS5t3Wx5Cyu7w0qWOXTS4u+iJa0dk3Fn5saVU7sbmiZJsHtOAMFbV1zMqQDwzTT+Yi/7i4egIwF8vKajXzjhApuIlCMK52rRWdZ+srFZ+IS4UYCMmMFjRno2UlgZzJdbb+LtbXaUCTxjM9BAEYpiAszlhdjIv0b993hNQ5u8AGKmp0lVLKUu7tukQIcfm7qjJPsyvJcZ94Sa2VuU5kgpfIbACVR05uE/AvgL/+BLfgaYewXq0Z1mv6EM31KwTyUAj5HVPCAlq5ozntRWdQrFYrRA1vodEWyA/3WHgr4MHvzPv8tgB77z0aMM8EUKov3pbYBe+1M2fvuxevPvl71OoJIVbau+/9/d+STNo5pbV3HDwaNFIx1LPRnhz1uzMlxTB0ZphTClKqfZcxLDNbUFwLYN8eK6Uwz9ZWUAQcZY0auFREKAriOugxRB+8soAJA3vL1UUyVr5L5d8BP0uIn0N5g9000kuw3vIqMc0T987OeOn2C1xbr43TrcrP/PO/AF/Z74TCiZe+XDAm18Kdt75BCF8gpZf4wue/yFtvv2UMhVAIYaDUbl890WpSs60tqA8rqX7YOAP+9VM+5/cq7gH/BnCGS7V7P82ZPO1MoAyf64KxQ4JbJiMG0FN6Glgtu4JdJRLFVDO1WFKZUiSXbLv1Yu0tK/Hv6bGtKht8NPX9QM2F2T1OejfgaZRExZIFo2HbfFBKYZyfzB3vwVd2/UAcBuZs2C1D4zozyF/djlGt1ob2tcEe5wShVfasfYeIJQc0ZVFf7HX/OvAx+sgre+C6P6G7f3iSBKBk14Y/ODJ1tkYvsNZWInJRFy/7iihlmoyy4VvKOhnAxLY3xTNf65eaOU/cT87eWh9WK+gHaskLsrlqtQUBmMct4g+ZIf0NZBbFBkPji5uZjNldFgc2NoGXZvRrNpOTg+Cb1Oz+vWxb+4OJCq1FGVYrgxJl6BGSg8vA2jChS2YU01gXqqz7gS4k269MG0d46+Fz84Hi4fxgdeU90GATVCvSabD2hD280al1EQlq7nwqNh61SZm69nlD+T5wsQGhet3UFA4FJaAqhGCWv7UY7c+SQyvja9N9EEPpI7JIiy6JRnRTkaqErkN9/EuxcSUSbWw7falL0RDXIaFqwisikSo21rMWghr3fyploUOJQuw6uyeO7aB+ixDugP4Mwq+5hno2TwevrL397ru8cecOL9+6xc3T694mMyfD4BNps0A1nrew21zSx3c4uXmXcfsKN6/f5GR9ytm773gVsFpiL7R0xO+oV3me+oSpwNef8jm/1/G6aTqkzkrUIZBrZZxnAqZdkrreW2GFKLbRUNf313kiubKpRDUtD6ppBYTg1SnnMpkHt0kBx8ScJ3K2zZBhRIQ+JHLJzDVbu00gxlb9VGNDBSCrL7osNLg5Z/outtIeTSb3yRZKu95pzoTULwu1OPYlZJ/7JaDYNce1of9Nh6It8mXBxyj4cxZsk9pcffy6Frn4+oNr5/69jquzAPDJNUWnAHrWihpq3xf9NmnueyxiO6VqKlK42AIIoesJIVnPMw5+jPE4U9fRpchJVRiNBth1PbNLZNaqlGl2vWubVKtCCEofjCZYGv8b572mRM2ZEAwVm0um1Ow8WFwW02UHFfJsfNvqKOqh7xae+SFW4AcrlGkaudxsrHzsMrmrfqCuK1OaiOmU07g2EE0p1kMP1k8U78OP4+hmHw+WyZ7aZS4LqBZzGTP53wptRySK5tmc9pJl/pWCqoDrSLRS3yIkVT35PGjrNXChSHCcQXMIM1CqUYDtfUMzCAFiH0HMCUwxjQHcO0FsxkS1EjHFsdI4xVopNZv+gYgj6qPTG11vUqBEA3Ilb73kamNTvWqFQJ6hoAxDj7r2usSmcvldSr2LyiuMc2Y1FAcJ2ncSHEVep4nXvv0G/69Ubt+6xa1bN1mvVu5s2FoQwjzPXFxest3tGIaBzear1Hob4Ta3b32Kd9++awl0/hxmEPV7D9zTpz1QBIOqFYwB8DEMT5JU34LYMRUz+MFNebrYg1ipf5qsWleKleuHoTdOvAiTq/81f5MQhZhNKIhS0blC
sNepirviJaYyU0phmq0qIAFi6A3MGmxcT7kwlxmJMHQ9ZS4oleKJeAw2pg2OYJuhPOcFb7WYsb3/V3HfrjvFAH3POI5Lq9E8Oew5jwjr1YoxZ/I0EoaIpgBdMPvumq1tOWW06fqHvRkcS9ffopSrtyuelXgiDEDoItWlIR295DQQV5+jASYOAGdalx1GO0ZECH0HXWfGDdnKujEYar8uYCOvCHjUWtnOW6Ot5ELqfOL08qNgFfzRPe1VXXFLDGBmb1+pZWd0xVqcuWBgxLhamSAFtpspuL4AYUGaGtLWxYoeygvdYSW/G094K55e5JI5P7tHJ4GT9ZqYAvOnv8uv/Po/Zvrs66z+1Zc5/aW/Rr7bU3aZVd8To/WpyzQtk1Pf94zT9KEwAA+L+9tujsrV4smHTQUVQaJVH5h130XwidBgzJboGb7D8QQScLH/fbkvxIUZ0WTOhQJafEzofvz4PU19t+x2zQ1tLx6UYiIGYcwTkkEKxNh5i0zpVgNob+cWSCESqpmzTPPsctTJ7H6xa9Ug3vcVMhnBy6xViCmYAYsqSYJpHYiQS0bCHXL5LIS/gsjXHKjnrQIBLZmh71n3g6G4BbbbrVXpMLbMuNtxdnbBbtwZIyZaBSKFQEr/gNT/Erdv3eYPhhXjuEX1BFg9xRHxqHgZ+GngK8Dme/B+H0GozSUSAv1qxVwVnYyRYa1Ha79oby24iievUsllssXXKaV4+7ELJmyW50wuZuOLFgJxMalSp/aFYDvzxgrKuTDVTEodoeCmRC6HWypYPkGuxVqu2ByffF6UYNoc81ToUkdIjSEij/wKHvnVVDV9RMd8davB2A650J2soFR2F5fo2tQN6+XWdvYOBLd2maIlGx1YTVfANoiHbJF2bcfF/8G4Og0w+cI/+04eDFxVFHFpyqbahAOzCGLHOeBCmrhEcHGWaaLMMyklOqxUX+2Jsf5oDHvXNYwDm72/2HqiAXHEalwWAdWC+OKtvqjYwJpQzUAhdgHNwYUlTJMgxkQInkkKCzhLgusEAFXq+8jkvgH8d+Ann+Q+PNXI88zzz99giB1aK9tPv87ZL/8q8xdeA2D78/8JJXD6936RE12xGlZuA1rZjSO1FE7W62Vx/GgeG+/blT1YT0KAJBDTIlZi5T+X5g3mA2ACHs7/j3vf+ajJXk807vtotKkYk6vuGfak5Gw90BRdaMd68VaNqMuuxtjD0SRPdSlacKhElrrOWgxieUkbhzFGJ62ZfGsQE5nSIITU2/h1EZfZ22uie072MnEzUwvOvx7IWk1CNqh1HvJXifKnWA9rOq/iEEw2tdTCOE+8c3lB1/fsdiNVld00Mu5GZ2hATMLQDbxw65aBzgT6YUVMkVyU3fZ3uXH981w7vc44bh92M68cfxL44Sc64g3MVe/jHaLKSiu7eQbvq9sfHIRapgWT09T3Gj25aTCYDXCkbCeaNknTnqi1LKY/bX6MvkNH9hbB1c+t44iW6lRD874QhThPFFXzKdD2LBjDapoMC9O5T0bZXELJmNTOe2eJP/LAz+95hcJu3LliqwG4x83GEpVqmjESAzplpAswq1UDxcq1WoopZcaARstcSjFvD0sI6uJU+agE5ZOM8L9KXL0F4MYn4ou5zq6iFGyAWPnfd3MizhrABnixHqiK2ASvGKCjmnITycqWJc/G5axKLco8FtNr97DJ0W+oWjlKHDATYzInq+ylrqpoil5eM3/tWkwR0PjpkdBbT6iIUuaJQcLBIJVFVwC13ds8Tcw5M8+za8c/Lqe8yqB68qz5KhFD4Nbztzh79x7v3rvH/OVvkX/8/nLt7ud+ixt//xdYyw1CEMZxQnKlJEMkB1e+k4/gGgUMhCdeehdz5Apdj9L67kDj2S8GIPv2koTOZZx17+LVJKLFetlgk2Ep2RLKIC6+kwg1m0Ka98FTSpRs4KPGv1bvIUaVRXcAvEogttuwZ2BpdhkYNugi2iMIUgv90HFy/TneeuceOWfI1sIoIvYstFKuL+A12+duNFZqJU+zAR/7jiK6cMGfP/kNXnnx7/Duvd+n6v+2z1TN8yLFyHi55fXN66yvnXJ6smboEqfdNWLq6LpkFEUR61HHRke0VkeKsF7/DsPw47z40svcffsOzSP9g8SXn/iIP+CTkACgCk1U6b7f8/21EBkdYNVan1eJpq53hXjcMJFkSn50yjhP7DYbBzy6FohXRIK3e0PvhlDZWnpVFPqOmDq0qGEBfA5YGERwH/jvPdfXqn+eCDxrycDVKwBNg7mVbFtpvqqL46iV78EyVnzxKI2TrdjWvy5tAE2mb13xSbVUu6mYqUiInSOiLYpbUIYQiF1kWJlFpO3uR9/ZlYVGOM8T3WqwARUC/coU2spspf8YEpViqFgV5mJ8abCBYeJG1TjWXgZOyYyGpmYOBA82t4Df5PGgpQT8DR6fAJwC6yvdm8NoO9TLywvuvv02CqzleS7vnVJv7Ceg+ObznA7XGFhTsvX3kMxpSuzm0cE4+RFtjqcQ3rtH7HolBIooMvsYqJ4EVCVgoB6jaLaRFQyBXxRKJg69a/Eb7uNQtlO1UnH2SjVlPgkRydUqAWptg+YymGfjQaN7NciqNvbqXJDoe4po9sMUB16BJRGzWk9Swav1dB1sLs+9xRINrBQ7hEBxQ6R9FrEfTDVnb0UYY0FRZrVkWzSjU+b6C3e4ee0628sX2KkY3CZYPzWXynZ3wTRNDGcXvPip23zqhVs89/wtTk7WVuXw9lh1cFQzOwLDHwhvMU5f49atzyxAsKv2fVtMGI7/yeJN4M4TH/XxjUdtKcSfkwa6a4wLWRYwYFm8pK18bQH0PRl6UK4XO+9+bdSDn3W/8WE/pxxeZQMOy8H7Piwq8GsHx7V3AhhO1uxmWzvSMJAvdyCROJjgT5WKYKBbdhnTLPEkfsrWHhGoJVv1WeryDVqFxB+oh1xf0/Zvrc6Li4vH3ZhPbFwdA6C637E1KL/VbW23LUqN3iZYBicLjc9AhDbR4+VNcRvKOudl1yKOfBWpMNdlJwfwo2Vm9B5oyJW4KeRcUNlnbjY49zupWDJczub8F5zXjV2b5LyUaYNAzZWXnDfaq/InXO8/lEyo2AKCPXCTKp7WPOSZLTw+rR+Bf/Q+X/iPYECrR8WP8iiP9FQKq3fu8UNaWQ8Dw9c/x+Xf/qv811/+CpsXN6Tf+TS3/+7fIl2colKptZijlpgoSB8jk1rCZQC6b77PtT4uHj6pSa3L5KWtolRBaZWBvExG1veL+4QgJKsaNOEeL31KcOyAetWpvZePQ/XkrgZ1vIAuAkTTPPl5dOkfNvXH5TzONy7Fdt+qSuqMHlpK066orjXh7AXLIthc7ggSibGjiiP6myy2REM5x87ArLUuCTewTOZFTThJsb4nLcmQc1L4GtevfZGw+Qab8R2/FEVLYeg7hn7Fxfk5333rDirK+dk5N25c5/pzz3H99JSUAim0ErFpZhRVtMK03XF2+RusVzfpu4HtbvPQe/q4+H3gnz3REe2D/2Coaz7NsCdC9mO3laE9QUXbYuz3XwRSohtW1Dl
TpsnwIymZh4f3v8s0L7oVYGV1M/SBPkakVqY8UzDMClirtYLRCBexqsJqtQKaDa9VknLJpBAYUsfFdmutrVqYtlse9Zwfxn1/FdMTyNNE2YzI0KGyZ8r0MVGTMrlGR5KBuhvNiKpP5ntQKnXOe6t3nJGDt1DKXpzrwSRFD57z+cBw7lmLqycAycuqll55Vin7xdc3KQSzV1WX3W1oTPUea4hGtWuTaVUDPFW3qhVPOauvrDXuL/EvNp/6q96vAnzAm3uiys+en3+gY59O/E//96j4PzzSI30G3n0V+BJsHZT4q6/y6ps/z+aFDd9544d587XP+EJYFiplCIFpHJGGrZhnv3///kN8jkeX3pZSvxv8gNPtmjCRt5C06TB49m/iQUKpGDAzdNb2cfyJekVqea+c3foXb0UFK7mLLOJCVFkYLMH9xH1bbIIj+KRtABXjICuEaFTYiFDUbYezD+Jk1sAyV6QbHNzqrANxRDdeXYtWlTCKoO2+l/U/m067+o4nVHdq8yTh3ttvM00j6/Rl4vo/EGViO43UqMR1pO87zs/PSQFeun2bl19+iaCwG7e8+Z1vczcmTtanXLt+jWG9oouBWgub7cT5xQWbi3M0RLrVb3Fy+kc9ATjGBw31/5ol9rKzWP5qVsCFruus7+7JbGZctFLMLdPGdK6VgGGZ8jQiYhXSmq0UFGNg1sp6vaKfo2n6N9R8h1VZA4QgdH1Hkh4JdlzAGAdd1zPOYhoBBE5OTqgI08W5JwBP/iUMXce1a9fJJyNZK9M82qYtRm+zZuZc6LveHob1YFiacaLx/RvjocloN7p/wwUdlvfh/p5/M/j5JJr8XDWungBMBq7YT6wODqsHud9cjRpV1VfwB7MuUwY0pHZcyp2HZSld5E4NaHU3BL7a9/zZaTqYEe1mH5afDktUj477s1R9yF863Utu7NoEfbDN35c+v9/9osfvykV+F5FT4I8tv/uhf/t57g09/+uVVxDXmzHr2sQqRsuicyUMA3m3I5eM6o4n3e1dPQSj5Bm1tBb25e5go6Bx8A3MFy0pcRpf0YYJ2QNTTWo1WhLQ3iXFpX9vgjxt9OrBdXipsFZLUIP/fJ9iSDuHmHU0MO12C4NAm/CVWKk+qE0+IZneevM8iDGQS4ZqYCere0TmcbeUXQn2XUQx9s2yaFTzowi5ON1PObu84Dvvfp2b1z+DhBNOVmtWXUd2GeDNdkOumRdffJHU9QSJ3L75HKVcI9diFaCpcHbvHrs7dxiniRRs1zcMAzduPEdRpfKHnJ7+N+7e/SngLwH/F3g2S6dPI6yUH033IgakBihmsyypRzvDaDDNDnoupL6j73vmWulSx5Qz/dCTxOh5m43ShcR6vWaurb1ZWXUD0zRC19HPhvbvTwdjrLjmRlNLXUr+4tTvnNHdjl5AUEKfqNvMMAzMV5p37482147bHWfn50QCdJFSZupmJqwG6m5GtRA0MPcK42QS53NGVMnTtAh52XS8rwCrU8Dt1/fP0w/O2UcQ4BVDDrySD+fD+5ZUVXMCDOJgIt/itV5RaVsfJYju0ZvL6WxBiGp65xWliPAfY89v970LpIlRVDAd7TzP3mPt9gkC+x6P6QBYpp1SR87ZS6zOIggBkx+2EvhP5Jmf3u44j5F/8qlPAcHAYtjEn0QYqeTdju3Z2UP7S9/PEIAYuH7zGt1wi4hN+qgpf5mfPQSpi+a2omZ8gxC6jlwzUy4OoPmXoB/Bji94I53gnkFqvXUHvjVKPdjvJAYmpySKmHVwo2VqrQunX1wdkppdiEeQlDDgoI2ZkjNFbMdTs/f6g9uI1rB8kxptO6ELrdAxC812Wl1tLIov2mbSEmBRYNNaIHUoDZ1fkJIN7xJ8dxJAtOz7sn4jxaVcm0MmxXQ29vbbgAhFK2+fvUVK3ySGnyPWf4j6QrIZJ3KpdP3AVJVv/+FrvP7Gt/mJH/sS16+f0sfgoirGPpimmXGcHUyLyzMrOleKFob1FuvNvwzN/vsYHyx8egzueilRoBNTXRWjYyoV6TtO1mbONU07U2SslRB6Tk/XxtpwBcl+dRMRYdyNgNK514lIpRt6UuwYbty0qTLYNkunQhw6Vyeclp3zxWbDZrMhVOutr9YrKJm8KQz9imk3WgLMkhpzsBo89PNa+9VeM/Qd4zzTrXrGcWfW8KWQLy5Mgn29NvOscSRopU6jt+h8I1pqYw3bHMLBAn/FaflZXvzhSUCAj7i3tki892+qSojJevyLDONyhIs06cE5WslGzXRHK9oejLAHJUkMxCrmD11NFKUt9DEGivdPk/fH1GAKBohy9L5WaE5/KUaCJPI0QxKqtwxWtfLnzs5sV+w0lUbNUtQnyPspcu/fBftoo72/lEq6OEc27xLkHsHlk2Mrq/uLbX53B719YccUwnKhuBLi4bk/zHXdZ+nSTlZNxERd5Qy/Z3hyVrE+dq116eu1ipFmrxJ4m8k0+bMZ9Xj2oDjGI1hfdJ7nhdq0X2zFx0Oy93GMSAjJu1jV/R8Se4yAjdVWarTPYpUIdZ+A2CdUotsL22da5EmbtnoyMxMcqRXEXeBqdcXL4JbVnlT44r9/7ow+2KcE8p8Zxx9B6+j4DWWaJuZpZDfNXG53XO52PH/zGmfbS84uLzg9OeV0tSIE6PqO9WrNarWizJndlJnnTKEQsDJzDBvgvwB/+QOOhmO0CDGhQYxKGhNVszk9ZmOt5Gk00LKaN8bQ9/R9x3YaCbWy6gd3P63mLVGsKhTEFCtP1iduptMz5Znt5Yaba5OIFmFx1LvYbulrJZfMar1iu9kSQuDmjRsgwul6jQSTrI4xMG62KMr6ZM142TONzaPhfWYIXerGAAs9VrWQdyPr6yeUGGFWuvWK2U2tZJxN1M0Lfa31tYBRa1tHnu3F/IPEE4AAH/+3wwUiKF5GbaJB4nKsukhdNt3rplQXRMwLKwCuX2+0QoGibuBiZanq2tfFF+WiLL1TcaWXKrP1dqPJ3aqqldem2SbavrfJtZr7lJVp6wJaHFT5sc3HuNc5Tph4yofhbf8PrMz7dMPywAODKN3LcwrVy+tCwaWIaQs5+wde5OBcLAkNYFK9/sN1Vf7mbrRE8jBha4mQspzrQbaQHLym9Rh9Ftu/6X0f7IHy4th6i7KfnA7LZwoyG0jrPtDiJCytsYOLes9upSVnJbP61jfdKvV1av3joL8OWOKrrojZUP7prXuc/vY75loYAskpkmGxL7aTNxVIxRIXVeXVaeQnEeCLXCVuXulVz2bUajbUWpWcR4imURFSh5yekHYTGoXURaSLTNXc8SRF+rRGUm/DSCqbeWK83DJ0HXHokGFg46C9zWaLSGDVrxlpi6dZr5dSSOsVu1ypwHixYT30dCmxm2ak69h5QrIHykItM7PqglexeIJtgsDFxQUlV6YtoMrm/ALU3ExzzuTLjbeCdbGXP6T3tvL9voz//d6CffxC9FmvgRzjGMc4xjGO8QzGsYl3jGMc4xjHOMYzGMcE4BjHOMYxjnGMZzCOCcAxjnGMYxzjGM9gHBOAYxzjGMc4xjGewTgmAM
c4xjGOcYxjPINxTACOcYxjHOMYx3gG45gAHOMYxzjGMY7xDMYxATjGMY5xjGMc4xmMYwJwjGMc4xjHOMYzGP8fwhMyZo4rsawAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "loader = LuxonisLoader(dataset, view=\"train\")\n", + "for image, ann in loader:\n", + " cls = ann[LabelType.CLASSIFICATION]\n", + " box = ann[LabelType.BOUNDINGBOX]\n", + " seg = ann[LabelType.SEGMENTATION]\n", + " kps = ann[LabelType.KEYPOINT]\n", + "\n", + " print(\"Sample classification tensor\")\n", + " print(cls)\n", + " print()\n", + "\n", + " print(\"Sample boxes tensor\")\n", + " print(box)\n", + " print()\n", + "\n", + " print(\"Sample segmentation tensor\")\n", + " print(seg)\n", + " print()\n", + "\n", + " print(\"Sample keypoints tensor\")\n", + " print(kps)\n", + " print()\n", + "\n", + " h, w, _ = image.shape\n", + " for b in box:\n", + " cv2.rectangle(\n", + " image,\n", + " (int(b[1] * w), int(b[2] * h)),\n", + " (int(b[1] * w + b[3] * w), int(b[2] * h + b[4] * h)),\n", + " (255, 0, 0),\n", + " 2,\n", + " )\n", + " mask_viz = np.zeros((h, w, 3)).astype(np.uint8)\n", + " for mask in seg:\n", + " mask_viz[mask == 1, 2] = 255\n", + " image = cv2.addWeighted(image, 0.5, mask_viz, 0.5, 0)\n", + "\n", + " for kp in kps:\n", + " kp = kp[1:].reshape(-1, 3)\n", + " for k in kp:\n", + " cv2.circle(image, (int(k[0] * w), int(k[1] * h)), 2, (0, 255, 0), 2)\n", + "\n", + " plt.imshow(image)\n", + " plt.axis(\"off\") # Optional: Hide axis\n", + " plt.show()\n", + " break" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.13" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/luxonis_train/__init__.py b/luxonis_train/__init__.py new file mode 100644 index 00000000..c89890e4 --- /dev/null +++ b/luxonis_train/__init__.py @@ -0,0 +1,6 @@ +from .attached_modules import * +from .models import * +from .tools import * +from .utils import * + +__version__ = "0.1.0" diff --git a/luxonis_train/__main__.py b/luxonis_train/__main__.py new file mode 100644 index 00000000..f7b27a3d --- /dev/null +++ b/luxonis_train/__main__.py @@ -0,0 +1,108 @@ +from enum import Enum +from importlib.metadata import version +from pathlib import Path +from typing import Annotated, Optional + +import typer + +app = typer.Typer(help="Luxonis Train CLI", add_completion=False) + + +class View(str, Enum): + train = "train" + val = "val" + test = "test" + + def __str__(self): + return self.value + + +ConfigType = Annotated[ + Optional[Path], + typer.Option( + help="Path to the configuration file.", + show_default=False, + ), +] + +OptsType = Annotated[ + Optional[list[str]], + typer.Argument( + help="A list of optional CLI overrides of the config file.", + show_default=False, + ), +] + +ViewType = Annotated[View, typer.Option(help="Which dataset view to use.")] + +SaveDirType = Annotated[ + Optional[Path], + typer.Option(help="Where to save the inference results."), +] + + +@app.command() +def train(config: ConfigType = None, opts: OptsType = None): + """Start training.""" + from luxonis_train.core import Trainer + + Trainer(str(config), opts).train() + + +@app.command() +def eval(config: ConfigType = None, view: ViewType = View.val, opts: OptsType = None): + """Evaluate model.""" + from luxonis_train.core import Trainer + + Trainer(str(config), opts).test(view=view.name) + + +@app.command() +def 
tune(config: ConfigType = None, opts: OptsType = None): + """Start hyperparameter tuning.""" + from luxonis_train.core import Tuner + + Tuner(str(config), opts).tune() + + +@app.command() +def export(config: ConfigType = None, opts: OptsType = None): + """Export model.""" + from luxonis_train.core import Exporter + + Exporter(str(config), opts).export() + + +@app.command() +def infer( + config: ConfigType = None, + view: ViewType = View.val, + save_dir: SaveDirType = None, + opts: OptsType = None, +): + """Run inference.""" + from luxonis_train.core import Inferer + + Inferer(str(config), opts, view=view.name, save_dir=save_dir).infer() + + +def version_callback(value: bool): + if value: + typer.echo(f"LuxonisTrain Version: {version(__package__)}") + raise typer.Exit() + + +@app.callback() +def common( + _: Annotated[ + bool, + typer.Option( + "--version", callback=version_callback, help="Show version and exit." + ), + ] = False, +): + ... + + +if __name__ == "__main__": + app() diff --git a/luxonis_train/attached_modules/__init__.py b/luxonis_train/attached_modules/__init__.py new file mode 100644 index 00000000..c5116aeb --- /dev/null +++ b/luxonis_train/attached_modules/__init__.py @@ -0,0 +1,5 @@ +from .base_attached_module import BaseAttachedModule # noqa + +from .losses import * +from .metrics import * +from .visualizers import * diff --git a/luxonis_train/attached_modules/base_attached_module.py b/luxonis_train/attached_modules/base_attached_module.py new file mode 100644 index 00000000..a015e09f --- /dev/null +++ b/luxonis_train/attached_modules/base_attached_module.py @@ -0,0 +1,141 @@ +from abc import ABC +from typing import Generic + +from luxonis_ml.utils.registry import AutoRegisterMeta +from pydantic import ValidationError +from torch import Tensor, nn +from typing_extensions import TypeVarTuple, Unpack + +from luxonis_train.nodes import BaseNode +from luxonis_train.utils.general import validate_packet +from luxonis_train.utils.types import ( + BaseProtocol, + IncompatibleException, + Labels, + LabelType, + Packet, +) + +Ts = TypeVarTuple("Ts") + + +class BaseAttachedModule( + nn.Module, Generic[Unpack[Ts]], ABC, metaclass=AutoRegisterMeta, register=False +): + """Base class for all modules that are attached to a L{LuxonisNode}. + + Attached modules include losses, metrics and visualizers. + + This class contains a default implementation of `prepare` method, which + should be sufficient for most simple cases. More complex modules should + override the `prepare` method. + + @type node: BaseNode + @ivar node: Reference to the node that this module is attached to. + @type protocol: type[BaseProtocol] + @ivar protocol: Schema for validating inputs to the module. + @type required_labels: list[LabelType] + @ivar required_labels: List of labels required by this model. + """ + + def __init__( + self, + *, + node: BaseNode | None = None, + protocol: type[BaseProtocol] | None = None, + required_labels: list[LabelType] | None = None, + ): + """Base class for all modules that are attached to a L{LuxonisNode}. + + @type node: L{BaseNode} + @param node: Reference to the node that this module is attached to. + @type protocol: type[BaseProtocol] + @param protocol: Schema for validating inputs to the module. + @type required_labels: list[LabelType] + @param required_labels: List of labels required by this model. 
+ """ + super().__init__() + self.required_labels = required_labels or [] + self.protocol = protocol + self._node = node + self._epoch = 0 + + @property + def node(self) -> BaseNode: + """Reference to the node that this module is attached to. + + @type: L{BaseNode} + @raises RuntimeError: If the node was not provided during initialization. + """ + if self._node is None: + raise RuntimeError( + "Attempt to access `node` reference, but it was not " + "provided during initialization." + ) + return self._node + + def prepare(self, inputs: Packet[Tensor], labels: Labels) -> tuple[Unpack[Ts]]: + """Prepares node outputs for the forward pass of the module. + + This default implementation selects the output and label based on + C{required_labels} attribute. If not set, then it returns the first + matching output and label. + That is the first pair of outputs and labels that have the same type. + For more complex modules this method should be overridden. + + @type inputs: L{Packet}[Tensor] + @param inputs: Output from the node, inputs to the attached module. + @type labels: L{Labels} + @param labels: Labels from the dataset. + + @rtype: tuple[Unpack[Ts]] + @return: Prepared inputs. Should allow the following usage with the + L{forward} method: + + >>> loss.forward(*loss.prepare(outputs, labels)) + + @raises NotImplementedError: If the module requires multiple labels. + @raises IncompatibleException: If the inputs are not compatible with the module. + """ + if len(self.required_labels) > 1: + raise NotImplementedError( + "This module requires multiple labels, the default `prepare` " + "implementation does not support this." + ) + if not self.required_labels: + if "boxes" in inputs and LabelType.BOUNDINGBOX in labels: + return inputs["boxes"], labels[LabelType.BOUNDINGBOX] # type: ignore + if "classes" in inputs and LabelType.CLASSIFICATION in labels: + return inputs["classes"][0], labels[LabelType.CLASSIFICATION] # type: ignore + if "keypoints" in inputs and LabelType.KEYPOINT in labels: + return inputs["keypoints"], labels[LabelType.KEYPOINT] # type: ignore + if "segmentation" in inputs and LabelType.SEGMENTATION in labels: + return inputs["segmentation"][0], labels[LabelType.SEGMENTATION] # type: ignore + raise IncompatibleException( + f"No matching labels and outputs found for {self.__class__.__name__}" + ) + label_type = self.required_labels[0] + return inputs[label_type.value], labels[label_type] # type: ignore + + def validate(self, inputs: Packet[Tensor], labels: Labels) -> None: + """Validates that the inputs and labels are compatible with the module. + + @type inputs: L{Packet}[Tensor] + @param inputs: Output from the node, inputs to the attached module. + @type labels: L{Labels} + @param labels: Labels from the dataset. @raises L{IncompatibleException}: If the + inputs are not compatible with the module. + """ + for label in self.required_labels: + if label not in labels: + raise IncompatibleException.from_missing_label( + label, list(labels.keys()), self.__class__.__name__ + ) + + if self.protocol is not None: + try: + validate_packet(inputs, self.protocol) + except ValidationError as e: + raise IncompatibleException.from_validation_error( + e, self.__class__.__name__ + ) from e diff --git a/luxonis_train/attached_modules/losses/README.md b/luxonis_train/attached_modules/losses/README.md new file mode 100644 index 00000000..aafbc440 --- /dev/null +++ b/luxonis_train/attached_modules/losses/README.md @@ -0,0 +1,106 @@ +# Losses + +List of all the available loss functions. 
+
+## Table Of Contents
+
+- [CrossEntropyLoss](#crossentropyloss)
+- [BCEWithLogitsLoss](#bcewithlogitsloss)
+- [SmoothBCEWithLogitsLoss](#smoothbcewithlogitsloss)
+- [SigmoidFocalLoss](#sigmoidfocalloss)
+- [SoftmaxFocalLoss](#softmaxfocalloss)
+- [AdaptiveDetectionLoss](#adaptivedetectionloss)
+- [ImplicitKeypointBBoxLoss](#implicitkeypointbboxloss)
+
+## CrossEntropyLoss
+
+Adapted from [here](https://pytorch.org/docs/stable/generated/torch.nn.CrossEntropyLoss.html).
+
+**Params**
+
+| Key             | Type                             | Default value | Description |
+| --------------- | -------------------------------- | ------------- | ----------- |
+| weight          | list\[float\] \| None            | None          | A manual rescaling weight given to each class. If given, it has to be a list of the same length as there are classes. |
+| ignore_index    | int                              | -100          | Specifies a target value that is ignored and does not contribute to the input gradient. Note that `ignore_index` is only applicable when the target contains class indices. |
+| reduction       | Literal\["none", "mean", "sum"\] | "mean"        | Specifies the reduction to apply to the output. |
+| label_smoothing | float\[0.0, 1.0\]                | 0.0           | Specifies the amount of smoothing when computing the loss, where 0.0 means no smoothing. The targets become a mixture of the original ground truth and a uniform distribution as described in [Rethinking the Inception Architecture for Computer Vision](https://arxiv.org/abs/1512.00567). |
+
+## BCEWithLogitsLoss
+
+Adapted from [here](https://pytorch.org/docs/stable/generated/torch.nn.BCEWithLogitsLoss.html).
+
+**Params**
+
+| Key        | Type                             | Default value | Description |
+| ---------- | -------------------------------- | ------------- | ----------- |
+| weight     | list\[float\] \| None            | None          | A manual rescaling weight given to each class. If given, has to be a list of the same length as there are classes. |
+| pos_weight | Tensor \| None                   | None          | A weight of positive examples to be broadcast with the target. Must be a tensor with size equal to the number of classes along the class dimension. |
+| reduction  | Literal\["none", "mean", "sum"\] | "mean"        | Specifies the reduction to apply to the output. |
+
+## SmoothBCEWithLogitsLoss
+
+**Params**
+
+| Key             | Type                             | Default value | Description |
+| --------------- | -------------------------------- | ------------- | ----------- |
+| weight          | list\[float\] \| None            | None          | A manual rescaling weight given to each class. If given, has to be a list of the same length as there are classes. |
+| reduction       | Literal\["none", "mean", "sum"\] | "mean"        | Specifies the reduction to apply to the output. |
+| label_smoothing | float\[0.0, 1.0\]                | 0.0           | Specifies the amount of smoothing when computing the loss, where 0.0 means no smoothing. The targets become a mixture of the original ground truth and a uniform distribution as described in [Rethinking the Inception Architecture for Computer Vision](https://arxiv.org/abs/1512.00567). |
+| bce_pow         | float                            | 1.0           | Weight for the positive samples. |
+
+## SigmoidFocalLoss
+
+Adapted from [here](https://pytorch.org/vision/stable/generated/torchvision.ops.sigmoid_focal_loss.html#torchvision.ops.sigmoid_focal_loss).
+
+**Params**
+
+| Key       | Type                             | Default value | Description |
+| --------- | -------------------------------- | ------------- | ----------- |
+| alpha     | float                            | 0.25          | Weighting factor in range (0,1) to balance positive vs negative examples, or -1 for ignore. |
+| gamma     | float                            | 2.0           | Exponent of the modulating factor $(1 - p_t)$ to balance easy vs hard examples. |
+| reduction | Literal\["none", "mean", "sum"\] | "mean"        | Specifies the reduction to apply to the output. |
+
+## SoftmaxFocalLoss
+
+**Params**
+
+| Key       | Type                             | Default value | Description |
+| --------- | -------------------------------- | ------------- | ----------- |
+| alpha     | float \| list                    | 0.25          | Either a float for all channels or a list of alphas for each channel. |
+| gamma     | float                            | 2.0           | Exponent of the modulating factor $(1 - p_t)$ to balance easy vs hard examples. |
+| reduction | Literal\["none", "mean", "sum"\] | "mean"        | Specifies the reduction to apply to the output. |
+
+## AdaptiveDetectionLoss
+
+Adapted from [here](https://arxiv.org/pdf/2209.02976.pdf).
+
+**Params**
+
+| Key               | Type                                              | Default value | Description |
+| ----------------- | ------------------------------------------------- | ------------- | ----------- |
+| n_warmup_epochs   | int                                               | 4             | Number of epochs where the ATSS assigner is used, after which we switch to the TAL assigner. |
+| iou_type          | Literal\["none", "giou", "diou", "ciou", "siou"\] | "giou"        | IoU type used for the bbox regression loss. |
+| class_loss_weight | float                                             | 1.0           | Weight used for the classification part of the loss. |
+| iou_loss_weight   | float                                             | 2.5           | Weight used for the IoU part of the loss. |
+
+## ImplicitKeypointBBoxLoss
+
+Adapted from [YOLO-Pose: Enhancing YOLO for Multi Person Pose Estimation Using Object
+Keypoint Similarity Loss](https://arxiv.org/ftp/arxiv/papers/2204/2204.06806.pdf).
+
+**Params**
+
+| Key                             | Type          | Default value     | Description |
+| ------------------------------- | ------------- | ----------------- | ----------- |
+| cls_pw                          | float         | 1.0               | Power for the [SmoothBCEWithLogitsLoss](#smoothbcewithlogitsloss) for classification loss. |
+| obj_pw                          | float         | 1.0               | Power for [BCEWithLogitsLoss](#bcewithlogitsloss) for objectness loss. |
+| viz_pw                          | float         | 1.0               | Power for [BCEWithLogitsLoss](#bcewithlogitsloss) for keypoint visibility. |
+| label_smoothing                 | float         | 0.0               | Smoothing for [SmoothBCEWithLogitsLoss](#smoothbcewithlogitsloss) for classification loss. |
+| min_objectness_iou              | float         | 0.0               | Minimum objectness IoU. |
+| bbox_loss_weight                | float         | 0.05              | Weight for the bbox detection sub-loss. |
+| keypoint_distance_loss_weight   | float         | 0.10              | Weight for the keypoint distance sub-loss. |
+| keypoint_visibility_loss_weight | float         | 0.6               | Weight for the keypoint visibility sub-loss. |
+| class_loss_weight               | float         | 0.6               | Weight for the classification sub-loss. |
+| objectness_loss_weight          | float         | 0.7               | Weight for the objectness sub-loss. |
+| anchor_threshold                | float         | 4.0               | Threshold for matching anchors to targets. |
+| bias                            | float         | 0.5               | Bias for matching anchors to targets. |
+| balance                         | list\[float\] | \[4.0, 1.0, 0.4\] | Balance for the objectness loss. |
diff --git a/luxonis_train/attached_modules/losses/__init__.py b/luxonis_train/attached_modules/losses/__init__.py
new file mode 100644
index 00000000..737373d2
--- /dev/null
+++ b/luxonis_train/attached_modules/losses/__init__.py
@@ -0,0 +1,21 @@
+from .adaptive_detection_loss import AdaptiveDetectionLoss
+from .base_loss import BaseLoss
+from .bce_with_logits import BCEWithLogitsLoss
+from .cross_entropy import CrossEntropyLoss
+from .implicit_keypoint_bbox_loss import ImplicitKeypointBBoxLoss
+from .keypoint_loss import KeypointLoss
+from .sigmoid_focal_loss import SigmoidFocalLoss
+from .smooth_bce_with_logits import SmoothBCEWithLogitsLoss
+from .softmax_focal_loss import SoftmaxFocalLoss
+
+__all__ = [
+    "AdaptiveDetectionLoss",
+    "BCEWithLogitsLoss",
+    "CrossEntropyLoss",
+    "ImplicitKeypointBBoxLoss",
+    "KeypointLoss",
+    "BaseLoss",
+    "SigmoidFocalLoss",
+    "SmoothBCEWithLogitsLoss",
+    "SoftmaxFocalLoss",
+]
diff --git a/luxonis_train/attached_modules/losses/adaptive_detection_loss.py b/luxonis_train/attached_modules/losses/adaptive_detection_loss.py
new file mode 100644
index 00000000..89c18f67
--- /dev/null
+++ b/luxonis_train/attached_modules/losses/adaptive_detection_loss.py
@@ -0,0 +1,250 @@
+from typing import Literal
+
+import torch
+import torch.nn.functional as F
+from pydantic import Field
+from torch import Tensor, nn
+from torchvision.ops import box_convert
+from typing_extensions import Annotated
+
+from luxonis_train.nodes import EfficientBBoxHead
+from luxonis_train.utils.assigners import ATSSAssigner, TaskAlignedAssigner
+from luxonis_train.utils.boxutils import (
+    IoUType,
+    anchors_for_fpn_features,
+    compute_iou_loss,
+    dist2bbox,
+)
+from luxonis_train.utils.types import (
+    BaseProtocol,
+    IncompatibleException,
+    Labels,
+    LabelType,
+    Packet,
+)
+
+from .base_loss import BaseLoss
+
+
+class Protocol(BaseProtocol):
+    features: list[Tensor]
+    class_scores: Annotated[list[Tensor], Field(min_length=1, max_length=1)]
+    distributions: Annotated[list[Tensor], Field(min_length=1, max_length=1)]
+
+
+class AdaptiveDetectionLoss(BaseLoss[Tensor, Tensor, Tensor, Tensor, Tensor, Tensor]):
+    node: EfficientBBoxHead
+
+    class NodePacket(Packet[Tensor]):
+        features: list[Tensor]
+        class_scores: Tensor
+        distributions: Tensor
+
+    def __init__(
+        self,
+        n_warmup_epochs: int = 4,
+        iou_type: IoUType = "giou",
+        reduction: Literal["sum", "mean"] = "mean",
+        class_loss_weight: float = 1.0,
+        iou_loss_weight: float = 2.5,
+        **kwargs,
+    ):
+        """BBox loss adapted from U{YOLOv6: A Single-Stage Object Detection Framework
+        for Industrial Applications <https://arxiv.org/abs/2209.02976>}. It combines
+        an IoU-based bbox regression loss and varifocal loss for classification.
+        Code is adapted from U{https://github.com/Nioolek/PPYOLOE_pytorch/blob/master/ppyoloe/models}.
+
+        @type n_warmup_epochs: int
+        @param n_warmup_epochs: Number of epochs where the ATSS assigner is used, after that we switch to the TAL assigner.
+        @type iou_type: L{IoUType}
+        @param iou_type: IoU type used for the bbox regression loss.
+        @type reduction: Literal["sum", "mean"]
+        @param reduction: Reduction type for loss.
+        @type class_loss_weight: float
+        @param class_loss_weight: Weight of classification loss.
+ @type iou_loss_weight: float + @param iou_loss_weight: Weight of IoU loss. + @type kwargs: dict + @param kwargs: Additional arguments to pass to L{BaseLoss}. + """ + super().__init__( + required_labels=[LabelType.BOUNDINGBOX], protocol=Protocol, **kwargs + ) + + if not isinstance(self.node, EfficientBBoxHead): + raise IncompatibleException( + f"Loss `{self.__class__.__name__}` is only " + "compatible with nodes of type `EfficientBBoxHead`." + ) + self.iou_type: IoUType = iou_type + self.reduction = reduction + self.n_classes = self.node.n_classes + self.stride = self.node.stride + self.grid_cell_size = self.node.grid_cell_size + self.grid_cell_offset = self.node.grid_cell_offset + self.original_img_size = self.node.original_in_shape[2:] + + self.n_warmup_epochs = n_warmup_epochs + self.atts_assigner = ATSSAssigner(topk=9, n_classes=self.n_classes) + self.tal_assigner = TaskAlignedAssigner( + topk=13, n_classes=self.n_classes, alpha=1.0, beta=6.0 + ) + + self.varifocal_loss = VarifocalLoss() + self.class_loss_weight = class_loss_weight + self.iou_loss_weight = iou_loss_weight + + def prepare( + self, outputs: Packet[Tensor], labels: Labels + ) -> tuple[Tensor, Tensor, Tensor, Tensor, Tensor, Tensor]: + feats = outputs["features"] + pred_scores = outputs["class_scores"][0] + pred_distri = outputs["distributions"][0] + + batch_size = pred_scores.shape[0] + device = pred_scores.device + + target = labels[LabelType.BOUNDINGBOX].to(device) + gt_bboxes_scale = torch.tensor( + [ + self.original_img_size[1], + self.original_img_size[0], + self.original_img_size[1], + self.original_img_size[0], + ], + device=device, + ) + ( + anchors, + anchor_points, + n_anchors_list, + stride_tensor, + ) = anchors_for_fpn_features( + feats, + self.stride, + self.grid_cell_size, + self.grid_cell_offset, + multiply_with_stride=True, + ) + + anchor_points_strided = anchor_points / stride_tensor + pred_bboxes = dist2bbox(pred_distri, anchor_points_strided) + + target = self._preprocess_target(target, batch_size, gt_bboxes_scale) + + gt_labels = target[:, :, :1] + gt_xyxy = target[:, :, 1:] + mask_gt = (gt_xyxy.sum(-1, keepdim=True) > 0).float() + + if self._epoch < self.n_warmup_epochs: + ( + assigned_labels, + assigned_bboxes, + assigned_scores, + mask_positive, + ) = self.atts_assigner( + anchors, + n_anchors_list, + gt_labels, + gt_xyxy, + mask_gt, + pred_bboxes.detach() * stride_tensor, + ) + else: + # TODO: log change of assigner (once common Logger) + ( + assigned_labels, + assigned_bboxes, + assigned_scores, + mask_positive, + ) = self.tal_assigner.forward( + pred_scores.detach(), + pred_bboxes.detach() * stride_tensor, + anchor_points, + gt_labels, + gt_xyxy, + mask_gt, + ) + + return ( + pred_bboxes, + pred_scores, + assigned_bboxes / stride_tensor, + assigned_labels, + assigned_scores, + mask_positive, + ) + + def forward( + self, + pred_bboxes: Tensor, + pred_scores: Tensor, + assigned_bboxes: Tensor, + assigned_labels: Tensor, + assigned_scores: Tensor, + mask_positive: Tensor, + ): + one_hot_label = F.one_hot(assigned_labels.long(), self.n_classes + 1)[..., :-1] + loss_cls = self.varifocal_loss(pred_scores, assigned_scores, one_hot_label) + + if assigned_scores.sum() > 1: + loss_cls /= assigned_scores.sum() + + loss_iou = compute_iou_loss( + pred_bboxes, + assigned_bboxes, + assigned_scores, + mask_positive, + reduction="sum", + iou_type=self.iou_type, + bbox_format="xyxy", + )[0] + + loss = self.class_loss_weight * loss_cls + self.iou_loss_weight * loss_iou + + sub_losses = {"class": loss_cls.detach(), 
"iou": loss_iou.detach()} + + return loss, sub_losses + + def _preprocess_target(self, target: Tensor, batch_size: int, scale_tensor: Tensor): + """Preprocess target in shape [batch_size, N, 5] where N is maximum number of + instances in one image.""" + sample_ids, counts = torch.unique(target[:, 0].int(), return_counts=True) + out_target = torch.zeros(batch_size, counts.max(), 5, device=target.device) + out_target[:, :, 0] = -1 + for id, count in zip(sample_ids, counts): + out_target[id, :count] = target[target[:, 0] == id][:, 1:] + + scaled_target = out_target[:, :, 1:5] * scale_tensor + out_target[..., 1:] = box_convert(scaled_target, "xywh", "xyxy") + return out_target + + +class VarifocalLoss(nn.Module): + def __init__(self, alpha: float = 0.75, gamma: float = 2.0): + """Varifocal Loss is a loss function for training a dense object detector to predict + the IoU-aware classification score, inspired by focal loss. + Code is adapted from: U{https://github.com/Nioolek/PPYOLOE_pytorch/blob/master/ppyoloe/models/losses.py} + + @type alpha: float + @param alpha: alpha parameter in focal loss, default is 0.75. + @type gamma: float + @param gamma: gamma parameter in focal loss, default is 2.0. + """ + + super().__init__() + + self.alpha = alpha + self.gamma = gamma + + def forward( + self, pred_score: Tensor, target_score: Tensor, label: Tensor + ) -> Tensor: + weight = ( + self.alpha * pred_score.pow(self.gamma) * (1 - label) + target_score * label + ) + ce_loss = F.binary_cross_entropy( + pred_score.float(), target_score.float(), reduction="none" + ) + loss = (ce_loss * weight).sum() + return loss diff --git a/luxonis_train/attached_modules/losses/base_loss.py b/luxonis_train/attached_modules/losses/base_loss.py new file mode 100644 index 00000000..61297f10 --- /dev/null +++ b/luxonis_train/attached_modules/losses/base_loss.py @@ -0,0 +1,53 @@ +from abc import abstractmethod + +from torch import Tensor +from typing_extensions import TypeVarTuple, Unpack + +from luxonis_train.attached_modules import BaseAttachedModule +from luxonis_train.utils.registry import LOSSES +from luxonis_train.utils.types import Labels, Packet + +Ts = TypeVarTuple("Ts") + + +class BaseLoss( + BaseAttachedModule[Unpack[Ts]], + register=False, + registry=LOSSES, +): + """A base class for all loss functions. + + This class defines the basic interface for all loss functions. It utilizes automatic + registration of defined subclasses to a L{LOSSES} registry. + """ + + @abstractmethod + def forward(self, *args: Unpack[Ts]) -> Tensor | tuple[Tensor, dict[str, Tensor]]: + """Forward pass of the loss function. + + @type args: Unpack[Ts] + @param args: Prepared inputs from the L{prepare} method. + @rtype: Tensor | tuple[Tensor, dict[str, Tensor]] + @return: The main loss and optional a dictionary of sublosses (for logging). + Only the main loss is used for backpropagation. + """ + ... + + def run( + self, inputs: Packet[Tensor], labels: Labels + ) -> Tensor | tuple[Tensor, dict[str, Tensor]]: + """Calls the loss function. + + Validates and prepares the inputs, then calls the loss function. + + @type inputs: Packet[Tensor] + @param inputs: Outputs from the node. + @type labels: L{Labels} + @param labels: Labels from the dataset. + @rtype: Tensor | tuple[Tensor, dict[str, Tensor]] + @return: The main loss and optional a dictionary of sublosses (for logging). + Only the main loss is used for backpropagation. + @raises IncompatibleException: If the inputs are not compatible with the module. 
+ """ + self.validate(inputs, labels) + return self(*self.prepare(inputs, labels)) diff --git a/luxonis_train/attached_modules/losses/bce_with_logits.py b/luxonis_train/attached_modules/losses/bce_with_logits.py new file mode 100644 index 00000000..5800cbdb --- /dev/null +++ b/luxonis_train/attached_modules/losses/bce_with_logits.py @@ -0,0 +1,58 @@ +from typing import Literal + +import torch +from torch import Tensor, nn + +from .base_loss import BaseLoss + + +class BCEWithLogitsLoss(BaseLoss[Tensor, Tensor]): + def __init__( + self, + weight: list[float] | None = None, + reduction: Literal["none", "mean", "sum"] = "mean", + pos_weight: Tensor | None = None, + **kwargs, + ): + """This loss combines a L{nn.Sigmoid} layer and the L{nn.BCELoss} in one single + class. This version is more numerically stable than using a plain C{Sigmoid} + followed by a {BCELoss} as, by combining the operations into one layer, we take + advantage of the log-sum-exp trick for numerical stability. + + @type weight: list[float] | None + @param weight: a manual rescaling weight given to the loss of each batch + element. If given, has to be a list of length C{nbatch}. Defaults to + C{None}. + @type reduction: Literal["none", "mean", "sum"] + @param reduction: Specifies the reduction to apply to the output: C{"none"} | + C{"mean"} | C{"sum"}. C{"none"}: no reduction will be applied, C{"mean"}: + the sum of the output will be divided by the number of elements in the + output, C{"sum"}: the output will be summed. Note: C{size_average} and + C{reduce} are in the process of being deprecated, and in the meantime, + specifying either of those two args will override C{reduction}. Defaults to + C{"mean"}. + @type pos_weight: Tensor | None + @param pos_weight: a weight of positive examples to be broadcasted with target. + Must be a tensor with equal size along the class dimension to the number of + classes. Pay close attention to PyTorch's broadcasting semantics in order to + achieve the desired operations. For a target of size [B, C, H, W] (where B + is batch size) pos_weight of size [B, C, H, W] will apply different + pos_weights to each element of the batch or [C, H, W] the same pos_weights + across the batch. To apply the same positive weight along all spacial + dimensions for a 2D multi-class target [C, H, W] use: [C, 1, 1]. Defaults to + C{None}. + """ + super().__init__(**kwargs) + self.criterion = nn.BCEWithLogitsLoss( + weight=(torch.tensor(weight) if weight is not None else None), + reduction=reduction, + pos_weight=pos_weight if pos_weight is not None else None, + ) + + def forward(self, predictions: Tensor, target: Tensor) -> Tensor: + if predictions.shape != target.shape: + raise RuntimeError( + f"Target tensor dimension ({target.shape}) and preds tensor " + f"dimension ({predictions.shape}) should be the same." 
+ ) + return self.criterion(predictions, target) diff --git a/luxonis_train/attached_modules/losses/cross_entropy.py b/luxonis_train/attached_modules/losses/cross_entropy.py new file mode 100644 index 00000000..f073401e --- /dev/null +++ b/luxonis_train/attached_modules/losses/cross_entropy.py @@ -0,0 +1,57 @@ +from logging import getLogger +from typing import Literal + +import torch +import torch.nn as nn +from torch import Tensor + +from .base_loss import BaseLoss + +logger = getLogger(__name__) +was_logged = False + + +class CrossEntropyLoss(BaseLoss[Tensor, Tensor]): + """This criterion computes the cross entropy loss between input logits and + target.""" + + def __init__( + self, + weight: list[float] | None = None, + ignore_index: int = -100, + reduction: Literal["none", "mean", "sum"] = "mean", + label_smoothing: float = 0.0, + **kwargs, + ): + super().__init__(**kwargs) + + self.criterion = nn.CrossEntropyLoss( + weight=(torch.tensor(weight) if weight is not None else None), + ignore_index=ignore_index, + reduction=reduction, + label_smoothing=label_smoothing, + ) + + def forward(self, preds: Tensor, target: Tensor) -> Tensor: + global was_logged + if preds.ndim == target.ndim: + ch_dim = 1 if preds.ndim > 1 else 0 + if preds.shape[ch_dim] == 1: + if not was_logged: + logger.warning( + "`CrossEntropyLoss` expects at least 2 classes. " + "Attempting to fix by adding a dummy channel. " + "If you want to be sure, use `BCEWithLogitsLoss` instead." + ) + was_logged = True + preds = torch.cat([torch.zeros_like(preds), preds], dim=ch_dim) + if target.shape[ch_dim] == 1: + target = torch.cat([1 - target, target], dim=ch_dim) + target = target.argmax(dim=ch_dim) + + if target.ndim != preds.ndim - 1: + raise RuntimeError( + f"Target tensor dimension should equeal to preds dimension - 1 ({preds.ndim-1}) " + f"but is ({target.ndim})." 
+ ) + return self.criterion(preds, target) diff --git a/luxonis_train/attached_modules/losses/implicit_keypoint_bbox_loss.py b/luxonis_train/attached_modules/losses/implicit_keypoint_bbox_loss.py new file mode 100644 index 00000000..7169d2a4 --- /dev/null +++ b/luxonis_train/attached_modules/losses/implicit_keypoint_bbox_loss.py @@ -0,0 +1,333 @@ +from typing import cast + +import torch +from pydantic import Field +from torch import Tensor +from torchvision.ops import box_convert +from typing_extensions import Annotated + +from luxonis_train.attached_modules.losses.keypoint_loss import KeypointLoss +from luxonis_train.nodes import ImplicitKeypointBBoxHead +from luxonis_train.utils.boxutils import ( + compute_iou_loss, + match_to_anchor, + process_bbox_predictions, +) +from luxonis_train.utils.types import ( + BaseProtocol, + IncompatibleException, + Labels, + LabelType, + Packet, +) + +from .base_loss import BaseLoss +from .bce_with_logits import BCEWithLogitsLoss +from .smooth_bce_with_logits import SmoothBCEWithLogitsLoss + +KeypointTargetType = tuple[ + list[Tensor], + list[Tensor], + list[Tensor], + list[tuple[Tensor, Tensor, Tensor, Tensor]], + list[Tensor], +] + + +class ImplicitKeypointBBoxLoss(BaseLoss[list[Tensor], KeypointTargetType]): + node: ImplicitKeypointBBoxHead + + def __init__( + self, + cls_pw: float = 1.0, + viz_pw: float = 1.0, + obj_pw: float = 1.0, + label_smoothing: float = 0.0, + min_objectness_iou: float = 0.0, + bbox_loss_weight: float = 0.05, + keypoint_distance_loss_weight: float = 0.10, + keypoint_visibility_loss_weight: float = 0.6, + class_loss_weight: float = 0.6, + objectness_loss_weight: float = 0.7, + anchor_threshold: float = 4.0, + bias: float = 0.5, + balance: list[float] | None = None, + **kwargs, + ): + """Joint loss for keypoint and box predictions for cases where the keypoints and + boxes are inherently linked. + + Based on U{YOLO-Pose: Enhancing YOLO for Multi Person Pose Estimation Using Object + Keypoint Similarity Loss}. + + @type cls_pw: float + @param cls_pw: Power for the BCE loss for classes. Defaults to C{1.0}. + @type viz_pw: float + @param viz_pw: Power for the BCE loss for keypoints. + @type obj_pw: float + @param obj_pw: Power for the BCE loss for objectness. Defaults to C{1.0}. + @type label_smoothing: float + @param label_smoothing: Label smoothing factor. Defaults to C{0.0}. + @type min_objectness_iou: float + @param min_objectness_iou: Minimum objectness iou. Defaults to C{0.0}. + @type bbox_loss_weight: float + @param bbox_loss_weight: Weight for the bounding box loss. + @type keypoint_distance_loss_weight: float + @param keypoint_distance_loss_weight: Weight for the keypoint distance loss. Defaults to C{0.10}. + @type keypoint_visibility_loss_weight: float + @param keypoint_visibility_loss_weight: Weight for the keypoint visibility loss. Defaults to C{0.6}. + @type class_loss_weight: float + @param class_loss_weight: Weight for the class loss. Defaults to C{0.6}. + @type objectness_loss_weight: float + @param objectness_loss_weight: Weight for the objectness loss. Defaults to C{0.7}. + @type anchor_threshold: float + @param anchor_threshold: Threshold for matching anchors to targets. Defaults to C{4.0}. + @type bias: float + @param bias: Bias for matching anchors to targets. Defaults to C{0.5}. + @type balance: list[float] | None + @param balance: Balance for the different heads. Defaults to C{None}. 
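+        @type kwargs: dict
+        @param kwargs: Additional arguments to pass to L{BaseLoss}.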
+ """ + + super().__init__( + required_labels=[LabelType.BOUNDINGBOX, LabelType.KEYPOINT], + **kwargs, + ) + + if not isinstance(self.node, ImplicitKeypointBBoxHead): + raise IncompatibleException( + f"Loss `{self.__class__.__name__}` is only " + "compatible with nodes of type `ImplicitKeypointBBoxHead`." + ) + self.n_classes = self.node.n_classes + self.n_keypoints = self.node.n_keypoints + self.n_anchors = self.node.n_anchors + self.num_heads = self.node.num_heads + self.box_offset = self.node.box_offset + self.anchors = self.node.anchors + self.balance = balance or [4.0, 1.0, 0.4] + if len(self.balance) < self.num_heads: + raise ValueError( + f"Balance list must have at least {self.num_heads} elements." + ) + + class Protocol(BaseProtocol): + features: Annotated[list[Tensor], Field(min_length=self.num_heads)] + + self.protocol = Protocol # type: ignore + + self.min_objectness_iou = min_objectness_iou + self.bbox_weight = bbox_loss_weight + self.kpt_distance_weight = keypoint_distance_loss_weight + self.class_weight = class_loss_weight + self.objectness_weight = objectness_loss_weight + self.kpt_visibility_weight = keypoint_visibility_loss_weight + self.anchor_threshold = anchor_threshold + + self.bias = bias + + self.b_cross_entropy = BCEWithLogitsLoss( + pos_weight=torch.tensor([obj_pw]), **kwargs + ) + self.class_loss = SmoothBCEWithLogitsLoss( + label_smoothing=label_smoothing, + bce_pow=cls_pw, + **kwargs, + ) + self.keypoint_loss = KeypointLoss( + bce_power=viz_pw, + distance_weight=keypoint_distance_loss_weight, + visibility_weight=keypoint_visibility_loss_weight, + **kwargs, + ) + + self.positive_smooth_const = 1 - 0.5 * label_smoothing + self.negative_smooth_const = 0.5 * label_smoothing + + def prepare( + self, outputs: Packet[Tensor], labels: Labels + ) -> tuple[list[Tensor], KeypointTargetType]: + """Prepares the labels to be in the correct format for loss calculation. + + @type outputs: Packet[Tensor] + @param outputs: Output from the forward pass. + @type labels: L{Labels} + @param labels: Dictionary containing the labels. + @rtype: tuple[list[Tensor], tuple[list[Tensor], list[Tensor], list[Tensor], + list[tuple[Tensor, Tensor, Tensor, Tensor]], list[Tensor]]] + @return: Tuple containing the original output and the postprocessed labels. The + processed labels are a tuple containing the class targets, box targets, + keypoint targets, indices and anchors. Indicies are a tuple containing + vectors of indices for batch, anchor, feature y and feature x dimensions, + respectively. They are all of shape (n_targets,). The indices are used to + index the output tensors of shape (batch_size, n_anchors, feature_height, + feature_width, n_classes + box_offset + n_keypoints * 3) to get a tensor of + shape (n_targets, n_classes + box_offset + n_keypoints * 3). 
+ """ + predictions = outputs["features"] + + kpts = labels[LabelType.KEYPOINT] + boxes = labels[LabelType.BOUNDINGBOX] + + nkpts = (kpts.shape[1] - 2) // 3 + targets = torch.zeros((len(boxes), nkpts * 2 + self.box_offset + 1)) + targets[:, :2] = boxes[:, :2] + targets[:, 2 : self.box_offset + 1] = box_convert( + boxes[:, 2:], "xywh", "cxcywh" + ) + targets[:, self.box_offset + 1 :: 2] = kpts[:, 2::3] # insert kp x coordinates + targets[:, self.box_offset + 2 :: 2] = kpts[:, 3::3] # insert kp y coordinates + + n_targets = len(targets) + + class_targets: list[Tensor] = [] + box_targets: list[Tensor] = [] + keypoint_targets: list[Tensor] = [] + indices: list[tuple[Tensor, Tensor, Tensor, Tensor]] = [] + anchors: list[Tensor] = [] + + anchor_indices = ( + torch.arange(self.n_anchors, device=targets.device, dtype=torch.float32) + .reshape(self.n_anchors, 1) + .repeat(1, n_targets) + .unsqueeze(-1) + ) + targets = torch.cat((targets.repeat(self.n_anchors, 1, 1), anchor_indices), 2) + + xy_deltas = ( + torch.tensor( + [[0, 0], [1, 0], [0, 1], [-1, 0], [0, -1]], device=targets.device + ).float() + * self.bias + ) + + for i in range(self.num_heads): + anchor = self.anchors[i] + feature_height, feature_width = predictions[i].shape[2:4] + + scaled_targets, xy_shifts = match_to_anchor( + targets, + anchor, + xy_deltas, + feature_width, + feature_height, + self.n_keypoints, + self.anchor_threshold, + self.bias, + self.box_offset, + ) + + batch_index, cls = scaled_targets[:, :2].long().T + box_xy = scaled_targets[:, 2:4] + box_wh = scaled_targets[:, 4:6] + box_xy_deltas = (box_xy - xy_shifts).long() + feature_x_index = box_xy_deltas[:, 0].clamp_(0, feature_width - 1) + feature_y_index = box_xy_deltas[:, 1].clamp_(0, feature_height - 1) + + anchor_indices = scaled_targets[:, -1].long() + indices.append( + ( + batch_index, + anchor_indices, + feature_y_index, + feature_x_index, + ) + ) + class_targets.append(cls) + box_targets.append(torch.cat((box_xy - box_xy_deltas, box_wh), 1)) + anchors.append(anchor[anchor_indices]) + + keypoint_targets.append( + self._create_keypoint_target(scaled_targets, box_xy_deltas) + ) + + return predictions, ( + class_targets, + box_targets, + keypoint_targets, + indices, + anchors, + ) + + def forward( + self, + predictions: list[Tensor], + targets: KeypointTargetType, + ) -> tuple[Tensor, dict[str, Tensor]]: + device = predictions[0].device + sub_losses = { + "bboxes": torch.tensor(0.0, device=device), + "objectness": torch.tensor(0.0, device=device), + "class": torch.tensor(0.0, device=device), + "kpt_visibility": torch.tensor(0.0, device=device), + "kpt_distance": torch.tensor(0.0, device=device), + } + + for pred, class_target, box_target, kpt_target, index, anchor, balance in zip( + predictions, *targets, self.balance + ): + obj_targets = torch.zeros_like(pred[..., 0], device=device) + n_targets = len(class_target) + + if n_targets > 0: + pred_subset = pred[index] + + bbox_cx_cy, bbox_w_h, _ = process_bbox_predictions( + pred_subset, anchor.to(device) + ) + bbox_loss, bbox_iou = compute_iou_loss( + torch.cat((bbox_cx_cy, bbox_w_h), dim=1), + box_target, + iou_type="ciou", + bbox_format="cxcywh", + reduction="mean", + ) + + sub_losses["bboxes"] += bbox_loss * self.bbox_weight + + _, kpt_sublosses = self.keypoint_loss.forward( + pred_subset[:, self.box_offset + self.n_classes :], + kpt_target.to(device), + ) + + sub_losses["kpt_distance"] += ( + kpt_sublosses["distance"] * self.kpt_distance_weight + ) + sub_losses["kpt_visibility"] += ( + kpt_sublosses["visibility"] * 
self.kpt_visibility_weight
+                )
+
+                obj_targets[index] = self.min_objectness_iou + (
+                    1 - self.min_objectness_iou
+                ) * bbox_iou.squeeze(-1).to(obj_targets.dtype)
+
+                if self.n_classes > 1:
+                    sub_losses["class"] += (
+                        self.class_loss.forward(
+                            [
+                                pred_subset[
+                                    :,
+                                    self.box_offset : self.box_offset + self.n_classes,
+                                ]
+                            ],
+                            class_target,
+                        )
+                        * self.class_weight
+                    )
+
+            sub_losses["objectness"] += (
+                self.b_cross_entropy.forward(pred[..., 4], obj_targets)
+                * balance
+                * self.objectness_weight
+            )
+
+        loss = cast(Tensor, sum(sub_losses.values())).reshape([])
+        return loss, {name: value.detach() for name, value in sub_losses.items()}
+
+    def _create_keypoint_target(self, scaled_targets: Tensor, box_xy_deltas: Tensor):
+        keypoint_target = scaled_targets[:, self.box_offset + 1 : -1]
+        for j in range(self.n_keypoints):
+            low = 2 * j
+            high = 2 * (j + 1)
+            keypoint_mask = keypoint_target[:, low:high] != 0
+            keypoint_target[:, low:high][keypoint_mask] -= box_xy_deltas[keypoint_mask]
+        return keypoint_target
diff --git a/luxonis_train/attached_modules/losses/keypoint_loss.py b/luxonis_train/attached_modules/losses/keypoint_loss.py
new file mode 100644
index 00000000..4728b045
--- /dev/null
+++ b/luxonis_train/attached_modules/losses/keypoint_loss.py
@@ -0,0 +1,77 @@
+from typing import Annotated
+
+import torch
+from pydantic import Field
+from torch import Tensor
+
+from luxonis_train.utils.boxutils import process_keypoints_predictions
+from luxonis_train.utils.types import (
+    BaseProtocol,
+    Labels,
+    LabelType,
+    Packet,
+)
+
+from .base_loss import BaseLoss
+from .bce_with_logits import BCEWithLogitsLoss
+
+
+class Protocol(BaseProtocol):
+    keypoints: Annotated[list[Tensor], Field(min_length=1, max_length=1)]
+
+
+class KeypointLoss(BaseLoss[Tensor, Tensor]):
+    def __init__(
+        self,
+        bce_power: float = 1.0,
+        distance_weight: float = 0.1,
+        visibility_weight: float = 0.6,
+        **kwargs,
+    ):
+        super().__init__(
+            protocol=Protocol, required_labels=[LabelType.KEYPOINT], **kwargs
+        )
+        self.b_cross_entropy = BCEWithLogitsLoss(
+            pos_weight=torch.tensor([bce_power]), **kwargs
+        )
+        self.distance_weight = distance_weight
+        self.visibility_weight = visibility_weight
+
+    def prepare(self, inputs: Packet[Tensor], labels: Labels) -> tuple[Tensor, Tensor]:
+        return torch.cat(inputs["keypoints"], dim=0), labels[LabelType.KEYPOINT]
+
+    def forward(
+        self, prediction: Tensor, target: Tensor
+    ) -> tuple[Tensor, dict[str, Tensor]]:
+        """Computes the keypoint distance and visibility losses for a given prediction
+        and target.
+
+        @type prediction: Tensor
+        @param prediction: Predicted tensor of shape C{[n_detections, n_keypoints * 3]}.
+        @type target: Tensor
+        @param target: Target tensor of shape C{[n_detections, n_keypoints * 2]}.
+        @rtype: tuple[Tensor, dict[str, Tensor]]
+        @return: A tuple of the combined loss tensor of shape C{[1,]} and a dictionary
+            with the C{distance} and C{visibility} sub-losses.
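+
+            A sketch of typical use (names illustrative):
+
+                >>> loss, sub = keypoint_loss.forward(prediction, target)
+                >>> sub["distance"], sub["visibility"]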
+ """ + x, y, visibility_score = process_keypoints_predictions(prediction) + gt_x = target[:, 0::2] + gt_y = target[:, 1::2] + + mask = target[:, 0::2] != 0 + visibility_loss = ( + self.b_cross_entropy.forward(visibility_score, mask.float()) + * self.visibility_weight + ) + distance = (x - gt_x) ** 2 + (y - gt_y) ** 2 + + loss_factor = (torch.sum(mask != 0) + torch.sum(mask == 0)) / ( + torch.sum(mask != 0) + 1e-9 + ) + distance_loss = ( + loss_factor + * (torch.log(distance + 1 + 1e-9) * mask).mean() + * self.distance_weight + ) + loss = distance_loss + visibility_loss + return loss, {"distance": distance_loss, "visibility": visibility_loss} diff --git a/luxonis_train/attached_modules/losses/sigmoid_focal_loss.py b/luxonis_train/attached_modules/losses/sigmoid_focal_loss.py new file mode 100644 index 00000000..31e16051 --- /dev/null +++ b/luxonis_train/attached_modules/losses/sigmoid_focal_loss.py @@ -0,0 +1,40 @@ +from typing import Literal + +from torch import Tensor +from torchvision.ops import sigmoid_focal_loss + +from luxonis_train.attached_modules.losses import BaseLoss + + +class SigmoidFocalLoss(BaseLoss[Tensor, Tensor]): + def __init__( + self, + alpha: float = 0.25, + gamma: float = 2.0, + reduction: Literal["none", "mean", "sum"] = "mean", + **kwargs, + ): + """Focal loss from U{Focal Loss for Dense Object Detection + }. + + @type alpha: float + @param alpha: Weighting factor in range (0,1) to balance positive vs negative examples or -1 for ignore. + Defaults to C{0.25}. + @type gamma: float + @param gamma: Exponent of the modulating factor (1 - p_t) to balance easy vs hard examples. + Defaults to C{2.0}. + @type reduction: Literal["none", "mean", "sum"] + @param reduction: Reduction type for loss. Defaults to C{"mean"}. + """ + super().__init__(**kwargs) + + self.alpha = alpha + self.gamma = gamma + self.reduction = reduction + + def forward(self, preds: Tensor, target: Tensor) -> Tensor: + loss = sigmoid_focal_loss( + preds, target, alpha=self.alpha, gamma=self.gamma, reduction=self.reduction + ) + + return loss diff --git a/luxonis_train/attached_modules/losses/smooth_bce_with_logits.py b/luxonis_train/attached_modules/losses/smooth_bce_with_logits.py new file mode 100644 index 00000000..48f827d6 --- /dev/null +++ b/luxonis_train/attached_modules/losses/smooth_bce_with_logits.py @@ -0,0 +1,69 @@ +from typing import Literal + +import torch +from torch import Tensor + +from .base_loss import BaseLoss +from .bce_with_logits import BCEWithLogitsLoss + + +class SmoothBCEWithLogitsLoss(BaseLoss[list[Tensor], Tensor]): + def __init__( + self, + label_smoothing: float = 0.0, + bce_pow: float = 1.0, + weight: list[float] | None = None, + reduction: Literal["mean", "sum", "none"] = "mean", + **kwargs, + ): + """BCE with logits loss and label smoothing. + + @type label_smoothing: float + @param label_smoothing: Label smoothing factor. Defaults to C{0.0}. + @type bce_pow: float + @param bce_pow: Weight for positive samples. Defaults to C{1.0}. + @type weight: list[float] | None + @param weight: a manual rescaling weight given to the loss of each batch + element. If given, it has to be a list of length C{nbatch}. + @type reduction: Literal["mean", "sum", "none"] + @param reduction: Specifies the reduction to apply to the output: C{'none'} | + C{'mean'} | C{'sum'}. C{'none'}: no reduction will be applied, C{'mean'}: + the sum of the output will be divided by the number of elements in the + output, C{'sum'}: the output will be summed. 
Note: C{size_average} and
+            C{reduce} are in the process of being deprecated, and in the meantime,
+            specifying either of those two args will override C{reduction}. Defaults to
+            C{'mean'}.
+        @type kwargs: dict
+        @param kwargs: Additional arguments to pass to L{BaseLoss}.
+        """
+        super().__init__(**kwargs)
+        # Smoothed targets: positives become ~1 (1 - eps / 2),
+        # negatives become ~0 (eps / 2).
+        self.positive_smooth_const = 1.0 - 0.5 * label_smoothing
+        self.negative_smooth_const = 0.5 * label_smoothing
+        self.criterion = BCEWithLogitsLoss(
+            # `self.node` raises when unset, so pass the private attribute.
+            node=self._node,
+            pos_weight=torch.tensor(
+                [bce_pow],
+            ),
+            weight=weight,
+            reduction=reduction,
+        )
+
+    def forward(self, predictions: list[Tensor], target: Tensor) -> Tensor:
+        """Computes the BCE loss with label smoothing.
+
+        @type predictions: list[Tensor]
+        @param predictions: List of tensors of shape (N, n_classes), containing the
+            predicted class scores.
+        @type target: Tensor
+        @param target: A tensor of shape (N,), containing the ground-truth class labels.
+        @rtype: Tensor
+        @return: A scalar tensor.
+        """
+        prediction = predictions[0]
+        smoothed_target = torch.full_like(
+            prediction,
+            self.negative_smooth_const,
+            device=prediction.device,
+        )
+        smoothed_target[torch.arange(len(target)), target] = self.positive_smooth_const
+        return self.criterion.forward(prediction, smoothed_target)
diff --git a/luxonis_train/attached_modules/losses/softmax_focal_loss.py b/luxonis_train/attached_modules/losses/softmax_focal_loss.py
new file mode 100644
index 00000000..57b288f3
--- /dev/null
+++ b/luxonis_train/attached_modules/losses/softmax_focal_loss.py
@@ -0,0 +1,53 @@
+# TODO: document
+
+from typing import Literal
+
+import torch
+from torch import Tensor
+
+from luxonis_train.attached_modules.losses import BaseLoss
+
+from .cross_entropy import CrossEntropyLoss
+
+
+class SoftmaxFocalLoss(BaseLoss[Tensor, Tensor]):
+    def __init__(
+        self,
+        alpha: float | list[float] = 0.25,
+        gamma: float = 2.0,
+        reduction: Literal["none", "mean", "sum"] = "mean",
+        **kwargs,
+    ):
+        """Focal loss implementation for multi-class/multi-label tasks using Softmax.
+
+        @type alpha: float | list[float]
+        @param alpha: Weighting factor for the rare class. Defaults to C{0.25}.
+        @type gamma: float
+        @param gamma: Focusing parameter. Defaults to C{2.0}.
+        @type reduction: Literal["none", "mean", "sum"]
+        @param reduction: Reduction type. Defaults to C{"mean"}.
+        """
+        super().__init__(**kwargs)
+
+        self.alpha = alpha
+        self.gamma = gamma
+        self.reduction = reduction
+        self.ce_criterion = CrossEntropyLoss(reduction="none", **kwargs)
+
+    def forward(self, predictions: Tensor, target: Tensor) -> Tensor:
+        ce_loss = self.ce_criterion.forward(predictions, target)
+        pt = torch.exp(-ce_loss)
+        loss = ce_loss * ((1 - pt) ** self.gamma)
+
+        if isinstance(self.alpha, float) and self.alpha >= 0:
+            loss = self.alpha * loss
+        elif isinstance(self.alpha, list):
+            alpha_t = torch.tensor(self.alpha)[target]
+            loss = alpha_t * loss
+
+        if self.reduction == "mean":
+            loss = loss.mean()
+        elif self.reduction == "sum":
+            loss = loss.sum()
+
+        return loss
diff --git a/luxonis_train/attached_modules/metrics/README.md b/luxonis_train/attached_modules/metrics/README.md
new file mode 100644
index 00000000..4e452158
--- /dev/null
+++ b/luxonis_train/attached_modules/metrics/README.md
@@ -0,0 +1,44 @@
+# Metrics
+
+List of all the available metrics.
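+
+Most metrics follow the `torchmetrics` `update`/`compute` interface. A minimal
+sketch using the underlying `torchmetrics` implementation of mAP directly
+(boxes in absolute `xyxy` pixel coordinates):
+
+```python
+import torch
+import torchmetrics.detection as detection
+
+metric = detection.MeanAveragePrecision()
+preds = [{"boxes": torch.tensor([[10.0, 10.0, 50.0, 50.0]]),
+          "scores": torch.tensor([0.9]),
+          "labels": torch.tensor([0])}]
+target = [{"boxes": torch.tensor([[12.0, 8.0, 48.0, 52.0]]),
+           "labels": torch.tensor([0])}]
+metric.update(preds, target)
+print(metric.compute()["map"])
+```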
+
+## Table Of Contents
+
+- [Torchmetrics](#torchmetrics)
+- [ObjectKeypointSimilarity](#objectkeypointsimilarity)
+- [MeanAveragePrecision](#meanaverageprecision)
+- [MeanAveragePrecisionKeypoints](#meanaverageprecisionkeypoints)
+
+## Torchmetrics
+
+Metrics from the [`torchmetrics`](https://lightning.ai/docs/torchmetrics/stable/) module.
+
+- [Accuracy](https://lightning.ai/docs/torchmetrics/stable/classification/accuracy.html)
+- [JaccardIndex](https://lightning.ai/docs/torchmetrics/stable/classification/jaccard_index.html) -- Intersection over Union.
+- [F1Score](https://lightning.ai/docs/torchmetrics/stable/classification/f1_score.html)
+- [Precision](https://lightning.ai/docs/torchmetrics/stable/classification/precision.html)
+- [Recall](https://lightning.ai/docs/torchmetrics/stable/classification/recall.html)
+
+## ObjectKeypointSimilarity
+
+For more information, see [object-keypoint-similarity](https://learnopencv.com/object-keypoint-similarity/).
+
+## MeanAveragePrecision
+
+Computes the Mean-Average-Precision (mAP) and Mean-Average-Recall (mAR) for object detection predictions.
+
+```math
+\text{mAP} = \frac{1}{n} \sum_{i=1}^{n} AP_i
+```
+
+where $AP_i$ is the average precision for class $i$ and $n$ is the number of classes. The average
+precision is defined as the area under the precision-recall curve. For object detection, precision and
+recall are defined in terms of the intersection over union (IoU) between the predicted and the ground
+truth bounding boxes: if two boxes have an IoU > t (with t being some threshold), they are considered
+a match and count as a true positive. Precision is then the number of true positives divided by the
+number of all detected boxes, and recall is the number of true positives divided by the number of all
+ground truth boxes.
+
+## MeanAveragePrecisionKeypoints
+
+Similar to [MeanAveragePrecision](#meanaverageprecision), but uses [OKS](#objectkeypointsimilarity) as the `IoU` measure.
diff --git a/luxonis_train/attached_modules/metrics/__init__.py b/luxonis_train/attached_modules/metrics/__init__.py
new file mode 100644
index 00000000..9e73e4ac
--- /dev/null
+++ b/luxonis_train/attached_modules/metrics/__init__.py
@@ -0,0 +1,17 @@
+from .base_metric import BaseMetric
+from .common import Accuracy, F1Score, JaccardIndex, Precision, Recall
+from .mean_average_precision import MeanAveragePrecision
+from .mean_average_precision_keypoints import MeanAveragePrecisionKeypoints
+from .object_keypoint_similarity import ObjectKeypointSimilarity
+
+__all__ = [
+    "Accuracy",
+    "F1Score",
+    "JaccardIndex",
+    "BaseMetric",
+    "MeanAveragePrecision",
+    "MeanAveragePrecisionKeypoints",
+    "ObjectKeypointSimilarity",
+    "Precision",
+    "Recall",
+]
diff --git a/luxonis_train/attached_modules/metrics/base_metric.py b/luxonis_train/attached_modules/metrics/base_metric.py
new file mode 100644
index 00000000..f2334163
--- /dev/null
+++ b/luxonis_train/attached_modules/metrics/base_metric.py
@@ -0,0 +1,60 @@
+from abc import abstractmethod
+
+from torch import Tensor
+from torchmetrics import Metric
+from typing_extensions import TypeVarTuple, Unpack
+
+from luxonis_train.attached_modules import BaseAttachedModule
+from luxonis_train.utils.registry import METRICS
+from luxonis_train.utils.types import Labels, Packet
+
+Ts = TypeVarTuple("Ts")
+
+
+class BaseMetric(
+    BaseAttachedModule[Unpack[Ts]],
+    Metric,
+    register=False,
+    registry=METRICS,
+):
+    """A base class for all metrics.
+
+    This class defines the basic interface for all metrics. It utilizes automatic
+    registration of defined subclasses to a L{METRICS} registry.
+    """
+
+    @abstractmethod
+    def update(self, *args: Unpack[Ts]) -> None:
+        """Updates the inner state of the metric.
+
+        @type args: Unpack[Ts]
+        @param args: Prepared inputs from the L{prepare} method.
+        """
+        ...
+
+    @abstractmethod
+    def compute(self) -> Tensor | tuple[Tensor, dict[str, Tensor]] | dict[str, Tensor]:
+        """Computes the metric.
+
+        @rtype: Tensor | tuple[Tensor, dict[str, Tensor]] | dict[str, Tensor]
+        @return: The computed metric. Can be one of:
+            - A single Tensor.
+            - A tuple of a Tensor and a dictionary of submetrics.
+            - A dictionary of submetrics. If this is the case, then the metric
+                cannot be used as the main metric of the model.
+        """
+        ...
+
+    def run_update(self, outputs: Packet[Tensor], labels: Labels) -> None:
+        """Calls the metric's update method.
+
+        Validates and prepares the inputs, then calls the metric's update method.
+
+        @type outputs: Packet[Tensor]
+        @param outputs: The outputs of the model.
+        @type labels: Labels
+        @param labels: The labels of the model.
+        @raises IncompatibleException: If the inputs are not compatible with the module.
+        """
+        self.validate(outputs, labels)
+        self.update(*self.prepare(outputs, labels))
diff --git a/luxonis_train/attached_modules/metrics/common.py b/luxonis_train/attached_modules/metrics/common.py
new file mode 100644
index 00000000..27d1069a
--- /dev/null
+++ b/luxonis_train/attached_modules/metrics/common.py
@@ -0,0 +1,76 @@
+import logging
+
+import torchmetrics
+
+from .base_metric import BaseMetric
+
+logger = logging.getLogger(__name__)
+
+
+class TorchMetricWrapper(BaseMetric):
+    def __init__(self, **kwargs):
+        super().__init__(
+            node=kwargs.pop("node", None),
+            protocol=kwargs.pop("protocol", None),
+            required_labels=kwargs.pop("required_labels", None),
+        )
+        task = kwargs.get("task")
+
+        if task is None:
+            if self.node.n_classes > 1:
+                task = "multiclass"
+            else:
+                task = "binary"
+            logger.warning(
+                f"Task type not specified for {self.__class__.__name__}, "
+                f"assuming {task}."
+            )
+        kwargs["task"] = task
+        self.task = task
+
+        if self.task == "multiclass":
+            if "num_classes" not in kwargs:
+                # `self.node` raises when unset, so check the private attribute.
+                if self._node is None:
+                    raise ValueError(
+                        "Either `node` or `num_classes` must be provided to "
+                        "multiclass torchmetrics."
+                    )
+                kwargs["num_classes"] = self.node.n_classes
+        elif self.task == "multilabel":
+            if "num_labels" not in kwargs:
+                if self._node is None:
+                    raise ValueError(
+                        "Either `node` or `num_labels` must be provided to "
+                        "multilabel torchmetrics."
+ ) + kwargs["num_labels"] = self.node.n_classes + + self.metric = self.Metric(**kwargs) + + def update(self, preds, target, *args, **kwargs): + if self.task in ["multiclass"]: + target = target.argmax(dim=1) + self.metric.update(preds, target, *args, **kwargs) + + def compute(self): + return self.metric.compute() + + +class Accuracy(TorchMetricWrapper): + Metric = torchmetrics.Accuracy + + +class F1Score(TorchMetricWrapper): + Metric = torchmetrics.F1Score + + +class JaccardIndex(TorchMetricWrapper): + Metric = torchmetrics.JaccardIndex + + +class Precision(TorchMetricWrapper): + Metric = torchmetrics.Precision + + +class Recall(TorchMetricWrapper): + Metric = torchmetrics.Recall diff --git a/luxonis_train/attached_modules/metrics/mean_average_precision.py b/luxonis_train/attached_modules/metrics/mean_average_precision.py new file mode 100644 index 00000000..34adbcd9 --- /dev/null +++ b/luxonis_train/attached_modules/metrics/mean_average_precision.py @@ -0,0 +1,73 @@ +import torchmetrics.detection as detection +from torch import Tensor +from torchvision.ops import box_convert + +from luxonis_train.utils.types import ( + BBoxProtocol, + Labels, + LabelType, + Packet, +) + +from .base_metric import BaseMetric + + +class MeanAveragePrecision(BaseMetric, detection.MeanAveragePrecision): + """Compute the Mean-Average-Precision (mAP) and Mean-Average-Recall (mAR) for object + detection predictions. + + Adapted from U{Mean-Average-Precision (mAP) and Mean-Average-Recall (mAR) + }. + """ + + def __init__(self, **kwargs): + super().__init__( + protocol=BBoxProtocol, + required_labels=[LabelType.BOUNDINGBOX], + **kwargs, + ) + self.metric = detection.MeanAveragePrecision() + + def update( + self, + outputs: list[dict[str, Tensor]], + labels: list[dict[str, Tensor]], + ): + self.metric.update(outputs, labels) + + def prepare( + self, outputs: Packet[Tensor], labels: Labels + ) -> tuple[list[dict[str, Tensor]], list[dict[str, Tensor]]]: + label = labels[LabelType.BOUNDINGBOX] + output_nms = outputs["boxes"] + + image_size = self.node.original_in_shape[2:] + + output_list: list[dict[str, Tensor]] = [] + label_list: list[dict[str, Tensor]] = [] + for i in range(len(output_nms)): + output_list.append( + { + "boxes": output_nms[i][:, :4], + "scores": output_nms[i][:, 4], + "labels": output_nms[i][:, 5].int(), + } + ) + + curr_label = label[label[:, 0] == i] + curr_bboxs = box_convert(curr_label[:, 2:], "xywh", "xyxy") + curr_bboxs[:, 0::2] *= image_size[1] + curr_bboxs[:, 1::2] *= image_size[0] + label_list.append({"boxes": curr_bboxs, "labels": curr_label[:, 1].int()}) + + return output_list, label_list + + def compute(self) -> tuple[Tensor, dict[str, Tensor]]: + metric_dict = self.metric.compute() + + del metric_dict["classes"] + del metric_dict["map_per_class"] + del metric_dict["mar_100_per_class"] + map = metric_dict.pop("map") + + return map, metric_dict diff --git a/luxonis_train/attached_modules/metrics/mean_average_precision_keypoints.py b/luxonis_train/attached_modules/metrics/mean_average_precision_keypoints.py new file mode 100644 index 00000000..3740f58e --- /dev/null +++ b/luxonis_train/attached_modules/metrics/mean_average_precision_keypoints.py @@ -0,0 +1,349 @@ +import contextlib +import io +from typing import Any, Literal + +import torch +from pycocotools.coco import COCO +from pycocotools.cocoeval import COCOeval +from torch import Tensor +from torchvision.ops import box_convert + +from luxonis_train.utils.types import ( + BBoxProtocol, + KeypointProtocol, + Labels, + LabelType, + 
Packet,
+)
+
+from .base_metric import BaseMetric
+
+
+class Protocol(KeypointProtocol, BBoxProtocol):
+    ...
+
+
+class MeanAveragePrecisionKeypoints(BaseMetric):
+    """Mean Average Precision metric for keypoints.
+
+    Uses C{OKS} as IoU measure.
+    """
+
+    is_differentiable: bool = False
+    higher_is_better: bool = True
+    full_state_update: bool = True
+
+    pred_boxes: list[Tensor]
+    pred_scores: list[Tensor]
+    pred_labels: list[Tensor]
+    pred_keypoints: list[Tensor]
+
+    groundtruth_boxes: list[Tensor]
+    groundtruth_labels: list[Tensor]
+    groundtruth_area: list[Tensor]
+    groundtruth_crowds: list[Tensor]
+    groundtruth_keypoints: list[Tensor]
+
+    def __init__(
+        self,
+        kpt_sigmas: Tensor | None = None,
+        box_format: Literal["xyxy", "xywh", "cxcywh"] = "xyxy",
+        **kwargs,
+    ):
+        """Implementation of the mean average precision metric for keypoint detections.
+
+        Adapted from: U{https://github.com/Lightning-AI/torchmetrics/blob/v1.0.1/src/
+        torchmetrics/detection/mean_ap.py}.
+
+        @license: Apache-2.0 License
+
+        @type kpt_sigmas: Tensor or None
+        @param kpt_sigmas: Sigma for each keypoint to weigh its importance, if None use same weights for all.
+        @type box_format: Literal["xyxy", "xywh", "cxcywh"]
+        @param box_format: Input bbox format.
+        @type kwargs: Any
+        @param kwargs: Additional arguments to pass to L{BaseMetric}.
+        """
+        super().__init__(
+            protocol=Protocol,
+            required_labels=[LabelType.BOUNDINGBOX, LabelType.KEYPOINT],
+            **kwargs,
+        )
+
+        self.n_keypoints = self.node.n_keypoints
+
+        if kpt_sigmas is not None and len(kpt_sigmas) != self.n_keypoints:
+            raise ValueError("Expected kpt_sigmas to be of shape (num_keypoints).")
+        self.kpt_sigmas = (
+            kpt_sigmas if kpt_sigmas is not None else torch.ones(self.n_keypoints)
+        )
+
+        allowed_box_formats = ("xyxy", "xywh", "cxcywh")
+        if box_format not in allowed_box_formats:
+            raise ValueError(
+                f"Expected argument `box_format` to be one of {allowed_box_formats} but got {box_format}"
+            )
+        self.box_format = box_format
+
+        self.add_state("pred_boxes", default=[], dist_reduce_fx=None)
+        self.add_state("pred_scores", default=[], dist_reduce_fx=None)
+        self.add_state("pred_labels", default=[], dist_reduce_fx=None)
+        self.add_state("pred_keypoints", default=[], dist_reduce_fx=None)
+
+        self.add_state("groundtruth_boxes", default=[], dist_reduce_fx=None)
+        self.add_state("groundtruth_labels", default=[], dist_reduce_fx=None)
+        self.add_state("groundtruth_area", default=[], dist_reduce_fx=None)
+        self.add_state("groundtruth_crowds", default=[], dist_reduce_fx=None)
+        self.add_state("groundtruth_keypoints", default=[], dist_reduce_fx=None)
+
+    def prepare(self, outputs: Packet[Tensor], labels: Labels):
+        kpts = labels[LabelType.KEYPOINT]
+        boxes = labels[LabelType.BOUNDINGBOX]
+        nkpts = (kpts.shape[1] - 2) // 3
+        label = torch.zeros((len(boxes), nkpts * 3 + 6))
+        label[:, :2] = boxes[:, :2]
+        label[:, 2:6] = box_convert(boxes[:, 2:], "xywh", "xyxy")
+        label[:, 6::3] = kpts[:, 2::3]  # x
+        label[:, 7::3] = kpts[:, 3::3]  # y
+        label[:, 8::3] = kpts[:, 4::3]  # visibility
+
+        output_list_kpt_map = []
+        label_list_kpt_map = []
+        image_size = self.node.original_in_shape[2:]
+
+        output_kpts: list[Tensor] = outputs["keypoints"]
+        output_bboxes: list[Tensor] = outputs["boxes"]
+        for i in range(len(output_kpts)):
+            output_list_kpt_map.append(
+                {
+                    "boxes": output_bboxes[i][:, :4],
+                    "scores": output_bboxes[i][:, 4],
+                    "labels": output_bboxes[i][:, 5].int(),
+                    "keypoints": output_kpts[i].reshape(-1, self.n_keypoints * 3),
+                }
+            )
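+            # Ground-truth rows belonging to image `i`; normalized coordinates
+            # are scaled to absolute pixels below.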
+            curr_label = label[label[:, 0] == i].to(output_kpts[i].device)
+            curr_bboxs = curr_label[:, 2:6]
+            curr_bboxs[:, 0::2] *= image_size[1]
+            curr_bboxs[:, 1::2] *= image_size[0]
+            curr_kpts = curr_label[:, 6:]
+            curr_kpts[:, 0::3] *= image_size[1]
+            curr_kpts[:, 1::3] *= image_size[0]
+            label_list_kpt_map.append(
+                {
+                    "boxes": curr_bboxs,
+                    "labels": curr_label[:, 1].int(),
+                    "keypoints": curr_kpts,
+                }
+            )
+
+        return output_list_kpt_map, label_list_kpt_map
+
+    def update(
+        self, preds: list[dict[str, Tensor]], target: list[dict[str, Tensor]]
+    ) -> None:
+        """Updates the metric state.
+
+        @type preds: list[dict[str, Tensor]]
+        @param preds: A list consisting of dictionaries each containing key-values for a single image.
+            Parameters that should be provided per dict:
+
+            - boxes (FloatTensor): Tensor of shape C{(N, 4)}
+              containing `N` detection boxes of the format specified in
+              the constructor. By default, this method expects `(xmin, ymin,
+              xmax, ymax)` in absolute image coordinates.
+            - scores (FloatTensor): Tensor of shape C{(N)}
+              containing detection scores for the boxes.
+            - labels (IntTensor): Tensor of shape C{(N)} containing
+              0-indexed detection classes for the boxes.
+            - keypoints (FloatTensor): Tensor of shape C{(N, 3*K)} and in
+              format C{[x, y, vis, x, y, vis, ...]} where C{x} and C{y} are unnormalized
+              keypoint coordinates and C{vis} is keypoint visibility.
+
+        @type target: list[dict[str, Tensor]]
+        @param target: A list consisting of dictionaries each containing key-values for a single image.
+            Parameters that should be provided per dict:
+
+            - boxes (FloatTensor): Tensor of shape C{(N, 4)} containing
+              `N` ground truth boxes of the format specified in the
+              constructor. By default, this method expects `(xmin, ymin, xmax, ymax)`
+              in absolute image coordinates.
+            - labels (IntTensor): Tensor of shape C{(N)} containing
+              0-indexed ground truth classes for the boxes.
+            - iscrowd (IntTensor): Tensor of shape C{(N)} containing 0/1
+              values indicating whether the bounding box/masks indicate a crowd of
+              objects. If not provided it will automatically be set to 0.
+            - area (FloatTensor): Tensor of shape C{(N)} containing the
+              area of the object. If not provided will be automatically calculated
+              based on the bounding box/masks provided. Only affects which samples
+              contribute to the C{map_small}, C{map_medium}, C{map_large} values.
+            - keypoints (FloatTensor): Tensor of shape C{(N, 3*K)} in format
+              C{[x, y, vis, x, y, vis, ...]} where C{x} and C{y} are unnormalized keypoint
+              coordinates and C{vis} is keypoint visibility.
+ """ + for item in preds: + boxes, keypoints = self._get_safe_item_values(item) + self.pred_boxes.append(boxes) + self.pred_keypoints.append(keypoints) + self.pred_scores.append(item["scores"]) + self.pred_labels.append(item["labels"]) + + for item in target: + boxes, keypoints = self._get_safe_item_values(item) + self.groundtruth_boxes.append(boxes) + self.groundtruth_keypoints.append(keypoints) + self.groundtruth_labels.append(item["labels"]) + self.groundtruth_area.append( + item.get("area", torch.zeros_like(item["labels"])) + ) + self.groundtruth_crowds.append( + item.get("iscrowd", torch.zeros_like(item["labels"])) + ) + + def compute(self) -> tuple[Tensor, dict[str, Tensor]]: + """Torchmetric compute function.""" + coco_target, coco_preds = COCO(), COCO() + coco_target.dataset = self._get_coco_format( + self.groundtruth_boxes, + self.groundtruth_keypoints, + self.groundtruth_labels, + crowds=self.groundtruth_crowds, + area=self.groundtruth_area, + ) # type: ignore + coco_preds.dataset = self._get_coco_format( + self.pred_boxes, + self.pred_keypoints, + self.groundtruth_labels, + scores=self.pred_scores, + ) # type: ignore + + with contextlib.redirect_stdout(io.StringIO()): + coco_target.createIndex() + coco_preds.createIndex() + + self.coco_eval = COCOeval(coco_target, coco_preds, iouType="keypoints") + self.coco_eval.params.kpt_oks_sigmas = self.kpt_sigmas.cpu().numpy() + + self.coco_eval.evaluate() + self.coco_eval.accumulate() + self.coco_eval.summarize() + stats = self.coco_eval.stats + + kpt_map = torch.tensor([stats[0]], dtype=torch.float32) + return kpt_map, { + "kpt_map_50": torch.tensor([stats[1]], dtype=torch.float32), + "kpt_map_75": torch.tensor([stats[2]], dtype=torch.float32), + "kpt_map_medium": torch.tensor([stats[3]], dtype=torch.float32), + "kpt_map_large": torch.tensor([stats[4]], dtype=torch.float32), + "kpt_mar": torch.tensor([stats[5]], dtype=torch.float32), + "kpt_mar_50": torch.tensor([stats[6]], dtype=torch.float32), + "kpt_mar_75": torch.tensor([stats[7]], dtype=torch.float32), + "kpt_mar_medium": torch.tensor([stats[8]], dtype=torch.float32), + "kpt_mar_large": torch.tensor([stats[9]], dtype=torch.float32), + } + + def _get_coco_format( + self, + boxes: list[Tensor], + keypoints: list[Tensor], + labels: list[Tensor], + scores: list[Tensor] | None = None, + crowds: list[Tensor] | None = None, + area: list[Tensor] | None = None, + ) -> dict[str, list[dict[str, Any]]]: + """Transforms and returns all cached targets or predictions in COCO format. + + Format is defined at U{https://cocodataset.org/#format-data}. 
+ """ + images = [] + annotations = [] + annotation_id = 1 # has to start with 1, otherwise COCOEval results are wrong + + for image_id, (image_boxes, image_kpts, image_labels) in enumerate( + zip(boxes, keypoints, labels) + ): + image_boxes_list = image_boxes.cpu().tolist() + image_kpts_list = image_kpts.cpu().tolist() + image_labels_list = image_labels.cpu().tolist() + + images.append({"id": image_id}) + + for k, (image_box, image_kpt, image_label) in enumerate( + zip(image_boxes_list, image_kpts_list, image_labels_list) + ): + if len(image_box) != 4: + raise ValueError( + f"Invalid input box of sample {image_id}, element {k} " + f"(expected 4 values, got {len(image_box)})" + ) + + if len(image_kpt) != 3 * self.n_keypoints: + raise ValueError( + f"Invalid input keypoints of sample {image_id}, element {k} " + f"(expected {3 * self.n_keypoints} values, got {len(image_kpt)})" + ) + + if not isinstance(image_label, int): + raise ValueError( + f"Invalid input class of sample {image_id}, element {k} " + f"(expected value of type integer, got type {type(image_label)})" + ) + + if area is not None and area[image_id][k].cpu().item() > 0: + area_stat = area[image_id][k].cpu().tolist() + else: + area_stat = image_box[2] * image_box[3] + + annotation = { + "id": annotation_id, + "image_id": image_id, + "bbox": image_box, + "area": area_stat, + "category_id": image_label, + "iscrowd": crowds[image_id][k].cpu().tolist() + if crowds is not None + else 0, + "keypoints": image_kpt, + "num_keypoints": self.n_keypoints, + } + + if scores is not None: + score = scores[image_id][k].cpu().tolist() + if not isinstance(score, float): + raise ValueError( + f"Invalid input score of sample {image_id}, element {k}" + f" (expected value of type float, got type {type(score)})" + ) + annotation["score"] = score + annotations.append(annotation) + annotation_id += 1 + + classes = [{"id": i, "name": str(i)} for i in self._get_classes()] + return {"images": images, "annotations": annotations, "categories": classes} + + def _get_safe_item_values(self, item: dict[str, Tensor]) -> tuple[Tensor, Tensor]: + """Convert and return the boxes.""" + boxes = self._fix_empty_tensors(item["boxes"]) + if boxes.numel() > 0: + boxes = box_convert(boxes, in_fmt=self.box_format, out_fmt="xywh") + keypoints = self._fix_empty_tensors(item["keypoints"]) + return boxes, keypoints + + def _get_classes(self) -> list[int]: + """Return a list of unique classes found in ground truth and detection data.""" + if len(self.pred_labels) > 0 or len(self.groundtruth_labels) > 0: + return ( + torch.cat(self.pred_labels + self.groundtruth_labels) + .unique() + .cpu() + .tolist() + ) + return [] + + @staticmethod + def _fix_empty_tensors(input_tensor: Tensor) -> Tensor: + """Empty tensors can cause problems in DDP mode, this methods corrects them.""" + if input_tensor.numel() == 0 and input_tensor.ndim == 1: + return input_tensor.unsqueeze(0) + return input_tensor diff --git a/luxonis_train/attached_modules/metrics/object_keypoint_similarity.py b/luxonis_train/attached_modules/metrics/object_keypoint_similarity.py new file mode 100644 index 00000000..c5e4a19b --- /dev/null +++ b/luxonis_train/attached_modules/metrics/object_keypoint_similarity.py @@ -0,0 +1,203 @@ +import torch +from scipy.optimize import linear_sum_assignment +from torch import Tensor +from torchvision.ops import box_convert + +from luxonis_train.utils.types import ( + KeypointProtocol, + Labels, + LabelType, + Packet, +) + +from .base_metric import BaseMetric + + +class 
+class ObjectKeypointSimilarity(
+    BaseMetric[list[dict[str, Tensor]], list[dict[str, Tensor]]]
+):
+    """Object Keypoint Similarity metric for evaluating keypoint predictions.
+
+    @type n_keypoints: int
+    @param n_keypoints: Number of keypoints.
+    @type kpt_sigmas: Tensor
+    @param kpt_sigmas: Sigma for each keypoint to weigh its importance. If C{None},
+        the same weight is used for all keypoints.
+    @type use_cocoeval_oks: bool
+    @param use_cocoeval_oks: Whether to use the same OKS formula as in COCOeval or
+        the one from the COCO keypoints definition.
+    """
+
+    is_differentiable: bool = False
+    higher_is_better: bool = True
+    full_state_update: bool = True
+    plot_lower_bound: float = 0.0
+    plot_upper_bound: float = 1.0
+
+    pred_keypoints: list[Tensor]
+    groundtruth_keypoints: list[Tensor]
+    groundtruth_scales: list[Tensor]
+
+    def __init__(
+        self,
+        n_keypoints: int | None = None,
+        kpt_sigmas: Tensor | None = None,
+        use_cocoeval_oks: bool = False,
+        **kwargs,
+    ) -> None:
+        super().__init__(
+            required_labels=[LabelType.KEYPOINT], protocol=KeypointProtocol, **kwargs
+        )
+
+        if n_keypoints is None and self.node is None:
+            raise ValueError(
+                f"Either `n_keypoints` or `node` must be provided "
+                f"to {self.__class__.__name__}."
+            )
+        self.n_keypoints = n_keypoints or self.node.n_keypoints
+        if kpt_sigmas is not None and len(kpt_sigmas) != self.n_keypoints:
+            raise ValueError(
+                f"Expected `kpt_sigmas` to be of shape ({self.n_keypoints}, )."
+            )
+        self.kpt_sigmas = (
+            kpt_sigmas
+            if kpt_sigmas is not None
+            else torch.ones(self.n_keypoints) / self.n_keypoints
+        )
+        self.use_cocoeval_oks = use_cocoeval_oks
+
+        self.add_state("pred_keypoints", default=[], dist_reduce_fx=None)
+        self.add_state("groundtruth_keypoints", default=[], dist_reduce_fx=None)
+        self.add_state("groundtruth_scales", default=[], dist_reduce_fx=None)
+
+    def prepare(
+        self, outputs: Packet[Tensor], labels: Labels
+    ) -> tuple[list[dict[str, Tensor]], list[dict[str, Tensor]]]:
+        kpts_labels = labels[LabelType.KEYPOINT]
+        bbox_labels = labels[LabelType.BOUNDINGBOX]
+        num_keypoints = (kpts_labels.shape[1] - 2) // 3
+        label = torch.zeros((len(bbox_labels), num_keypoints * 3 + 6))
+        label[:, :2] = bbox_labels[:, :2]
+        label[:, 2:6] = box_convert(bbox_labels[:, 2:], "xywh", "xyxy")
+        label[:, 6::3] = kpts_labels[:, 2::3]  # insert kp x coordinates
+        label[:, 7::3] = kpts_labels[:, 3::3]  # insert kp y coordinates
+        label[:, 8::3] = kpts_labels[:, 4::3]  # insert kp visibility
+
+        output_list_oks = []
+        label_list_oks = []
+        image_size = self.node.original_in_shape[2:]
+
+        for i, pred_kpt in enumerate(outputs["keypoints"]):
+            output_list_oks.append({"keypoints": pred_kpt})
+
+            curr_label = label[label[:, 0] == i].to(pred_kpt.device)
+            curr_bboxs = curr_label[:, 2:6]
+            curr_bboxs[:, 0::2] *= image_size[1]
+            curr_bboxs[:, 1::2] *= image_size[0]
+            curr_kpts = curr_label[:, 6:]
+            curr_kpts[:, 0::3] *= image_size[1]
+            curr_kpts[:, 1::3] *= image_size[0]
+            curr_bboxs_widths = curr_bboxs[:, 2] - curr_bboxs[:, 0]
+            curr_bboxs_heights = curr_bboxs[:, 3] - curr_bboxs[:, 1]
+            curr_scales = torch.sqrt(curr_bboxs_widths * curr_bboxs_heights)
+            label_list_oks.append({"keypoints": curr_kpts, "scales": curr_scales})
+
+        return output_list_oks, label_list_oks
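+
+    # The merged `label` tensor built in `prepare` holds one row per instance.
+    # A sketch of its layout (K = number of keypoints; the names of the first
+    # two columns are inferred from how they are used above):
+    #
+    #     [img_idx, class, x1, y1, x2, y2, kpt1_x, kpt1_y, kpt1_vis, ..., kptK_vis]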
+    def update(
+        self, preds: list[dict[str, Tensor]], target: list[dict[str, Tensor]]
+    ) -> None:
+        """Updates the inner state of the metric.
+
+        @type preds: list[dict[str, Tensor]]
+        @param preds: A list consisting of dictionaries each containing key-values for
+            a single image.
+            Parameters that should be provided per dict:
+
+            - keypoints (FloatTensor): Tensor of shape (N, 3*K) and in format
+              [x, y, vis, x, y, vis, ...] where `x` and `y`
+              are unnormalized keypoint coordinates and `vis` is keypoint visibility.
+        @type target: list[dict[str, Tensor]]
+        @param target: A list consisting of dictionaries each containing key-values for
+            a single image.
+            Parameters that should be provided per dict:
+
+            - keypoints (FloatTensor): Tensor of shape (N, 3*K) and in format
+              [x, y, vis, x, y, vis, ...] where `x` and `y`
+              are unnormalized keypoint coordinates and `vis` is keypoint visibility.
+            - scales (FloatTensor): Tensor of shape (N) where each value
+              corresponds to scale of the bounding box.
+              Scale of one bounding box is defined as sqrt(width*height) where
+              width and height are unnormalized.
+        """
+        for item in preds:
+            keypoints = fix_empty_tensors(item["keypoints"])
+            self.pred_keypoints.append(keypoints)
+
+        for item in target:
+            keypoints = fix_empty_tensors(item["keypoints"])
+            self.groundtruth_keypoints.append(keypoints)
+            self.groundtruth_scales.append(item["scales"])
+
+    def compute(self) -> Tensor:
+        """Computes the OKS metric based on the inner state."""
+
+        self.kpt_sigmas = self.kpt_sigmas.to(self.device)
+        image_mean_oks = torch.zeros(len(self.groundtruth_keypoints))
+        for i, (pred_kpts, gt_kpts, gt_scales) in enumerate(
+            zip(
+                self.pred_keypoints, self.groundtruth_keypoints, self.groundtruth_scales
+            )
+        ):
+            pred_kpts = torch.reshape(pred_kpts, (-1, self.n_keypoints, 3))  # [N, K, 3]
+            gt_kpts = torch.reshape(gt_kpts, (-1, self.n_keypoints, 3))  # [M, K, 3]
+
+            image_ious = self._compute_oks(pred_kpts, gt_kpts, gt_scales)  # [M, N]
+            gt_indices, pred_indices = linear_sum_assignment(
+                image_ious.cpu().numpy(), maximize=True
+            )
+            matched_ious = [image_ious[n, m] for n, m in zip(gt_indices, pred_indices)]
+            image_mean_oks[i] = torch.tensor(matched_ious).mean()
+
+        final_oks = image_mean_oks.nanmean()
+
+        return final_oks
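+
+    # Worked example of the formula below (hypothetical numbers): with a single
+    # visible keypoint, squared distance d^2 = 50, scale s = 10 and sigma k = 0.1,
+    # the definition-style branch gives
+    #
+    #     oks = d^2 / (2 * (s * k)^2) = 50 / (2 * 1.0) = 25.0
+    #     OKS = exp(-25.0) ~= 1.4e-11   (a very poor match)
+    #
+    # while d^2 = 0.5 would give OKS = exp(-0.25) ~= 0.78.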
+    def _compute_oks(self, pred: Tensor, gt: Tensor, scales: Tensor) -> Tensor:
+        """Compute Object Keypoint Similarity between every GT and prediction.
+
+        @type pred: Tensor[N, K, 3]
+        @param pred: Predicted keypoints.
+        @type gt: Tensor[M, K, 3]
+        @param gt: Groundtruth keypoints.
+        @type scales: Tensor[M]
+        @param scales: Scales of the bounding boxes.
+        @rtype: Tensor
+        @return: Object Keypoint Similarity between every prediction and ground
+            truth, of shape [M, N].
+        """
+        eps = 1e-7
+        distances = (gt[:, None, :, 0] - pred[..., 0]) ** 2 + (
+            gt[:, None, :, 1] - pred[..., 1]
+        ) ** 2
+        kpt_mask = gt[..., 2] != 0  # only compute on visible keypoints
+        if self.use_cocoeval_oks:
+            # use the same formula as in the COCOEval script:
+            # https://github.com/cocodataset/cocoapi/blob/8c9bcc3cf640524c4c20a9c40e89cb6a2f2fa0e9/PythonAPI/pycocotools/cocoeval.py#L229
+            oks = (
+                distances
+                / (2 * self.kpt_sigmas) ** 2
+                / (scales[:, None, None] + eps)
+                / 2
+            )
+        else:
+            # use the formula from the definition: https://cocodataset.org/#keypoints-eval
+            oks = (
+                distances
+                / ((scales[:, None, None] + eps) * self.kpt_sigmas.to(scales.device))
+                ** 2
+                / 2
+            )
+
+        return (torch.exp(-oks) * kpt_mask[:, None]).sum(-1) / (
+            kpt_mask.sum(-1)[:, None] + eps
+        )
+
+
+def fix_empty_tensors(input_tensor: Tensor) -> Tensor:
+    """Empty tensors can cause problems in DDP mode, this method corrects them."""
+    if input_tensor.numel() == 0 and input_tensor.ndim == 1:
+        return input_tensor.unsqueeze(0)
+    return input_tensor
diff --git a/luxonis_train/attached_modules/visualizers/README.md b/luxonis_train/attached_modules/visualizers/README.md
new file mode 100644
index 00000000..bb3c1a89
--- /dev/null
+++ b/luxonis_train/attached_modules/visualizers/README.md
@@ -0,0 +1,87 @@
+# Visualizers
+
+## Table Of Contents
+
+- [BBoxVisualizer](#bboxvisualizer)
+- [ClassificationVisualizer](#classificationvisualizer)
+- [KeypointVisualizer](#keypointvisualizer)
+- [SegmentationVisualizer](#segmentationvisualizer)
+- [MultiVisualizer](#multivisualizer)
+
+## BBoxVisualizer
+
+Visualizer for bounding boxes.
+
+**Params**
+
+| Key       | Type | Default value | Description |
+| --------- | ---- | ------------- | ----------- |
+| labels    | dict\[int, str\] \| list\[str\] \| None | None | Either a dictionary mapping class indices to names, or a list of names. If a list is provided, the label mapping is done by index. By default, no labels are drawn. |
+| colors    | dict\[int, tuple\[int, int, int\] \| str\] \| list\[tuple\[int, int, int\] \| str\] \| None | None | Colors to use for the bounding boxes. Either a dictionary mapping class names to colors, or a list of colors. |
+| fill      | bool | False | Whether or not to fill the bounding boxes. |
+| width     | int  | 1 | The width of the bounding box lines. |
+| font      | str \| None | None | A filename containing a TrueType font. |
+| font_size | int \| None | None | Font size used for the labels. |
+
+**Example**
+
+![bbox_viz_example](../../../../media/example_viz/bbox.png)
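+
+A visualizer is specified as a dictionary with a `name` (a key in the `VISUALIZERS` registry) and optional `params`. A hypothetical config snippet (where exactly the list lives is described in the main [config](../../../configs/README.md)):
+
+```yaml
+- name: BBoxVisualizer
+  params:
+    labels: ["person", "car"]
+    width: 2
+```
+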
+## KeypointVisualizer
+
+**Params**
+
+| Key | Type | Default value | Description |
+| -------------------- | ------------------------------------- | ------------- | --------------------------------------------------------------------------------------------------------------------------------- |
+| visibility_threshold | float | 0.5 | Threshold for visibility of keypoints. If the visibility of a keypoint is below this threshold, it is considered as not visible. |
+| connectivity | list\[tuple\[int, int\]\] \| None | None | List of tuples of keypoint indices that define the connections in the skeleton. |
+| visible_color | str \| tuple\[int, int, int\] | "red" | Color of visible keypoints. |
+| nonvisible_color | str \| tuple\[int, int, int\] \| None | None | Color of nonvisible keypoints. If None, nonvisible keypoints are not drawn. |
+
+**Example**
+
+![kpt_viz_example](../../../../media/example_viz/kpts.png)
+
+## SegmentationVisualizer
+
+**Params**
+
+| Key   | Type                          | Default value | Description                            |
+| ----- | ----------------------------- | ------------- | -------------------------------------- |
+| color | str \| tuple\[int, int, int\] | "#5050FF"     | Color of the segmentation masks.       |
+| alpha | float                         | 0.6           | Alpha value of the segmentation masks. |
+
+**Example**
+
+![seg_viz_example](../../../../media/example_viz/segmentation.png)
+
+## ClassificationVisualizer
+
+**Params**
+
+| Key          | Type                   | Default value | Description                                                                |
+| ------------ | ---------------------- | ------------- | -------------------------------------------------------------------------- |
+| include_plot | bool                   | True          | Whether to include a plot of the class probabilities in the visualization. |
+| color        | tuple\[int, int, int\] | (255, 0, 0)   | Color of the text.                                                         |
+| font_scale   | float                  | 1.0           | Scale of the font.                                                         |
+| thickness    | int                    | 1             | Line thickness of the font.                                                |
+
+**Example**
+
+![class_viz_example](../../../../media/example_viz/class.png)
+
+## MultiVisualizer
+
+Special type of meta-visualizer that combines several visualizers into one. The combined visualizers share the same canvas.
+
+**Params**
+
+| Key | Type | Default value | Description |
+| ----------- | ------------ | ------------- | ----------- |
+| visualizers | list\[dict\] | \[ \] | List of visualizers to combine. Each item in the list is a dictionary with the following keys:
- name (str): Name of the visualizer. Must be a key in the VISUALIZERS registry.
- params (dict): Parameters to pass to the visualizer. | + +**Example** + +Example of combining [KeypointVisualizer](#keypointvisualizer) and [BBoxVisualizer](#bboxvisualizer). + +![multi_viz_example](../../../../media/example_viz/multi.png) diff --git a/luxonis_train/attached_modules/visualizers/__init__.py b/luxonis_train/attached_modules/visualizers/__init__.py new file mode 100644 index 00000000..a5652cb4 --- /dev/null +++ b/luxonis_train/attached_modules/visualizers/__init__.py @@ -0,0 +1,35 @@ +from .base_visualizer import BaseVisualizer +from .bbox_visualizer import BBoxVisualizer +from .classification_visualizer import ClassificationVisualizer +from .keypoint_visualizer import KeypointVisualizer +from .multi_visualizer import MultiVisualizer +from .segmentation_visualizer import SegmentationVisualizer +from .utils import ( + combine_visualizations, + draw_bounding_box_labels, + draw_keypoint_labels, + draw_segmentation_labels, + get_color, + get_unnormalized_images, + preprocess_images, + seg_output_to_bool, + unnormalize, +) + +__all__ = [ + "BBoxVisualizer", + "BaseVisualizer", + "ClassificationVisualizer", + "KeypointVisualizer", + "MultiVisualizer", + "SegmentationVisualizer", + "combine_visualizations", + "draw_bounding_box_labels", + "draw_keypoint_labels", + "draw_segmentation_labels", + "get_color", + "get_unnormalized_images", + "preprocess_images", + "seg_output_to_bool", + "unnormalize", +] diff --git a/luxonis_train/attached_modules/visualizers/base_visualizer.py b/luxonis_train/attached_modules/visualizers/base_visualizer.py new file mode 100644 index 00000000..050c9f4a --- /dev/null +++ b/luxonis_train/attached_modules/visualizers/base_visualizer.py @@ -0,0 +1,66 @@ +from abc import abstractmethod + +from torch import Tensor +from typing_extensions import TypeVarTuple, Unpack + +from luxonis_train.attached_modules import BaseAttachedModule +from luxonis_train.utils.registry import VISUALIZERS +from luxonis_train.utils.types import Labels, Packet + +Ts = TypeVarTuple("Ts") + + +class BaseVisualizer( + BaseAttachedModule[Unpack[Ts]], + register=False, + registry=VISUALIZERS, +): + """A base class for all visualizers. + + This class defines the basic interface for all visualizers. It utilizes automatic + registration of defined subclasses to the L{VISUALIZERS} registry. + """ + + @abstractmethod + def forward( + self, + label_canvas: Tensor, + prediction_canvas: Tensor, + *args: Unpack[Ts], + ) -> Tensor | tuple[Tensor, Tensor] | tuple[Tensor, list[Tensor]] | list[Tensor]: + """Forward pass of the visualizer. + + Takes an image and the prepared inputs from the `prepare` method and + produces visualizations. Visualizations can be either: + + - A single image (I{e.g.} for classification, weight visualization). + - A tuple of two images, representing (labels, predictions) (I{e.g.} for + bounding boxes, keypoints). + - A tuple of an image and a list of images, + representing (labels, multiple visualizations) (I{e.g.} for segmentation, + depth estimation). + - A list of images, representing unrelated visualizations. + + @type label_canvas: Tensor + @param label_canvas: An image to draw the labels on. + @type prediction_canvas: Tensor + @param prediction_canvas: An image to draw the predictions on. + @type args: Unpack[Ts] + @param args: Prepared inputs from the `prepare` method. + + @rtype: Tensor | tuple[Tensor, Tensor] | tuple[Tensor, list[Tensor]] | list[Tensor] + @return: Visualizations. + + @raise IncompatibleException: If the inputs are not compatible with the module. 
+ """ + ... + + def run( + self, + label_canvas: Tensor, + prediction_canvas: Tensor, + inputs: Packet[Tensor], + labels: Labels, + ) -> Tensor | tuple[Tensor, Tensor] | tuple[Tensor, list[Tensor]]: + self.validate(inputs, labels) + return self(label_canvas, prediction_canvas, *self.prepare(inputs, labels)) diff --git a/luxonis_train/attached_modules/visualizers/bbox_visualizer.py b/luxonis_train/attached_modules/visualizers/bbox_visualizer.py new file mode 100644 index 00000000..14dd1ab9 --- /dev/null +++ b/luxonis_train/attached_modules/visualizers/bbox_visualizer.py @@ -0,0 +1,201 @@ +import logging + +import torch +from torch import Tensor + +from luxonis_train.utils.types import BBoxProtocol, LabelType + +from .base_visualizer import BaseVisualizer +from .utils import ( + Color, + draw_bounding_box_labels, + draw_bounding_boxes, + get_color, +) + + +class BBoxVisualizer(BaseVisualizer[list[Tensor], Tensor]): + def __init__( + self, + labels: dict[int, str] | list[str] | None = None, + draw_labels: bool = True, + colors: dict[str, Color] | list[Color] | None = None, + fill: bool = False, + width: int | None = None, + font: str | None = None, + font_size: int | None = None, + **kwargs, + ): + """Visualizer for bounding box predictions. + + Creates a visualization of the bounding box predictions and labels. + + @type labels: dict[int, str] | list[str] | None + @param labels: Either a dictionary mapping class indices to names, or a list of + names. If list is provided, the label mapping is done by index. By default, + no labels are drawn. + @type draw_labels: bool + @param draw_labels: Whether or not to draw labels. Defaults to C{True}. + @type colors: dict[int, Color] | list[Color] | None + @param colors: Either a dictionary mapping class indices to colors, or a list of + colors. If list is provided, the color mapping is done by index. By default, + random colors are used. + @type fill: bool + @param fill: Whether or not to fill the bounding boxes. Defaults to C{False}. + @type width: int | None + @param width: The width of the bounding box lines. Defaults to C{1}. + @type font: str | None + @param font: A filename containing a TrueType font. Defaults to C{None}. + @type font_size: int | None + @param font_size: The font size to use for the labels. Defaults to C{None}. 
+ """ + super().__init__( + required_labels=[LabelType.BOUNDINGBOX], protocol=BBoxProtocol, **kwargs + ) + if isinstance(labels, list): + labels = {i: label for i, label in enumerate(labels)} + + self.labels = labels or { + i: label for i, label in enumerate(self.node.class_names) + } + if colors is None: + colors = {label: get_color(i) for i, label in self.labels.items()} + if isinstance(colors, list): + colors = {self.labels[i]: color for i, color in enumerate(colors)} + self.colors = colors + self.fill = fill + self.width = width + self.font = font + self.font_size = font_size + self.draw_labels = draw_labels + + @staticmethod + def draw_targets( + canvas: Tensor, + targets: Tensor, + width: int | None = None, + colors: list[Color] | None = None, + labels: list[str] | None = None, + label_dict: dict[int, str] | None = None, + color_dict: dict[str, Color] | None = None, + draw_labels: bool = True, + **kwargs, + ) -> Tensor: + viz = torch.zeros_like(canvas) + + for i in range(len(canvas)): + target = targets[targets[:, 0] == i] + target_classes = target[:, 1].int() + cls_labels = labels or ( + [label_dict[int(c)] for c in target_classes] + if draw_labels and label_dict is not None + else None + ) + cls_colors = colors or ( + [color_dict[label_dict[int(c)]] for c in target_classes] + if color_dict is not None and label_dict is not None + else None + ) + + *_, H, W = canvas.shape + width = width or max(1, int(min(H, W) / 100)) + viz[i] = draw_bounding_box_labels( + canvas[i].clone(), + target[:, 2:], + width=width, + labels=cls_labels, + colors=cls_colors, + **kwargs, + ).to(canvas.device) + + return viz + + @staticmethod + def draw_predictions( + canvas: Tensor, + predictions: list[Tensor], + width: int | None = None, + colors: list[Color] | None = None, + labels: list[str] | None = None, + label_dict: dict[int, str] | None = None, + color_dict: dict[str, Color] | None = None, + draw_labels: bool = True, + **kwargs, + ) -> Tensor: + viz = torch.zeros_like(canvas) + + for i in range(len(canvas)): + prediction = predictions[i] + prediction_classes = prediction[..., 5].int() + cls_labels = labels or ( + [label_dict[int(c)] for c in prediction_classes] + if draw_labels and label_dict is not None + else None + ) + cls_colors = colors or ( + [color_dict[label_dict[int(c)]] for c in prediction_classes] + if color_dict is not None and label_dict is not None + else None + ) + + *_, H, W = canvas.shape + width = width or max(1, int(min(H, W) / 100)) + try: + viz[i] = draw_bounding_boxes( + canvas[i].clone(), + prediction[:, :4], + width=width, + labels=cls_labels, + colors=cls_colors, + **kwargs, + ) + except ValueError as e: + logging.getLogger(__name__).warning( + f"Failed to draw bounding boxes: {e}. Skipping visualization." + ) + viz = canvas + return viz + + def forward( + self, + label_canvas: Tensor, + prediction_canvas: Tensor, + predictions: list[Tensor], + targets: Tensor, + ) -> tuple[Tensor, Tensor]: + """Creates a visualization of the bounding box predictions and labels. + + @type label_canvas: Tensor + @param label_canvas: The canvas containing the labels. + @type prediction_canvas: Tensor + @param prediction_canvas: The canvas containing the predictions. + @type prediction: Tensor + @param prediction: The predicted bounding boxes. The shape should be [N, 6], + where N is the number of bounding boxes and the last dimension is [x1, y1, + x2, y2, class, conf]. + @type targets: Tensor + @param targets: The target bounding boxes. 
+ """ + targets_viz = self.draw_targets( + label_canvas, + targets, + color_dict=self.colors, + label_dict=self.labels, + draw_labels=self.draw_labels, + fill=self.fill, + font=self.font, + font_size=self.font_size, + width=self.width, + ) + predictions_viz = self.draw_predictions( + prediction_canvas, + predictions, + label_dict=self.labels, + color_dict=self.colors, + draw_labels=self.draw_labels, + fill=self.fill, + font=self.font, + font_size=self.font_size, + width=self.width, + ) + return targets_viz, predictions_viz.to(targets_viz.device) diff --git a/luxonis_train/attached_modules/visualizers/classification_visualizer.py b/luxonis_train/attached_modules/visualizers/classification_visualizer.py new file mode 100644 index 00000000..e5920d21 --- /dev/null +++ b/luxonis_train/attached_modules/visualizers/classification_visualizer.py @@ -0,0 +1,97 @@ +import cv2 +import matplotlib.pyplot as plt +import numpy as np +import torch +from torch import Tensor + +from .base_visualizer import BaseVisualizer +from .utils import ( + figure_to_torch, + numpy_to_torch_img, + torch_img_to_numpy, +) + + +class ClassificationVisualizer(BaseVisualizer[Tensor, Tensor]): + def __init__( + self, + include_plot: bool = True, + font_scale: float = 1.0, + color: tuple[int, int, int] = (255, 0, 0), + thickness: int = 1, + **kwargs, + ): + """Visualizer for classification tasks. + + @type include_plot: bool + @param include_plot: Whether to include a plot of the class probabilities in the + visualization. Defaults to C{True}. + """ + super().__init__(**kwargs) + self.include_plot = include_plot + self.font_scale = font_scale + self.color = color + self.thickness = thickness + + def _get_class_name(self, pred: Tensor) -> str: + idx = int((pred.argmax()).item()) + if self.node.class_names is None: + return str(idx) + return self.node.class_names[idx] + + def _generate_plot(self, prediction: Tensor, width: int, height: int) -> Tensor: + prediction = prediction.softmax(-1).detach().cpu().numpy() + fig, ax = plt.subplots(figsize=(width / 100, height / 100)) + ax.bar(np.arange(len(prediction)), prediction) + ax.set_xticks(np.arange(len(prediction))) + if self.node.class_names is not None: + ax.set_xticklabels(self.node.class_names, rotation=90) + else: + ax.set_xticklabels(np.arange(1, len(prediction) + 1)) + ax.set_ylim(0, 1) + ax.set_xlabel("Class") + ax.set_ylabel("Probability") + ax.grid(True) + return figure_to_torch(fig, width, height) + + def forward( + self, + label_canvas: Tensor, + prediction_canvas: Tensor, + predictions: Tensor, + labels: Tensor, + ) -> Tensor | tuple[Tensor, Tensor]: + overlay = torch.zeros_like(label_canvas) + plots = torch.zeros_like(prediction_canvas) + for i in range(len(overlay)): + prediction = predictions[i] + gt = self._get_class_name(labels[i]) + arr = torch_img_to_numpy(label_canvas[i].clone()) + curr_class = self._get_class_name(prediction) + arr = cv2.putText( + arr, + f"GT: {gt}", + (5, 10), + cv2.FONT_HERSHEY_SIMPLEX, + self.font_scale, + self.color, + self.thickness, + ) + arr = cv2.putText( + arr, + f"Pred: {curr_class}", + (5, 30), + cv2.FONT_HERSHEY_SIMPLEX, + self.font_scale, + self.color, + self.thickness, + ) + overlay[i] = numpy_to_torch_img(arr) + if self.include_plot: + plots[i] = self._generate_plot( + prediction, prediction_canvas.shape[3], prediction_canvas.shape[2] + ) + + if self.include_plot: + return overlay, plots + return overlay diff --git a/luxonis_train/attached_modules/visualizers/keypoint_visualizer.py 
b/luxonis_train/attached_modules/visualizers/keypoint_visualizer.py new file mode 100644 index 00000000..beebaf3f --- /dev/null +++ b/luxonis_train/attached_modules/visualizers/keypoint_visualizer.py @@ -0,0 +1,123 @@ +from copy import deepcopy + +import torch +from torch import Tensor + +from luxonis_train.utils.types import ( + Labels, + LabelType, + Packet, +) + +from .base_visualizer import BaseVisualizer +from .utils import ( + Color, + draw_keypoint_labels, + draw_keypoints, +) + + +class KeypointVisualizer(BaseVisualizer[list[Tensor], Tensor]): + def __init__( + self, + visibility_threshold: float = 0.5, + connectivity: list[tuple[int, int]] | None = None, + visible_color: Color = "red", + nonvisible_color: Color | None = None, + **kwargs, + ): + """Visualizer for keypoints. + + @type visibility_threshold: float + @param visibility_threshold: Threshold for visibility of keypoints. If the + visibility of a keypoint is below this threshold, it is considered as not + visible. Defaults to C{0.5}. + @type connectivity: list[tuple[int, int]] | None + @param connectivity: List of tuples of keypoint indices that define the + connections in the skeleton. Defaults to C{None}. + @type visible_color: L{Color} + @param visible_color: Color of visible keypoints. Either a string or a tuple of + RGB values. Defaults to C{"red"}. + @type nonvisible_color: L{Color} | None + @param nonvisible_color: Color of nonvisible keypoints. If C{None}, nonvisible + keypoints are not drawn. Defaults to C{None}. + """ + super().__init__(required_labels=[LabelType.KEYPOINT], **kwargs) + self.visibility_threshold = visibility_threshold + self.connectivity = connectivity + self.visible_color = visible_color + self.nonvisible_color = nonvisible_color + + def prepare( + self, output: Packet[Tensor], label: Labels + ) -> tuple[list[Tensor], Tensor]: + return output["keypoints"], label[LabelType.KEYPOINT] + + @staticmethod + def draw_predictions( + canvas: Tensor, + predictions: list[Tensor], + nonvisible_color: Color | None = None, + visibility_threshold: float = 0.5, + **kwargs, + ) -> Tensor: + viz = torch.zeros_like(canvas) + for i in range(len(canvas)): + prediction = predictions[i][:, 1:] + mask = prediction[..., 2] < visibility_threshold + visible_kpts = prediction[..., :2] * (~mask).unsqueeze(-1).float() + viz[i] = draw_keypoints( + canvas[i].clone(), + visible_kpts[..., :2], + **kwargs, + ) + if nonvisible_color is not None: + _kwargs = deepcopy(kwargs) + _kwargs["colors"] = nonvisible_color + nonvisible_kpts = prediction[..., :2] * mask.unsqueeze(-1).float() + viz[i] = draw_keypoints( + viz[i].clone(), + nonvisible_kpts[..., :2], + **_kwargs, + ) + + return viz + + @staticmethod + def draw_targets(canvas: Tensor, targets: Tensor, **kwargs) -> Tensor: + viz = torch.zeros_like(canvas) + for i in range(len(canvas)): + target = targets[targets[:, 0] == i][:, 1:] + viz[i] = draw_keypoint_labels( + canvas[i].clone(), + target, + **kwargs, + ) + + return viz + + def forward( + self, + label_canvas: Tensor, + prediction_canvas: Tensor, + predictions: list[Tensor], + targets: Tensor, + **kwargs, + ) -> tuple[Tensor, Tensor]: + target_viz = self.draw_targets( + label_canvas, + targets, + colors=self.visible_color, + connectivity=self.connectivity, + **kwargs, + ) + pred_viz = self.draw_predictions( + prediction_canvas, + predictions, + connectivity=self.connectivity, + colors=self.visible_color, + nonvisible_color=self.nonvisible_color, + visibility_threshold=self.visibility_threshold, + **kwargs, + ) + return 
target_viz, pred_viz diff --git a/luxonis_train/attached_modules/visualizers/multi_visualizer.py b/luxonis_train/attached_modules/visualizers/multi_visualizer.py new file mode 100644 index 00000000..2fee8e1f --- /dev/null +++ b/luxonis_train/attached_modules/visualizers/multi_visualizer.py @@ -0,0 +1,57 @@ +from torch import Tensor + +from luxonis_train.utils.registry import VISUALIZERS +from luxonis_train.utils.types import ( + Kwargs, + Labels, + Packet, +) + +from .base_visualizer import BaseVisualizer + + +class MultiVisualizer(BaseVisualizer[Packet[Tensor], Labels]): + """Special type of visualizer that combines multiple visualizers together. + + All the visualizers are applied in the order they are provided and they all draw on + the same canvas. + + @type visualizers: list[Kwargs] + @param visualizers: List of visualizers to combine. + Each item in the list is a dictionary with the following keys:: + + >>> {"name": "name_of_the_visualizer", + "params": {"param1": value1, "param2": value2, ...}} + """ + + def __init__(self, visualizers: list[Kwargs], **kwargs): + super().__init__(**kwargs) + self.visualizers = [] + for item in visualizers: + visualizer_params = item.get("params", {}) + visualizer = VISUALIZERS.get(item["name"])(**visualizer_params, **kwargs) + self.visualizers.append(visualizer) + + def prepare( + self, output: Packet[Tensor], label: Labels, idx: int = 0 + ) -> tuple[Packet[Tensor], Labels]: + self._idx = idx + return output, label + + def forward( + self, + label_canvas: Tensor, + prediction_canvas: Tensor, + outputs: Packet[Tensor], + labels: Labels, + ) -> tuple[Tensor, Tensor]: + for visualizer in self.visualizers: + match visualizer.run(label_canvas, prediction_canvas, outputs, labels): + case Tensor(data=prediction_viz): + prediction_canvas = prediction_viz + case (Tensor(data=label_viz), Tensor(data=prediction_viz)): + label_canvas = label_viz + prediction_canvas = prediction_viz + case _: + raise NotImplementedError + return label_canvas, prediction_canvas diff --git a/luxonis_train/attached_modules/visualizers/segmentation_visualizer.py b/luxonis_train/attached_modules/visualizers/segmentation_visualizer.py new file mode 100644 index 00000000..6d8f3c79 --- /dev/null +++ b/luxonis_train/attached_modules/visualizers/segmentation_visualizer.py @@ -0,0 +1,158 @@ +import logging + +import torch +from torch import Tensor + +from luxonis_train.utils.types import Labels, LabelType, Packet, SegmentationProtocol + +from .base_visualizer import BaseVisualizer +from .utils import ( + Color, + draw_segmentation_labels, + draw_segmentation_masks, + get_color, + seg_output_to_bool, +) + +logger = logging.getLogger(__name__) +log_disable = False + + +class SegmentationVisualizer(BaseVisualizer[Tensor, Tensor]): + def __init__( + self, + colors: Color | list[Color] = "#5050FF", + background_class: int | None = None, + alpha: float = 0.6, + **kwargs, + ): + """Visualizer for segmentation tasks. + + @type colors: L{Color} | list[L{Color}] + @param colors: Color of the segmentation masks. Defaults to C{"#5050FF"}. + @type alpha: float + @param alpha: Alpha value of the segmentation masks. Defaults to C{0.6}. 
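+        @type background_class: int | None
+        @param background_class: Index of the background class. If set, the
+            background class is drawn in black (C{"#000000"}). Defaults to C{None}.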
+ """ + super().__init__( + protocol=SegmentationProtocol, + required_labels=[LabelType.SEGMENTATION], + **kwargs, + ) + if not isinstance(colors, list): + colors = [colors] + + self.colors = colors + self.background_class = background_class + self.alpha = alpha + + def prepare(self, output: Packet[Tensor], label: Labels) -> tuple[Tensor, Tensor]: + return output["segmentation"][0], label[LabelType.SEGMENTATION] + + @staticmethod + def draw_predictions( + canvas: Tensor, + predictions: Tensor, + colors: list[Color] | None = None, + background_class: int | None = None, + **kwargs, + ) -> Tensor: + colors = SegmentationVisualizer._adjust_colors( + predictions, colors, background_class + ) + viz = torch.zeros_like(canvas) + for i in range(len(canvas)): + prediction = predictions[i] + mask = seg_output_to_bool(prediction) + mask = mask.to(canvas.device) + viz[i] = draw_segmentation_masks( + canvas[i].clone(), mask, colors=colors, **kwargs + ) + return viz + + @staticmethod + def draw_targets( + canvas: Tensor, + targets: Tensor, + colors: list[Color] | None = None, + background_class: int | None = None, + **kwargs, + ) -> Tensor: + colors = SegmentationVisualizer._adjust_colors( + targets, colors, background_class + ) + viz = torch.zeros_like(canvas) + for i in range(len(viz)): + target = targets[i] + viz[i] = draw_segmentation_labels( + canvas[i].clone(), + target, + colors=colors, + **kwargs, + ).to(canvas.device) + + return viz + + def forward( + self, + label_canvas: Tensor, + prediction_canvas: Tensor, + predictions: Tensor, + targets: Tensor, + **kwargs, + ) -> tuple[Tensor, Tensor]: + """Creates a visualization of the segmentation predictions and labels. + + @type label_canvas: Tensor + @param label_canvas: The canvas to draw the labels on. + @type prediction_canvas: Tensor + @param prediction_canvas: The canvas to draw the predictions on. + @type predictions: Tensor + @param predictions: The predictions to visualize. + @type targets: Tensor + @param targets: The targets to visualize. + @rtype: tuple[Tensor, Tensor] + @return: A tuple of the label and prediction visualizations. + """ + + targets_vis = self.draw_targets( + label_canvas, + targets, + colors=self.colors, + alpha=self.alpha, + background_class=self.background_class, + **kwargs, + ) + predictions_vis = self.draw_predictions( + prediction_canvas, + predictions, + colors=self.colors, + alpha=self.alpha, + background_class=self.background_class, + **kwargs, + ) + return targets_vis, predictions_vis + + @staticmethod + def _adjust_colors( + data: Tensor, + colors: list[Color] | None = None, + background_class: int | None = None, + ) -> list[Color]: + global log_disable + n_classes = data.size(1) + if colors is not None and len(colors) == n_classes: + return colors + + if not log_disable: + if colors is None: + logger.warning("No colors provided. Using random colors instead.") + elif data.size(1) != len(colors): + logger.warning( + f"Number of colors ({len(colors)}) does not match number of " + f"classes ({data.size(1)}). Using random colors instead." 
+ ) + log_disable = True + colors = [get_color(i) for i in range(data.size(1))] + if background_class is not None: + colors[background_class] = "#000000" + return colors diff --git a/luxonis_train/attached_modules/visualizers/utils.py b/luxonis_train/attached_modules/visualizers/utils.py new file mode 100644 index 00000000..52431204 --- /dev/null +++ b/luxonis_train/attached_modules/visualizers/utils.py @@ -0,0 +1,425 @@ +import colorsys +import io +from typing import Literal + +import cv2 +import matplotlib.pyplot as plt +import numpy as np +import numpy.typing as npt +import torch +import torchvision.transforms.functional as F +import torchvision.transforms.functional as TF +from matplotlib.figure import Figure +from PIL import Image +from torch import Tensor +from torchvision.ops import box_convert +from torchvision.utils import ( + draw_bounding_boxes, + draw_keypoints, + draw_segmentation_masks, +) + +from luxonis_train.utils.config import Config + +Color = str | tuple[int, int, int] +"""Color type alias. + +Can be either a string (e.g. "red", "#FF5512") or a tuple of RGB values. +""" + + +def figure_to_torch(fig: Figure, width: int, height: int) -> Tensor: + """Converts a matplotlib `Figure` to a `Tensor`.""" + buf = io.BytesIO() + fig.savefig(buf, format="png", bbox_inches="tight", pad_inches=0) + buf.seek(0) + img_arr = Image.open(buf).convert("RGB") + img_arr = img_arr.resize((width, height)) + img_tensor = torch.tensor(np.array(img_arr)).permute(2, 0, 1) + buf.close() + plt.close(fig) + return img_tensor + + +def torch_img_to_numpy( + img: Tensor, reverse_colors: bool = False +) -> npt.NDArray[np.uint8]: + """Converts a torch image (CHW) to a numpy array (HWC). Optionally also converts + colors. + + @type img: Tensor + @param img: Torch image (CHW) + @type reverse_colors: bool + @param reverse_colors: Whether to reverse colors (RGB to BGR). Defaults to False. + @rtype: npt.NDArray[np.uint8] + @return: Numpy image (HWC) + """ + if img.is_floating_point(): + img = img.mul(255).int() + img = torch.clamp(img, 0, 255) + arr = img.detach().cpu().numpy().astype(np.uint8).transpose(1, 2, 0) + arr = np.ascontiguousarray(arr) + if reverse_colors: + arr = cv2.cvtColor(arr, cv2.COLOR_BGR2RGB) + return arr + + +def numpy_to_torch_img(img: np.ndarray) -> Tensor: + """Converts numpy image (HWC) to torch image (CHW).""" + return torch.from_numpy(img).permute(2, 0, 1) + + +def preprocess_images( + imgs: Tensor, + mean: list[float] | float | None = None, + std: list[float] | float | None = None, +) -> Tensor: + """Performs preprocessing on a batch of images. + + Preprocessing includes unnormalizing and converting to uint8. + + @type imgs: Tensor + @param imgs: Batch of images. + @type mean: list[float] | float | None + @param mean: Mean used for unnormalization. Defaults to C{None}. + @type std: list[float] | float | None + @param std: Std used for unnormalization. Defaults to C{None}. + @rtype: Tensor + @return: Batch of preprocessed images. + """ + out_imgs = [] + for i in range(imgs.shape[0]): + curr_img = imgs[i] + if mean is not None or std is not None: + curr_img = unnormalize(curr_img, to_uint8=True, mean=mean, std=std) + else: + curr_img = curr_img.to(torch.uint8) + + out_imgs.append(curr_img) + + return torch.stack(out_imgs) + + +def draw_segmentation_labels( + img: Tensor, + label: Tensor, + alpha: float = 0.4, + colors: Color | list[Color] | None = None, +) -> Tensor: + """Draws segmentation labels on an image. + + @type img: Tensor + @param img: Image to draw on. 
+ @type label: Tensor + @param label: Segmentation label. + @type alpha: float + @param alpha: Alpha value for blending. Defaults to C{0.4}. + @rtype: Tensor + @return: Image with segmentation labels drawn on. + """ + masks = label.bool() + masks = masks.cpu() + img = img.cpu() + return draw_segmentation_masks(img, masks, alpha=alpha, colors=colors) + + +def draw_bounding_box_labels(img: Tensor, label: Tensor, **kwargs) -> Tensor: + """Draws bounding box labels on an image. + + @type img: Tensor + @param img: Image to draw on. + @type label: Tensor + @param label: Bounding box label. The shape should be (n_instances, 4), where the + last dimension is (x, y, w, h). + @type kwargs: dict + @param kwargs: Additional arguments to pass to + L{torchvision.utils.draw_bounding_boxes}. + @rtype: Tensor + @return: Image with bounding box labels drawn on. + """ + _, H, W = img.shape + bboxs = box_convert(label, "xywh", "xyxy") + bboxs[:, 0::2] *= W + bboxs[:, 1::2] *= H + return draw_bounding_boxes(img, bboxs, **kwargs) + + +def draw_keypoint_labels(img: Tensor, label: Tensor, **kwargs) -> Tensor: + """Draws keypoint labels on an image. + + @type img: Tensor + @param img: Image to draw on. + @type label: Tensor + @param label: Keypoint label. The shape should be (n_instances, 3), where the last + dimension is (x, y, visibility). + @type kwargs: dict + @param kwargs: Additional arguments to pass to L{torchvision.utils.draw_keypoints}. + @rtype: Tensor + @return: Image with keypoint labels drawn on. + """ + _, H, W = img.shape + keypoints_unflat = label[:, 1:].reshape(-1, 3) + keypoints_points = keypoints_unflat[:, :2] + keypoints_points[:, 0] *= W + keypoints_points[:, 1] *= H + + n_instances = label.shape[0] + if n_instances == 0: + out_keypoints = keypoints_points.reshape((-1, 2)).unsqueeze(0).int() + else: + out_keypoints = keypoints_points.reshape((n_instances, -1, 2)).int() + + return draw_keypoints(img, out_keypoints, **kwargs) + + +def seg_output_to_bool(data: Tensor, binary_threshold: float = 0.5) -> Tensor: + """Converts seg head output to 2D boolean mask for visualization.""" + masks = torch.empty_like(data, dtype=torch.bool, device=data.device) + if data.shape[0] == 1: + classes = torch.sigmoid(data) + masks[0] = classes >= binary_threshold + else: + classes = torch.argmax(data, dim=0) + for i in range(masks.shape[0]): + masks[i] = classes == i + return masks + + +def unnormalize( + img: Tensor, + mean: list[float] | float | None = None, + std: list[float] | float | None = None, + to_uint8: bool = False, +) -> Tensor: + """Unnormalizes an image back to original values, optionally converts it to uint8. + + @type img: Tensor + @param img: Image to unnormalize. + @type mean: list[float] | float | None + @param mean: Mean used for unnormalization. Defaults to C{None}. + @type std: list[float] | float | None + @param std: Std used for unnormalization. Defaults to C{None}. + @type to_uint8: bool + @param to_uint8: Whether to convert to uint8. Defaults to C{False}. + @rtype: Tensor + @return: Unnormalized image. 
+ """ + mean = mean or 0 + std = std or 1 + if isinstance(mean, float): + mean = [mean] * img.shape[0] + if isinstance(std, float): + std = [std] * img.shape[0] + mean_tensor = torch.tensor(mean, device=img.device) + std_tensor = torch.tensor(std, device=img.device) + new_mean = -mean_tensor / std_tensor + new_std = 1 / std_tensor + out_img = F.normalize(img, mean=new_mean.tolist(), std=new_std.tolist()) + if to_uint8: + out_img = torch.clamp(out_img.mul(255), 0, 255).to(torch.uint8) + return out_img + + +def get_unnormalized_images(cfg: Config, images: Tensor) -> Tensor: + normalize_params = cfg.trainer.preprocessing.normalize.params + mean = std = None + if cfg.trainer.preprocessing.normalize.active: + mean = normalize_params.get("mean", [0.485, 0.456, 0.406]) + std = normalize_params.get("std", [0.229, 0.224, 0.225]) + return preprocess_images( + images, + mean=mean, + std=std, + ) + + +def number_to_hsl(seed: int) -> tuple[float, float, float]: + """Map a number to a distinct HSL color.""" + # Use a prime number to spread the hues more evenly + # and ensure they are visually distinguishable + hue = (seed * 157) % 360 + saturation = 0.8 # Fixed saturation + lightness = 0.5 # Fixed lightness + return (hue, saturation, lightness) + + +def hsl_to_rgb(hsl: tuple[float, float, float]) -> Color: + """Convert HSL color to RGB.""" + r, g, b = colorsys.hls_to_rgb(hsl[0] / 360, hsl[2], hsl[1]) + return int(r * 255), int(g * 255), int(b * 255) + + +def get_color(seed: int) -> Color: + """Generates a random color from a seed. + + @type seed: int + @param seed: Seed to use for the generator. + @rtype: L{Color} + @return: Generated color. + """ + return hsl_to_rgb(number_to_hsl(seed + 45)) + + +# TODO: Support native visualizations +# NOTE: Ignore for now, native visualizations not a priority. +# +# It could be beneficial in the long term to make the visualization more abstract. +# Reason for that is that certain services, e.g. WandB, have their native way +# of visualizing things. So by restricting ourselves to only produce bitmap images +# for logging, we are limiting ourselves in how we can utilize those services. +# (I know we want to leave WandB and I don't know whether mlcloud offers anything +# similar, but it might save us some time in the future).') +# +# The idea would be that every visualizer would not only produce the bitmap +# images, but also some standardized representation of the visualizations. +# This would be sent to the logger, which would then decide how to log it. +# By default, it would log it as a bitmap image, but if we know we are logging +# to (e.g.) WandB, we could use the native WandB visualizations. +# Since we already have to check what logging is being used (to call the correct +# service), it should be somehow easy to implement. +# +# The more specific implementation/protocol could be, that every instance +# of `LuxonisVisualizer` would produce a tuple of +# (bitmap_visualizations, structured_visualizations). +# +# The `bitmap_visualizations` would be one of the following: +# - a single tensor (e.g. image) +# - in this case, the tensor would be logged as a bitmap image +# - a tuple of two tensors +# - in this case, the first tensor is considered labels and the second predictions +# - e.g. 
+def get_unnormalized_images(cfg: Config, images: Tensor) -> Tensor:
+    normalize_params = cfg.trainer.preprocessing.normalize.params
+    mean = std = None
+    if cfg.trainer.preprocessing.normalize.active:
+        mean = normalize_params.get("mean", [0.485, 0.456, 0.406])
+        std = normalize_params.get("std", [0.229, 0.224, 0.225])
+    return preprocess_images(
+        images,
+        mean=mean,
+        std=std,
+    )
+
+
+def number_to_hsl(seed: int) -> tuple[float, float, float]:
+    """Map a number to a distinct HSL color."""
+    # Use a prime number to spread the hues more evenly
+    # and ensure they are visually distinguishable
+    hue = (seed * 157) % 360
+    saturation = 0.8  # Fixed saturation
+    lightness = 0.5  # Fixed lightness
+    return (hue, saturation, lightness)
+
+
+def hsl_to_rgb(hsl: tuple[float, float, float]) -> Color:
+    """Convert HSL color to RGB."""
+    r, g, b = colorsys.hls_to_rgb(hsl[0] / 360, hsl[2], hsl[1])
+    return int(r * 255), int(g * 255), int(b * 255)
+
+
+def get_color(seed: int) -> Color:
+    """Generates a random color from a seed.
+
+    @type seed: int
+    @param seed: Seed to use for the generator.
+    @rtype: L{Color}
+    @return: Generated color.
+    """
+    return hsl_to_rgb(number_to_hsl(seed + 45))
+
+
+# TODO: Support native visualizations
+# NOTE: Ignore for now, native visualizations are not a priority.
+#
+# It could be beneficial in the long term to make the visualization more abstract.
+# The reason for that is that certain services, e.g. WandB, have their native way
+# of visualizing things. So by restricting ourselves to only produce bitmap images
+# for logging, we are limiting ourselves in how we can utilize those services.
+# (I know we want to leave WandB and I don't know whether mlcloud offers anything
+# similar, but it might save us some time in the future.)
+#
+# The idea would be that every visualizer would not only produce the bitmap
+# images, but also some standardized representation of the visualizations.
+# This would be sent to the logger, which would then decide how to log it.
+# By default, it would log it as a bitmap image, but if we know we are logging
+# to (e.g.) WandB, we could use the native WandB visualizations.
+# Since we already have to check what logging is being used (to call the correct
+# service), it should be somewhat easy to implement.
+#
+# The more specific implementation/protocol could be that every instance
+# of `LuxonisVisualizer` would produce a tuple of
+# (bitmap_visualizations, structured_visualizations).
+#
+# The `bitmap_visualizations` would be one of the following:
+#   - a single tensor (e.g. image)
+#     - in this case, the tensor would be logged as a bitmap image
+#   - a tuple of two tensors
+#     - in this case, the first tensor is considered labels and the second predictions
+#     - e.g. GT and predicted segmentation mask
+#   - a tuple of a tensor and a list of tensors
+#     - in this case, the first is considered labels
+#       and the second unrelated predictions
+#   - an iterable of tensors
+#     - in this case, the tensors are considered unrelated predictions
+#
+# The `structured_visualizations` would have a similar format, but instead of
+# tensors, it would consist of some structured data (e.g. dict of lists or something).
+# We could even create a validation schema for this to enforce the structure.
+# We would then just have to support this new structure in the logger (`LuxonisTracker`).
+#
+# TEST:
+def combine_visualizations(
+    visualization: Tensor | tuple[Tensor, Tensor] | tuple[Tensor, list[Tensor]],
+) -> Tensor:
+    """Default way of combining multiple visualizations into one final image."""
+
+    def resize_to_match(
+        fst: Tensor,
+        snd: Tensor,
+        *,
+        keep_size: Literal["larger", "smaller", "first", "second"] = "larger",
+        resize_along: Literal["width", "height", "exact"] = "height",
+        keep_aspect_ratio: bool = True,
+    ):
+        """Resizes two images so they have the same size.
+
+        Resizes two images so they can be concatenated together. It's possible to
+        configure how the images are resized.
+
+        @type fst: Tensor[C, H, W]
+        @param fst: First image.
+        @type snd: Tensor[C, H, W]
+        @param snd: Second image.
+        @type keep_size: Literal["larger", "smaller", "first", "second"]
+        @param keep_size: Which size to keep. Options are:
+            - "larger": Resize the smaller image to match the size of the larger image.
+            - "smaller": Resize the larger image to match the size of the smaller image.
+            - "first": Resize the second image to match the size of the first image.
+            - "second": Resize the first image to match the size of the second image.
+
+        @type resize_along: Literal["width", "height", "exact"]
+        @param resize_along: Which dimensions to match. Options are:
+            - "width": Resize images along the width dimension.
+            - "height": Resize images along the height dimension.
+            - "exact": Resize images to match both width and height dimensions.
+
+        @type keep_aspect_ratio: bool
+        @param keep_aspect_ratio: Whether to keep the aspect ratio of the images.
+            Only takes effect when the "exact" option is selected for the
+            C{resize_along} argument. Defaults to C{True}.
+
+        @rtype: tuple[Tensor[C, H, W], Tensor[C, H, W]]
+        @return: Resized images.
+        """
+        if resize_along not in ["width", "height", "exact"]:
+            raise ValueError(
+                f"Invalid value for resize_along: {resize_along}. "
+                "Valid options are: 'width', 'height', 'exact'."
+            )
+
+        *_, h1, w1 = fst.shape
+
+        *_, h2, w2 = snd.shape
+
+        if keep_size == "larger":
+            target_width = max(w1, w2)
+            target_height = max(h1, h2)
+        elif keep_size == "smaller":
+            target_width = min(w1, w2)
+            target_height = min(h1, h2)
+        elif keep_size == "first":
+            target_width = w1
+            target_height = h1
+        elif keep_size == "second":
+            target_width = w2
+            target_height = h2
+        else:
+            raise ValueError(
+                f"Invalid value for keep_size: {keep_size}. "
+                "Valid options are: 'larger', 'smaller', 'first', 'second'."
+ ) + + if resize_along == "width": + target_height = h1 if keep_size in ["first", "larger"] else h2 + elif resize_along == "height": + target_width = w1 if keep_size in ["first", "larger"] else w2 + + if keep_aspect_ratio: + ar1 = w1 / h1 + ar2 = w2 / h2 + if resize_along == "width" or ( + resize_along == "exact" and target_width / target_height > ar1 + ): + target_height_fst = int(target_width / ar1) + target_width_fst = target_width + else: + target_width_fst = int(target_height * ar1) + target_height_fst = target_height + if resize_along == "width" or ( + resize_along == "exact" and target_width / target_height > ar2 + ): + target_height_snd = int(target_width / ar2) + target_width_snd = target_width + else: + target_width_snd = int(target_height * ar2) + target_height_snd = target_height + else: + target_width_fst, target_height_fst = target_width, target_height + target_width_snd, target_height_snd = target_width, target_height + + fst_resized = TF.resize(fst, [target_height_fst, target_width_fst]) + snd_resized = TF.resize(snd, [target_height_snd, target_width_snd]) + + return fst_resized, snd_resized + + match visualization: + case Tensor(data=viz): + return viz + case (Tensor(data=viz_labels), Tensor(data=viz_predictions)): + viz_labels, viz_predictions = resize_to_match(viz_labels, viz_predictions) + return torch.cat([viz_labels, viz_predictions], dim=-1) + + case (Tensor(data=_), [*viz]) if isinstance(viz, list) and all( + isinstance(v, Tensor) for v in viz + ): + raise NotImplementedError( + "Composition of multiple visualizations not yet supported." + ) + case _: + raise ValueError( + "Visualization should be either a single tensor or a tuple of " + "two tensors or a tuple of a tensor and a list of tensors." + f"Got: `{type(visualization)}`." + ) diff --git a/luxonis_train/callbacks/README.md b/luxonis_train/callbacks/README.md new file mode 100644 index 00000000..0eae7a5d --- /dev/null +++ b/luxonis_train/callbacks/README.md @@ -0,0 +1,53 @@ +# Callbacks + +List of all supported callbacks. + +## Table Of Contents + +- [PytorchLightning Callbacks](#pytorchlightning-callbacks) +- [ExportOnTrainEnd](#exportontrainend) +- [LuxonisProgressBar](#luxonisprogressbar) +- [MetadataLogger](#metadatalogger) +- [TestOnTrainEnd](#testontrainend) + +## PytorchLightning Callbacks + +List of supported callbacks from `lightning.pytorch`. + +- [DeviceStatsMonitor](https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.callbacks.DeviceStatsMonitor.html#lightning.pytorch.callbacks.DeviceStatsMonitor) +- [ EarlyStopping ](https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.callbacks.EarlyStopping.html#lightning.pytorch.callbacks.EarlyStopping) +- [ LearningRateMonitor ](https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.callbacks.LearningRateMonitor.html#lightning.pytorch.callbacks.LearningRateMonitor) +- [ ModelCheckpoint ](https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.callbacks.ModelCheckpoint.html#lightning.pytorch.callbacks.ModelCheckpoint) +- [ RichModelSummary ](https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.callbacks.RichModelSummary.html#lightning.pytorch.callbacks.RichModelSummary) + - Added automatically if `use_rich_text` is set to `True` in [config](../../../configs/README.md#topleveloptions). + +## ExportOnTrainEnd + +Performs export on train end with best weights according to the validation loss. 
+ +**Params** + +| Key | Type | Default value | Description | +| ---------------- | ---- | ------------- | -------------------------------------------------------------------------------------- | +| upload_to_mlflow | bool | False | If set to True, overrides the upload url in exporter with currently active MLFlow run. | + +## LuxonisProgressBar + +Custom rich text progress bar based on RichProgressBar from Pytorch Lightning. +Added automatically if `use_rich_text` is set to `True` in [config](../../../configs/README.md#topleveloptions). + +## MetadataLogger + +Callback that logs training metadata. + +Metadata include all defined hyperparameters together with git hashes of `luxonis-ml` and `luxonis-train` packages. Also stores this information locally. + +**Params** + +| Key | Type | Default value | Description | +| ----------- | ----------- | ------------- | ----------------------------------------------------------------------------------------------------------------------- | +| hyperparams | list\[str\] | \[\] | List of hyperparameters to log. The hyperparameters are provided as config keys in dot notation. E.g. "trainer.epochs". | + +## TestOnTrainEnd + +Callback to perform a test run at the end of the training. diff --git a/luxonis_train/callbacks/__init__.py b/luxonis_train/callbacks/__init__.py new file mode 100644 index 00000000..4be94600 --- /dev/null +++ b/luxonis_train/callbacks/__init__.py @@ -0,0 +1,32 @@ +from lightning.pytorch.callbacks import ( + DeviceStatsMonitor, + EarlyStopping, + LearningRateMonitor, + ModelCheckpoint, + RichModelSummary, +) + +from luxonis_train.utils.registry import CALLBACKS + +from .export_on_train_end import ExportOnTrainEnd +from .luxonis_progress_bar import LuxonisProgressBar +from .metadata_logger import MetadataLogger +from .module_freezer import ModuleFreezer +from .test_on_train_end import TestOnTrainEnd +from .upload_checkpoint_on_train_end import UploadCheckpointOnTrainEnd + +CALLBACKS.register_module(module=EarlyStopping) +CALLBACKS.register_module(module=LearningRateMonitor) +CALLBACKS.register_module(module=ModelCheckpoint) +CALLBACKS.register_module(module=RichModelSummary) +CALLBACKS.register_module(module=DeviceStatsMonitor) + + +__all__ = [ + "ExportOnTrainEnd", + "LuxonisProgressBar", + "MetadataLogger", + "ModuleFreezer", + "TestOnTrainEnd", + "UploadCheckpointOnTrainEnd", +] diff --git a/luxonis_train/callbacks/export_on_train_end.py b/luxonis_train/callbacks/export_on_train_end.py new file mode 100644 index 00000000..3aa55309 --- /dev/null +++ b/luxonis_train/callbacks/export_on_train_end.py @@ -0,0 +1,63 @@ +import logging +from pathlib import Path +from typing import cast + +import lightning.pytorch as pl + +from luxonis_train.utils.config import Config +from luxonis_train.utils.registry import CALLBACKS +from luxonis_train.utils.tracker import LuxonisTrackerPL + + +@CALLBACKS.register_module() +class ExportOnTrainEnd(pl.Callback): + def __init__(self, upload_to_mlflow: bool = False): + """Callback that performs export on train end with best weights according to the + validation loss. + + @type upload_to_mlflow: bool + @param upload_to_mlflow: If set to True, overrides the upload url in Exporter + with currently active MLFlow run (if present). + """ + super().__init__() + self.upload_to_mlflow = upload_to_mlflow + + def on_train_end(self, trainer: pl.Trainer, pl_module: pl.LightningModule) -> None: + """Exports the model on train end. + + @type trainer: L{pl.Trainer} + @param trainer: Pytorch Lightning trainer. 
+ @type pl_module: L{pl.LightningModule} + @param pl_module: Pytorch Lightning module. + @raises RuntimeError: If no best model path is found. + """ + from luxonis_train.core.exporter import Exporter + + model_checkpoint_callbacks = [ + c + for c in trainer.callbacks # type: ignore + if isinstance(c, pl.callbacks.ModelCheckpoint) # type: ignore + ] + # NOTE: assume that first checkpoint callback is based on val loss + best_model_path = model_checkpoint_callbacks[0].best_model_path + if not best_model_path: + raise RuntimeError( + "No best model path found. " + "Please make sure that ModelCheckpoint callback is present " + "and at least one validation epoch has been performed." + ) + cfg: Config = pl_module.cfg + cfg.model.weights = best_model_path + if self.upload_to_mlflow: + if pl_module.cfg.tracker.is_mlflow: + tracker = cast(LuxonisTrackerPL, trainer.logger) + new_upload_directory = f"mlflow://{tracker.project_id}/{tracker.run_id}" + cfg.exporter.upload_directory = new_upload_directory + else: + logging.getLogger(__name__).warning( + "`upload_to_mlflow` is set to True, " + "but there is no MLFlow active run, skipping." + ) + exporter = Exporter(cfg=cfg) + onnx_path = str(Path(best_model_path).parent.with_suffix(".onnx")) + exporter.export(onnx_path=onnx_path) diff --git a/luxonis_train/callbacks/luxonis_progress_bar.py b/luxonis_train/callbacks/luxonis_progress_bar.py new file mode 100644 index 00000000..fcc130cd --- /dev/null +++ b/luxonis_train/callbacks/luxonis_progress_bar.py @@ -0,0 +1,111 @@ +from collections.abc import Mapping + +import lightning.pytorch as pl +import rich +from lightning.pytorch.callbacks import RichProgressBar +from rich.table import Table + +from luxonis_train.utils.registry import CALLBACKS + + +@CALLBACKS.register_module() +class LuxonisProgressBar(RichProgressBar): + """Custom rich text progress bar based on RichProgressBar from Pytorch Lightning.""" + + _console: rich.console.Console + + def __init__(self): + super().__init__(leave=True) + + def print_single_line(self, text: str, style: str = "magenta") -> None: + """Prints single line of text to the console.""" + self._check_console() + text = f"[{style}]{text}[/{style}]" + self._console.print(text) + + def get_metrics( + self, trainer: pl.Trainer, pl_module: pl.LightningModule + ) -> dict[str, int | str | float | dict[str, float]]: + # NOTE: there might be a cleaner way of doing this + items = super().get_metrics(trainer, pl_module) + if trainer.training: + items["Loss"] = pl_module.training_step_outputs[-1]["loss"].item() + return items + + def _check_console(self) -> None: + """Checks if console is set. + + @raises RuntimeError: If console is not set. + """ + if self._console is None: + raise RuntimeError( + "Console not set. Set `use_rich_text` to `False` " + "in your configuration file." + ) + + def print_table( + self, + title: str, + table: Mapping[str, int | str | float], + key_name: str = "Name", + value_name: str = "Value", + ) -> None: + """Prints table to the console using rich text. + + @type title: str + @param title: Title of the table + @type table: Mapping[str, int | str | float] + @param table: Table to print + @type key_name: str + @param key_name: Name of the key column. Defaults to C{"Name"}. + @type value_name: str + @param value_name: Name of the value column. Defaults to C{"Value"}. 
+ """ + rich_table = Table( + title=title, + show_header=True, + header_style="bold magenta", + ) + rich_table.add_column(key_name, style="magenta") + rich_table.add_column(value_name, style="white") + for name, value in table.items(): + if isinstance(value, float): + rich_table.add_row(name, f"{value:.5f}") + else: + rich_table.add_row(name, str(value)) + self._check_console() + self._console.print(rich_table) + + def print_tables( + self, tables: Mapping[str, Mapping[str, int | str | float]] + ) -> None: + """Prints multiple tables to the console using rich text. + + @type tables: Mapping[str, Mapping[str, int | str | float]] + @param tables: Tables to print in format {table_name: table}. + """ + for table_name, table in tables.items(): + self.print_table(table_name, table) + + def print_results( + self, + stage: str, + loss: float, + metrics: Mapping[str, Mapping[str, int | str | float]], + ) -> None: + """Prints results to the console using rich text. + + @type stage: str + @param stage: Stage name. + @type loss: float + @param loss: Loss value. + @type metrics: Mapping[str, Mapping[str, int | str | float]] + @param metrics: Metrics in format {table_name: table}. + """ + assert self._console is not None + + self._console.print(f"------{stage}-----", style="bold magenta") + self._console.print(f"[bold magenta]Loss:[/bold magenta] [white]{loss}[/white]") + self._console.print("[bold magenta]Metrics:[/bold magenta]") + self.print_tables(metrics) + self._console.print("---------------", style="bold magenta") diff --git a/luxonis_train/callbacks/metadata_logger.py b/luxonis_train/callbacks/metadata_logger.py new file mode 100644 index 00000000..e36c0c30 --- /dev/null +++ b/luxonis_train/callbacks/metadata_logger.py @@ -0,0 +1,70 @@ +import os.path as osp +import subprocess + +import lightning.pytorch as pl +import pkg_resources +import yaml + +from luxonis_train.utils.registry import CALLBACKS + + +@CALLBACKS.register_module() +class MetadataLogger(pl.Callback): + def __init__(self, hyperparams: list[str]): + """Callback that logs training metadata. + + Metadata include all defined hyperparameters together with git hashes of + luxonis-ml and luxonis-train packages. Also stores this information locally. + + @type hyperparams: list[str] + @param hyperparams: List of hyperparameters to log. + """ + super().__init__() + self.hyperparams = hyperparams + + def on_fit_start(self, trainer: pl.Trainer, pl_module: pl.LightningModule) -> None: + cfg = pl_module.cfg + + hparams = {key: cfg.get(key) for key in self.hyperparams} + + # try to get luxonis-ml and luxonis-train git commit hashes (if installed as editable) + luxonis_ml_hash = self._get_editable_package_git_hash("luxonis_ml") + if luxonis_ml_hash: + hparams["luxonis_ml"] = luxonis_ml_hash + + luxonis_train_hash = self._get_editable_package_git_hash("luxonis_train") + if luxonis_train_hash: + hparams["luxonis_train"] = luxonis_train_hash + + trainer.logger.log_hyperparams(hparams) # type: ignore + # also save metadata locally + with open(osp.join(pl_module.save_dir, "metadata.yaml"), "w+") as f: + yaml.dump(hparams, f, default_flow_style=False) + + def _get_editable_package_git_hash(self, package_name: str) -> str | None: + try: + distribution = pkg_resources.get_distribution(package_name) + package_location = osp.join(distribution.location, package_name) + + # remove any additional folders in path (e.g. 
"/src") + if "src" in package_location: + package_location = package_location.replace("src", "") + + # Check if the package location is a Git repository + git_dir = osp.join(package_location, ".git") + if osp.exists(git_dir): + git_command = ["git", "rev-parse", "HEAD"] + try: + git_hash = subprocess.check_output( + git_command, + cwd=package_location, + stderr=subprocess.DEVNULL, + universal_newlines=True, + ).strip() + return git_hash + except subprocess.CalledProcessError: + return None + else: + return None + except pkg_resources.DistributionNotFound: + return None diff --git a/luxonis_train/callbacks/module_freezer.py b/luxonis_train/callbacks/module_freezer.py new file mode 100644 index 00000000..6a80f1ae --- /dev/null +++ b/luxonis_train/callbacks/module_freezer.py @@ -0,0 +1,26 @@ +import lightning.pytorch as pl +from lightning.pytorch.callbacks import BaseFinetuning +from torch import nn +from torch.optim.optimizer import Optimizer + + +class ModuleFreezer(BaseFinetuning): + def __init__(self, frozen_modules: list[nn.Module]): + """Callback that freezes parts of the model. + + @type frozen_modules: list[nn.Module] + @param frozen_modules: List of modules to freeze. + """ + super().__init__() + self.frozen_modules = frozen_modules + + def freeze_before_training(self, _: pl.LightningModule) -> None: + for module in self.frozen_modules: + self.freeze(module, train_bn=False) + + def finetune_function( + self, pl_module: pl.LightningModule, epoch: int, optimizer: Optimizer + ) -> None: + # Called on every train epoch start. Used to unfreeze frozen modules. + # TODO: Implement unfreezing and support in config. + ... diff --git a/luxonis_train/callbacks/test_on_train_end.py b/luxonis_train/callbacks/test_on_train_end.py new file mode 100644 index 00000000..6bd3c324 --- /dev/null +++ b/luxonis_train/callbacks/test_on_train_end.py @@ -0,0 +1,41 @@ +import lightning.pytorch as pl +from luxonis_ml.data import LuxonisDataset, ValAugmentations +from torch.utils.data import DataLoader + +from luxonis_train.utils.loaders import LuxonisLoaderTorch, collate_fn +from luxonis_train.utils.registry import CALLBACKS + + +@CALLBACKS.register_module() +class TestOnTrainEnd(pl.Callback): + """Callback to perform a test run at the end of the training.""" + + def on_train_end(self, trainer: pl.Trainer, pl_module: pl.LightningModule) -> None: + dataset = LuxonisDataset( + dataset_name=pl_module.cfg.dataset.dataset_name, + team_id=pl_module.cfg.dataset.team_id, + dataset_id=pl_module.cfg.dataset.dataset_id, + bucket_type=pl_module.cfg.dataset.bucket_type, + bucket_storage=pl_module.cfg.dataset.bucket_storage, + ) + + loader_test = LuxonisLoaderTorch( + dataset, + view=pl_module.cfg.dataset.test_view, + augmentations=ValAugmentations( + image_size=pl_module.cfg.trainer.preprocessing.train_image_size, + augmentations=[ + i.model_dump() + for i in pl_module.cfg.trainer.preprocessing.augmentations + ], + train_rgb=pl_module.cfg.trainer.preprocessing.train_rgb, + keep_aspect_ratio=pl_module.cfg.trainer.preprocessing.keep_aspect_ratio, + ), + ) + pytorch_loader_test = DataLoader( + loader_test, + batch_size=pl_module.cfg.trainer.batch_size, + num_workers=pl_module.cfg.trainer.num_workers, + collate_fn=collate_fn, + ) + trainer.test(pl_module, pytorch_loader_test) diff --git a/luxonis_train/callbacks/upload_checkpoint_on_train_end.py b/luxonis_train/callbacks/upload_checkpoint_on_train_end.py new file mode 100644 index 00000000..86879ec9 --- /dev/null +++ b/luxonis_train/callbacks/upload_checkpoint_on_train_end.py 
@@ -0,0 +1,41 @@ +import logging + +import lightning.pytorch as pl +from luxonis_ml.utils.filesystem import LuxonisFileSystem + +from luxonis_train.utils.registry import CALLBACKS + + +@CALLBACKS.register_module() +class UploadCheckpointOnTrainEnd(pl.Callback): + """Callback that uploads best checkpoint based on the validation loss.""" + + def __init__(self, upload_directory: str): + """Constructs `UploadCheckpointOnTrainEnd`. + + @type upload_directory: str + @param upload_directory: Path used as upload directory + """ + super().__init__() + self.fs = LuxonisFileSystem( + upload_directory, allow_active_mlflow_run=True, allow_local=False + ) + + def on_train_end(self, trainer: pl.Trainer, _: pl.LightningModule) -> None: + logger = logging.getLogger(__name__) + logger.info(f"Started checkpoint upload to {self.fs.full_path()}...") + model_checkpoint_callbacks = [ + c + for c in trainer.callbacks # type: ignore + if isinstance(c, pl.callbacks.ModelCheckpoint) # type: ignore + ] + # NOTE: assume that first checkpoint callback is based on val loss + local_path = model_checkpoint_callbacks[0].best_model_path + self.fs.put_file( + local_path=local_path, + remote_path=local_path.split("/")[-1], + mlflow_instance=trainer.logger.experiment.get( # type: ignore + "mlflow", None + ), + ) + logger.info("Checkpoint upload finished") diff --git a/luxonis_train/core/__init__.py b/luxonis_train/core/__init__.py new file mode 100644 index 00000000..6264473b --- /dev/null +++ b/luxonis_train/core/__init__.py @@ -0,0 +1,6 @@ +from .exporter import Exporter +from .inferer import Inferer +from .trainer import Trainer +from .tuner import Tuner + +__all__ = ["Exporter", "Trainer", "Tuner", "Inferer"] diff --git a/luxonis_train/core/core.py b/luxonis_train/core/core.py new file mode 100644 index 00000000..de17be0d --- /dev/null +++ b/luxonis_train/core/core.py @@ -0,0 +1,234 @@ +import os +import os.path as osp +from logging import getLogger +from typing import Any + +import lightning.pytorch as pl +import lightning_utilities.core.rank_zero as rank_zero_module +import rich.traceback +import torch +from lightning.pytorch.utilities import rank_zero_only # type: ignore +from luxonis_ml.data import LuxonisDataset, TrainAugmentations, ValAugmentations +from luxonis_ml.utils import reset_logging, setup_logging + +from luxonis_train.callbacks import LuxonisProgressBar +from luxonis_train.utils.config import Config +from luxonis_train.utils.general import DatasetMetadata +from luxonis_train.utils.loaders import LuxonisLoaderTorch, collate_fn +from luxonis_train.utils.tracker import LuxonisTrackerPL + +logger = getLogger(__name__) + + +class Core: + """Common logic of the core components. + + This class contains common logic of the core components (trainer, evaluator, + exporter, etc.). + """ + + def __init__( + self, + cfg: str | dict[str, Any] | Config, + opts: list[str] | tuple[str, ...] | dict[str, Any] | None = None, + ): + """Constructs a new Core instance. + + Loads the config and initializes datasets, dataloaders, augmentations, + lightning components, etc. + + @type cfg: str | dict[str, Any] | Config + @param cfg: Path to config file or config dict used to setup training + + @type opts: list[str] | tuple[str, ...] 
| dict[str, Any] | None + @param opts: Argument dict provided through command line, used for config overriding + """ + + overrides = {} + if opts: + if isinstance(opts, dict): + overrides = opts + else: + if len(opts) % 2 != 0: + raise ValueError( + "Override options should be a list of key-value pairs" + ) + + # NOTE: has to be done like this for torchx to work + for i in range(0, len(opts), 2): + overrides[opts[i]] = opts[i + 1] + + if isinstance(cfg, Config): + self.cfg = cfg + else: + self.cfg = Config.get_config(cfg, overrides) + + opts = opts or [] + + if self.cfg.use_rich_text: + rich.traceback.install(suppress=[pl, torch]) + + self.rank = rank_zero_only.rank + + self.tracker = LuxonisTrackerPL( + rank=self.rank, + mlflow_tracking_uri=self.cfg.ENVIRON.MLFLOW_TRACKING_URI, + **self.cfg.tracker.model_dump(), + ) + + self.run_save_dir = os.path.join( + self.cfg.tracker.save_directory, self.tracker.run_name + ) + # NOTE: to add the file handler (we only get the save dir now, + # but we want to use the logger before) + reset_logging() + setup_logging( + use_rich=self.cfg.use_rich_text, + file=osp.join(self.run_save_dir, "luxonis_train.log"), + ) + + # NOTE: overriding logger in pl so it uses our logger to log device info + rank_zero_module.log = logger + + self.train_augmentations = TrainAugmentations( + image_size=self.cfg.trainer.preprocessing.train_image_size, + augmentations=[ + i.model_dump() for i in self.cfg.trainer.preprocessing.augmentations + ], + train_rgb=self.cfg.trainer.preprocessing.train_rgb, + keep_aspect_ratio=self.cfg.trainer.preprocessing.keep_aspect_ratio, + ) + self.val_augmentations = ValAugmentations( + image_size=self.cfg.trainer.preprocessing.train_image_size, + augmentations=[ + i.model_dump() for i in self.cfg.trainer.preprocessing.augmentations + ], + train_rgb=self.cfg.trainer.preprocessing.train_rgb, + keep_aspect_ratio=self.cfg.trainer.preprocessing.keep_aspect_ratio, + ) + + self.pl_trainer = pl.Trainer( + accelerator=self.cfg.trainer.accelerator, + devices=self.cfg.trainer.devices, + strategy=self.cfg.trainer.strategy, + logger=self.tracker, # type: ignore + max_epochs=self.cfg.trainer.epochs, + accumulate_grad_batches=self.cfg.trainer.accumulate_grad_batches, + check_val_every_n_epoch=self.cfg.trainer.validation_interval, + num_sanity_val_steps=self.cfg.trainer.num_sanity_val_steps, + profiler=self.cfg.trainer.profiler, # for debugging purposes, + # NOTE: this is likely PL bug, + # should be configurable inside configure_callbacks(), + callbacks=LuxonisProgressBar() if self.cfg.use_rich_text else None, + ) + self.dataset = LuxonisDataset( + dataset_name=self.cfg.dataset.dataset_name, + team_id=self.cfg.dataset.team_id, + dataset_id=self.cfg.dataset.dataset_id, + bucket_type=self.cfg.dataset.bucket_type, + bucket_storage=self.cfg.dataset.bucket_storage, + ) + + self.loader_train = LuxonisLoaderTorch( + self.dataset, + view=self.cfg.dataset.train_view, + augmentations=self.train_augmentations, + ) + self.loader_val = LuxonisLoaderTorch( + self.dataset, + view=self.cfg.dataset.val_view, + augmentations=self.val_augmentations, + ) + self.loader_test = LuxonisLoaderTorch( + self.dataset, + view=self.cfg.dataset.test_view, + augmentations=self.val_augmentations, + ) + + self.pytorch_loader_val = torch.utils.data.DataLoader( + self.loader_val, + batch_size=self.cfg.trainer.batch_size, + num_workers=self.cfg.trainer.num_workers, + collate_fn=collate_fn, + ) + self.pytorch_loader_test = torch.utils.data.DataLoader( + self.loader_test, + 
batch_size=self.cfg.trainer.batch_size,
+            num_workers=self.cfg.trainer.num_workers,
+            collate_fn=collate_fn,
+        )
+        sampler = None
+        if self.cfg.trainer.use_weighted_sampler:
+            classes_count = self.dataset.get_classes()[1]
+            if len(classes_count) == 0:
+                logger.warning(
+                    "WeightedRandomSampler only available for classification tasks. Using default sampler instead."
+                )
+            else:
+                weights = [1 / i for i in classes_count.values()]
+                num_samples = sum(classes_count.values())
+                sampler = torch.utils.data.WeightedRandomSampler(weights, num_samples)
+
+        self.pytorch_loader_train = torch.utils.data.DataLoader(
+            self.loader_train,
+            shuffle=True,
+            batch_size=self.cfg.trainer.batch_size,
+            num_workers=self.cfg.trainer.num_workers,
+            collate_fn=collate_fn,
+            drop_last=self.cfg.trainer.skip_last_batch,
+            sampler=sampler,
+        )
+        self.error_message = None
+
+        self.dataset_metadata = DatasetMetadata.from_dataset(self.dataset)
+        self.dataset_metadata.set_loader(self.pytorch_loader_train)
+
+        self.cfg.save_data(os.path.join(self.run_save_dir, "config.yaml"))
+
+    def set_train_augmentations(self, aug: TrainAugmentations) -> None:
+        """Sets augmentations used for training dataset."""
+        self.train_augmentations = aug
+
+    def set_val_augmentations(self, aug: ValAugmentations) -> None:
+        """Sets augmentations used for validation dataset."""
+        self.val_augmentations = aug
+
+    def set_test_augmentations(self, aug: ValAugmentations) -> None:
+        """Sets augmentations used for test dataset."""
+        self.test_augmentations = aug
+
+    @rank_zero_only
+    def get_save_dir(self) -> str:
+        """Return path to directory where checkpoints are saved.
+
+        @rtype: str
+        @return: Save directory path
+        """
+        return self.run_save_dir
+
+    @rank_zero_only
+    def get_error_message(self) -> str | None:
+        """Return error message if one occurs while running in thread, otherwise None.
+
+        @rtype: str | None
+        @return: Error message
+        """
+        return self.error_message
+
+    @rank_zero_only
+    def get_min_loss_checkpoint_path(self) -> str:
+        """Return best checkpoint path with respect to minimal validation loss.
+
+        @rtype: str
+        @return: Path to best checkpoint with respect to minimal validation loss
+        """
+        return self.pl_trainer.checkpoint_callbacks[0].best_model_path  # type: ignore
+
+    @rank_zero_only
+    def get_best_metric_checkpoint_path(self) -> str:
+        """Return best checkpoint path with respect to best validation metric.
+
+        @rtype: str
+        @return: Path to best checkpoint with respect to best validation metric
+        """
+        return self.pl_trainer.checkpoint_callbacks[1].best_model_path  # type: ignore
diff --git a/luxonis_train/core/exporter.py b/luxonis_train/core/exporter.py
new file mode 100644
index 00000000..ab73ce72
--- /dev/null
+++ b/luxonis_train/core/exporter.py
@@ -0,0 +1,216 @@
+import os
+import tempfile
+from logging import getLogger
+from pathlib import Path
+from typing import Any
+
+import onnx
+import yaml
+from luxonis_ml.utils import LuxonisFileSystem
+from torch import Size
+
+from luxonis_train.models import LuxonisModel
+from luxonis_train.utils.config import Config
+
+from .core import Core
+
+logger = getLogger(__name__)
+
+
+class Exporter(Core):
+    """Main API which is used to create the model, setup pytorch lightning environment
+    and perform model export based on provided arguments and config."""
+
+    def __init__(
+        self,
+        cfg: str | dict[str, Any] | Config,
+        opts: list[str] | tuple[str, ...] | dict[str, Any] | None = None,
+    ):
+        """Constructs a new Exporter instance. 
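+
+        Resolves the export input shape, checks that model weights are
+        specified and computes the mean/scale normalization values used in
+        the deployment config.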
+ + @type cfg: str | dict[str, Any] | Config + @param cfg: Path to config file or config dict used to setup training. + + @type opts: list[str] | tuple[str, ...] | dict[str, Any] | None + @param opts: Argument dict provided through command line, + used for config overriding. + """ + + super().__init__(cfg, opts) + + input_shape = self.cfg.exporter.input_shape + if self.cfg.model.weights is None: + raise ValueError( + "Model weights must be specified in config file for export." + ) + self.local_path = self.cfg.model.weights + if input_shape is None: + self.input_shape = self.loader_val.input_shape + else: + self.input_shape = Size(input_shape) + + export_path = ( + Path(self.cfg.exporter.export_save_directory) + / self.cfg.exporter.export_model_name + ) + + if not export_path.parent.exists(): + logger.info(f"Creating export directory {export_path.parent}") + export_path.parent.mkdir(parents=True, exist_ok=True) + self.export_path = str(export_path) + + normalize_params = self.cfg.trainer.preprocessing.normalize.params + if self.cfg.exporter.scale_values is not None: + self.scale_values = self.cfg.exporter.scale_values + else: + self.scale_values = normalize_params.get("std", None) + if self.scale_values: + self.scale_values = ( + [i * 255 for i in self.scale_values] + if isinstance(self.scale_values, list) + else self.scale_values * 255 + ) + + if self.cfg.exporter.mean_values is not None: + self.mean_values = self.cfg.exporter.mean_values + else: + self.mean_values = normalize_params.get("mean", None) + if self.mean_values: + self.mean_values = ( + [i * 255 for i in self.mean_values] + if isinstance(self.mean_values, list) + else self.mean_values * 255 + ) + + self.lightning_module = LuxonisModel( + cfg=self.cfg, + save_dir=self.run_save_dir, + input_shape=self.input_shape, + dataset_metadata=self.dataset_metadata, + ) + + def _get_modelconverter_config(self, onnx_path: str) -> dict[str, Any]: + """Generates export config from input config that is compatible with Luxonis + modelconverter tool. + + @type onnx_path: str + @param onnx_path: Path to .onnx model + @rtype: dict[str, Any] + @return: Export config. + """ + return { + "input_model": onnx_path, + "scale_values": self.scale_values, + "mean_values": self.mean_values, + "reverse_input_channels": self.cfg.exporter.reverse_input_channels, + "use_bgr": not self.cfg.trainer.preprocessing.train_rgb, + "input_shape": list(self.input_shape), + "data_type": self.cfg.exporter.data_type, + "output": [{"name": name} for name in self.output_names], + "meta": {"description": self.cfg.model.name}, + } + + def export(self, onnx_path: str | None = None): + """Runs export. + + @type onnx_path: str | None + @param onnx_path: Path to .onnx model. If not specified, model will be saved + to export directory with name specified in config file. + + @raises RuntimeError: If `onnxsim` fails to simplify the model. + """ + onnx_path = onnx_path or self.export_path + ".onnx" + self.output_names = self.lightning_module.export_onnx( + onnx_path, **self.cfg.exporter.onnx.model_dump() + ) + + try: + import onnxsim + + logger.info("Simplifying ONNX model...") + model_onnx = onnx.load(onnx_path) + onnx_model, check = onnxsim.simplify(model_onnx) + if not check: + raise RuntimeError("Onnx simplify failed.") + onnx.save(onnx_model, onnx_path) + logger.info(f"ONNX model saved to {onnx_path}") + + except ImportError: + logger.error("Failed to import `onnxsim`") + logger.warning( + "`onnxsim` not installed. Skipping ONNX model simplification. 
" + "Ensure `onnxsim` is installed in your environment." + ) + + files_to_upload = [self.local_path, onnx_path] + + if self.cfg.exporter.blobconverter.active: + try: + import blobconverter + + logger.info("Converting ONNX to .blob") + + optimizer_params = [] + if self.scale_values: + optimizer_params.append(f"--scale_values={self.scale_values}") + if self.mean_values: + optimizer_params.append(f"--mean_values={self.mean_values}") + if self.cfg.exporter.reverse_input_channels: + optimizer_params.append("--reverse_input_channels") + + blob_path = blobconverter.from_onnx( + model=onnx_path, + optimizer_params=optimizer_params, + data_type=self.cfg.exporter.data_type, + shaves=self.cfg.exporter.blobconverter.shaves, + use_cache=False, + output_dir=self.export_path, + ) + files_to_upload.append(blob_path) + logger.info(f".blob model saved to {blob_path}") + + except ImportError: + logger.error("Failed to import `blobconverter`") + logger.warning( + "`blobconverter` not installed. Skipping .blob model conversion. " + "Ensure `blobconverter` is installed in your environment." + ) + + if self.cfg.exporter.upload_url is not None: + self._upload(files_to_upload) + + def _upload(self, files_to_upload: list[str]): + """Uploads .pt, .onnx and current config.yaml to specified s3 bucket. + + @type files_to_upload: list[str] + @param files_to_upload: List of files to upload. + @raises ValueError: If upload url was not specified in config file. + """ + + if self.cfg.exporter.upload_url is None: + raise ValueError("Upload url must be specified in config file.") + + fs = LuxonisFileSystem(self.cfg.exporter.upload_url, allow_local=False) + logger.info(f"Started upload to {fs.full_path}...") + + for file in files_to_upload: + suffix = Path(file).suffix + fs.put_file( + local_path=file, + remote_path=self.cfg.exporter.export_model_name + suffix, + ) + + with tempfile.TemporaryFile() as f: + self.cfg.save_data(f.name) + fs.put_file(local_path=f.name, remote_path="config.yaml") + + onnx_path = os.path.join( + fs.full_path, f"{self.cfg.exporter.export_model_name}.onnx" + ) + modelconverter_config = self._get_modelconverter_config(onnx_path) + + with tempfile.TemporaryFile() as f: + yaml.dump(modelconverter_config, f, default_flow_style=False) + fs.put_file(local_path=f.name, remote_path="config_export.yaml") + + logger.info("Files upload finished") diff --git a/luxonis_train/core/inferer.py b/luxonis_train/core/inferer.py new file mode 100644 index 00000000..b4d13b77 --- /dev/null +++ b/luxonis_train/core/inferer.py @@ -0,0 +1,57 @@ +from pathlib import Path +from typing import Literal + +import cv2 + +from luxonis_train.attached_modules.visualizers import ( + get_unnormalized_images, +) + +from .trainer import Trainer + + +class Inferer(Trainer): + def __init__( + self, + cfg: str | dict, + opts: list[str] | tuple[str, ...] 
| None, + view: Literal["train", "test", "val"], + save_dir: Path | None = None, + ): + opts = list(opts or []) + opts += ["trainer.batch_size", "1"] + super().__init__(cfg, opts) + if view == "train": + self.loader = self.pytorch_loader_train + elif view == "test": + self.loader = self.pytorch_loader_test + else: + self.loader = self.pytorch_loader_val + self.save_dir = save_dir + if self.save_dir is not None: + self.save_dir.mkdir(exist_ok=True, parents=True) + + def infer(self) -> None: + self.lightning_module.eval() + k = 0 + for inputs, labels in self.loader: + images = get_unnormalized_images(self.cfg, inputs) + outputs = self.lightning_module.forward( + inputs, labels, images=images, compute_visualizations=True + ) + + for node_name, visualizations in outputs.visualizations.items(): + for viz_name, viz_batch in visualizations.items(): + for i, viz in enumerate(viz_batch): + viz_arr = viz.detach().cpu().numpy().transpose(1, 2, 0) + viz_arr = cv2.cvtColor(viz_arr, cv2.COLOR_RGB2BGR) + name = f"{node_name}/{viz_name}/{i}" + if self.save_dir is not None: + name = name.replace("/", "_") + cv2.imwrite(str(self.save_dir / f"{name}_{k}.png"), viz_arr) + k += 1 + else: + cv2.imshow(name, viz_arr) + if self.save_dir is None: + if cv2.waitKey(0) == ord("q"): + exit() diff --git a/luxonis_train/core/trainer.py b/luxonis_train/core/trainer.py new file mode 100644 index 00000000..cb2c5a2c --- /dev/null +++ b/luxonis_train/core/trainer.py @@ -0,0 +1,119 @@ +import threading +from logging import getLogger +from typing import Any, Literal + +from lightning.pytorch.utilities import rank_zero_only # type: ignore + +from luxonis_train.models import LuxonisModel +from luxonis_train.utils.config import Config + +from .core import Core + +logger = getLogger(__name__) + + +class Trainer(Core): + """Main API which is used to create the model, setup pytorch lightning environment + and perform training based on provided arguments and config.""" + + def __init__( + self, + cfg: str | dict[str, Any] | Config, + opts: list[str] | tuple[str, ...] | dict[str, Any] | None = None, + ): + """Constructs a new Trainer instance. + + @type cfg: str | dict[str, Any] | Config + @param cfg: Path to config file or config dict used to setup training. + + @type opts: list[str] | tuple[str, ...] | dict[str, Any] | None + @param opts: Argument dict provided through command line, + used for config overriding. + """ + super().__init__(cfg, opts) + + self.lightning_module = LuxonisModel( + cfg=self.cfg, + dataset_metadata=self.dataset_metadata, + save_dir=self.run_save_dir, + input_shape=self.loader_train.input_shape, + ) + + def train(self, new_thread: bool = False) -> None: + """Runs training. + + @type new_thread: bool + @param new_thread: Runs training in new thread if set to True. 
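+            Exceptions raised in the thread are stored in C{self.error_message}
+            and can be retrieved later via C{get_error_message}.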
+        """
+        if not new_thread:
+            logger.info(f"Checkpoints will be saved in: {self.get_save_dir()}")
+            logger.info("Starting training...")
+            self.pl_trainer.fit(
+                self.lightning_module,
+                self.pytorch_loader_train,
+                self.pytorch_loader_val,
+            )
+            logger.info("Training finished")
+            logger.info(f"Checkpoints saved in: {self.get_save_dir()}")
+        else:
+            # Every time exception happens in the Thread, this hook will activate
+            def thread_exception_hook(args):
+                self.error_message = str(args.exc_value)
+
+            threading.excepthook = thread_exception_hook
+
+            self.thread = threading.Thread(
+                target=self.pl_trainer.fit,
+                args=(
+                    self.lightning_module,
+                    self.pytorch_loader_train,
+                    self.pytorch_loader_val,
+                ),
+                daemon=True,
+            )
+            self.thread.start()
+
+    def test(
+        self, new_thread: bool = False, view: Literal["train", "val", "test"] = "test"
+    ) -> None:
+        """Runs testing.
+
+        @type new_thread: bool
+        @param new_thread: Runs testing in new thread if set to True.
+        @type view: Literal["train", "val", "test"]
+        @param view: Which dataset view to use for testing. Defaults to "test".
+        """
+
+        if view == "test":
+            loader = self.pytorch_loader_test
+        elif view == "val":
+            loader = self.pytorch_loader_val
+        elif view == "train":
+            loader = self.pytorch_loader_train
+
+        if not new_thread:
+            self.pl_trainer.test(self.lightning_module, loader)
+        else:
+            self.thread = threading.Thread(
+                target=self.pl_trainer.test,
+                args=(self.lightning_module, loader),
+                daemon=True,
+            )
+            self.thread.start()
+
+    @rank_zero_only
+    def get_status(self) -> tuple[int, int]:
+        """Get current status of training.
+
+        @rtype: tuple[int, int]
+        @return: First element is current epoch, second element is total number of
+            epochs.
+        """
+        return self.lightning_module.get_status()
+
+    @rank_zero_only
+    def get_status_percentage(self) -> float:
+        """Return percentage of current training, taking early stopping into account.
+
+        @rtype: float
+        @return: Percentage of current training in range 0-100.
+        """
+        return self.lightning_module.get_status_percentage()
diff --git a/luxonis_train/core/tuner.py b/luxonis_train/core/tuner.py
new file mode 100644
index 00000000..d86efac4
--- /dev/null
+++ b/luxonis_train/core/tuner.py
@@ -0,0 +1,169 @@
+import os.path as osp
+from typing import Any
+
+import lightning.pytorch as pl
+import optuna
+from lightning.pytorch.utilities import rank_zero_only  # type: ignore
+from optuna.integration import PyTorchLightningPruningCallback
+
+from luxonis_train.callbacks import LuxonisProgressBar
+from luxonis_train.models import LuxonisModel
+from luxonis_train.utils import Config
+from luxonis_train.utils.tracker import LuxonisTrackerPL
+
+from .core import Core
+
+
+class Tuner(Core):
+    def __init__(self, cfg: str | dict, args: list[str] | tuple[str, ...] | None):
+        """Main API which is used to perform hyperparameter tuning.
+
+        @type cfg: str | dict[str, Any] | Config
+        @param cfg: Path to config file or config dict used to setup training.
+
+        @type args: list[str] | tuple[str, ...] | None
+        @param args: Argument list provided through command line,
+            used for config overriding. 
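+
+        Tunable parameters are read from C{tuner.params}, where each key is
+        suffixed with its type. A hypothetical key
+        C{"trainer.optimizer.params.lr_float"} would, for example, tune
+        C{trainer.optimizer.params.lr} as a float.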
+        """
+        super().__init__(cfg, args)
+
+    def tune(self) -> None:
+        """Runs Optuna tuning of hyperparameters."""
+
+        pruner = (
+            optuna.pruners.MedianPruner()
+            if self.cfg.tuner.use_pruner
+            else optuna.pruners.NopPruner()
+        )
+
+        storage = None
+        if self.cfg.tuner.storage.active:
+            if self.cfg.tuner.storage.storage_type == "local":
+                storage = "sqlite:///study_local.db"
+            else:
+                storage = "postgresql://{}:{}@{}:{}/{}".format(
+                    self.cfg.ENVIRON.POSTGRES_USER,
+                    self.cfg.ENVIRON.POSTGRES_PASSWORD,
+                    self.cfg.ENVIRON.POSTGRES_HOST,
+                    self.cfg.ENVIRON.POSTGRES_PORT,
+                    self.cfg.ENVIRON.POSTGRES_DB,
+                )
+
+        study = optuna.create_study(
+            study_name=self.cfg.tuner.study_name,
+            storage=storage,
+            direction="minimize",
+            pruner=pruner,
+            load_if_exists=True,
+        )
+
+        study.optimize(
+            self._objective,
+            n_trials=self.cfg.tuner.n_trials,
+            timeout=self.cfg.tuner.timeout,
+        )
+
+    def _objective(self, trial: optuna.trial.Trial) -> float:
+        """Objective function used to optimize Optuna study."""
+        rank = rank_zero_only.rank
+        cfg_tracker = self.cfg.tracker
+        tracker_params = cfg_tracker.model_dump()
+        tracker = LuxonisTrackerPL(
+            rank=rank,
+            mlflow_tracking_uri=self.cfg.ENVIRON.MLFLOW_TRACKING_URI,
+            is_sweep=True,
+            **tracker_params,
+        )
+        run_save_dir = osp.join(cfg_tracker.save_directory, tracker.run_name)
+
+        curr_params = self._get_trial_params(trial)
+        curr_params["model.predefined_model"] = None
+        Config.clear_instance()
+        cfg = Config.get_config(self.cfg.model_dump(), curr_params)
+
+        tracker.log_hyperparams(curr_params)
+
+        cfg.save_data(osp.join(run_save_dir, "config.yaml"))
+
+        lightning_module = LuxonisModel(
+            cfg=cfg,
+            dataset_metadata=self.dataset_metadata,
+            save_dir=run_save_dir,
+            input_shape=self.loader_train.input_shape,
+        )
+        pruner_callback = PyTorchLightningPruningCallback(
+            trial, monitor="val/loss"
+        )
+        callbacks: list[pl.Callback] = (
+            [LuxonisProgressBar()] if self.cfg.use_rich_text else []
+        )
+        callbacks.append(pruner_callback)
+        pl_trainer = pl.Trainer(
+            accelerator=cfg.trainer.accelerator,
+            devices=cfg.trainer.devices,
+            strategy=cfg.trainer.strategy,
+            logger=tracker,  # type: ignore
+            max_epochs=cfg.trainer.epochs,
+            accumulate_grad_batches=cfg.trainer.accumulate_grad_batches,
+            check_val_every_n_epoch=cfg.trainer.validation_interval,
+            num_sanity_val_steps=cfg.trainer.num_sanity_val_steps,
+            profiler=cfg.trainer.profiler,
+            callbacks=callbacks,
+        )
+
+        pl_trainer.fit(
+            lightning_module,  # type: ignore
+            self.pytorch_loader_train,
+            self.pytorch_loader_val,
+        )
+        pruner_callback.check_pruned()
+
+        if "val/loss" not in pl_trainer.callback_metrics:
+            raise ValueError(
+                "No validation loss found. "
+                "This can happen if `TestOnTrainEnd` callback is used."
+ ) + + return pl_trainer.callback_metrics["val/loss"].item() + + def _get_trial_params(self, trial: optuna.trial.Trial) -> dict[str, Any]: + """Get trial params based on specified config.""" + cfg_tuner = self.cfg.tuner.params + new_params = {} + for key, value in cfg_tuner.items(): + key_info = key.split("_") + key_name = "_".join(key_info[:-1]) + key_type = key_info[-1] + match key_type, value: + case "categorical", list(lst): + new_value = trial.suggest_categorical(key_name, lst) + case "float", [float(low), float(high), *tail]: + step = tail[0] if tail else None + if step is not None and not isinstance(step, float): + raise ValueError( + f"Step for float type must be float, but got {step}" + ) + new_value = trial.suggest_float(key_name, low, high, step=step) + case "int", [int(low), int(high), *tail]: + step = tail[0] if tail else 1 + if not isinstance(step, int): + raise ValueError( + f"Step for int type must be int, but got {step}" + ) + new_value = trial.suggest_int(key_name, low, high, step=step) + case "loguniform", [float(low), float(high)]: + new_value = trial.suggest_loguniform(key_name, low, high) + case "uniform", [float(low), float(high)]: + new_value = trial.suggest_uniform(key_name, low, high) + case _, _: + raise KeyError( + f"Combination of {key_type} and {value} not supported" + ) + + new_params[key_name] = new_value + + if len(new_params) == 0: + raise ValueError( + "No paramteres to tune. Specify them under `tuner.params`." + ) + return new_params diff --git a/luxonis_train/models/__init__.py b/luxonis_train/models/__init__.py new file mode 100644 index 00000000..1e2f0d91 --- /dev/null +++ b/luxonis_train/models/__init__.py @@ -0,0 +1,5 @@ +from .luxonis_model import LuxonisModel +from .luxonis_output import LuxonisOutput +from .predefined_models import * + +__all__ = ["LuxonisModel", "LuxonisOutput"] diff --git a/luxonis_train/models/luxonis_model.py b/luxonis_train/models/luxonis_model.py new file mode 100644 index 00000000..7cd5e02d --- /dev/null +++ b/luxonis_train/models/luxonis_model.py @@ -0,0 +1,762 @@ +from collections import defaultdict +from collections.abc import Mapping +from logging import getLogger +from typing import Literal, cast + +import lightning.pytorch as pl +import torch +from lightning.pytorch.callbacks import ( + ModelCheckpoint, + RichModelSummary, +) +from lightning.pytorch.utilities import rank_zero_only # type: ignore +from torch import Size, Tensor, nn + +from luxonis_train.attached_modules import ( + BaseAttachedModule, + BaseLoss, + BaseMetric, + BaseVisualizer, +) +from luxonis_train.attached_modules.visualizers import ( + combine_visualizations, + get_unnormalized_images, +) +from luxonis_train.callbacks import ( + LuxonisProgressBar, + ModuleFreezer, +) +from luxonis_train.nodes import BaseNode +from luxonis_train.utils.config import AttachedModuleConfig, Config +from luxonis_train.utils.general import ( + DatasetMetadata, + get_shape_packet, + traverse_graph, +) +from luxonis_train.utils.registry import CALLBACKS, OPTIMIZERS, SCHEDULERS, Registry +from luxonis_train.utils.tracker import LuxonisTrackerPL +from luxonis_train.utils.types import Kwargs, Labels, Packet + +from .luxonis_output import LuxonisOutput + +logger = getLogger(__name__) + + +class LuxonisModel(pl.LightningModule): + """Class representing the entire model. + + This class keeps track of the model graph, nodes, and attached modules. + The model topology is defined as an acyclic graph of nodes. + The graph is saved as a dictionary of predecessors. 
+ + @type save_dir: str + @ivar save_dir: Directory to save checkpoints and logs. + + @type nodes: L{nn.ModuleDict}[str, L{LuxonisModule}] + @ivar nodes: Nodes of the model. Keys are node names, unique for each node. + + @type graph: dict[str, list[str]] + @ivar graph: Graph of the model in a format of a dictionary of predecessors. + Keys are node names, values are inputs to the node (list of node names). + Nodes with no inputs are considered inputs of the whole model. + + @type loss_weights: dict[str, float] + @ivar loss_weights: Dictionary of loss weights. Keys are loss names, values are weights. + + @type input_shapes: dict[str, list[L{Size}]] + @ivar input_shapes: Dictionary of input shapes. Keys are node names, values are lists of shapes + (understood as shapes of the "feature" field in L{Packet}[L{Tensor}]). + + @type outputs: list[str] + @ivar outputs: List of output node names. + + @type losses: L{nn.ModuleDict}[str, L{nn.ModuleDict}[str, L{LuxonisLoss}]] + @ivar losses: Nested dictionary of losses used in the model. Each node can have multiple + losses attached. The first key identifies the node, the second key identifies the + specific loss. + + @type visualizers: dict[str, dict[str, L{LuxonisVisualizer}]] + @ivar visualizers: Dictionary of visualizers to be used with the model. + + @type metrics: dict[str, dict[str, L{LuxonisMetric}]] + @ivar metrics: Dictionary of metrics to be used with the model. + + @type dataset_metadata: L{DatasetMetadata} + @ivar dataset_metadata: Metadata of the dataset. + + @type main_metric: str | None + @ivar main_metric: Name of the main metric to be used for model checkpointing. + If not set, the model with the best metric score won't be saved. + """ + + _trainer: pl.Trainer + logger: LuxonisTrackerPL + + def __init__( + self, + cfg: Config, + save_dir: str, + input_shape: list[int] | Size, + dataset_metadata: DatasetMetadata | None = None, + **kwargs, + ): + """Constructs an instance of `LuxonisModel` from `Config`. + + @type cfg: L{Config} + @param cfg: Config object. + @type save_dir: str + @param save_dir: Directory to save checkpoints. + @type input_shape: list[int] | L{Size} + @param input_shape: Shape of the input tensor. + @type dataset_metadata: L{DatasetMetadata} | None + @param dataset_metadata: Dataset metadata. + @type kwargs: Any + @param kwargs: Additional arguments to pass to the L{LightningModule} + constructor. 
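+
+        If C{cfg.model.weights} is set, the checkpoint is loaded gracefully
+        at the end of initialization (see L{load_checkpoint}).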
+ """ + super().__init__(**kwargs) + + self._export: bool = False + + self.cfg = cfg + self.original_in_shape = Size(input_shape) + self.dataset_metadata = dataset_metadata or DatasetMetadata() + self.frozen_nodes: list[nn.Module] = [] + self.graph: dict[str, list[str]] = {} + self.input_shapes: dict[str, list[Size]] = {} + self.loss_weights: dict[str, float] = {} + self.main_metric: str | None = None + self.save_dir = save_dir + self.test_step_outputs: list[Mapping[str, Tensor | float | int]] = [] + self.training_step_outputs: list[Mapping[str, Tensor | float | int]] = [] + self.validation_step_outputs: list[Mapping[str, Tensor | float | int]] = [] + self.losses: dict[str, dict[str, BaseLoss]] = defaultdict(dict) + self.metrics: dict[str, dict[str, BaseMetric]] = defaultdict(dict) + self.visualizers: dict[str, dict[str, BaseVisualizer]] = defaultdict(dict) + + self._logged_images = 0 + + frozen_nodes: list[str] = [] + nodes: dict[str, tuple[type[BaseNode], Kwargs]] = {} + + for node_cfg in self.cfg.model.nodes: + node_name = node_cfg.name + Node = BaseNode.REGISTRY.get(node_name) + node_name = node_cfg.override_name or node_name + if node_cfg.frozen: + frozen_nodes.append(node_name) + nodes[node_name] = (Node, node_cfg.params) + if not node_cfg.inputs: + self.input_shapes[node_name] = [Size(input_shape)] + self.graph[node_name] = node_cfg.inputs + + self.nodes = self._initiate_nodes(nodes) + + for loss_cfg in self.cfg.model.losses: + loss_name, _ = self._init_attached_module( + loss_cfg, BaseLoss.REGISTRY, self.losses + ) + self.loss_weights[loss_name] = loss_cfg.weight + + for metric_cfg in self.cfg.model.metrics: + metric_name, node_name = self._init_attached_module( + metric_cfg, BaseMetric.REGISTRY, self.metrics + ) + if metric_cfg.is_main_metric: + if self.main_metric is not None: + raise ValueError( + "Multiple main metrics defined. Only one is allowed." + ) + self.main_metric = f"{node_name}/{metric_name}" + + for visualizer_cfg in self.cfg.model.visualizers: + self._init_attached_module( + visualizer_cfg, BaseVisualizer.REGISTRY, self.visualizers + ) + + self.outputs = self.cfg.model.outputs + self.frozen_nodes = [self.nodes[name] for name in frozen_nodes] + self.losses = self._to_module_dict(self.losses) # type: ignore + self.metrics = self._to_module_dict(self.metrics) # type: ignore + self.visualizers = self._to_module_dict(self.visualizers) # type: ignore + + self.load_checkpoint(self.cfg.model.weights) + + def _initiate_nodes( + self, + nodes: dict[str, tuple[type[BaseNode], Kwargs]], + ) -> nn.ModuleDict: + """Initializes all the nodes in the model. + + Traverses the graph and initiates each node using outputs of the preceding + nodes. + + @type nodes: dict[str, tuple[type[LuxonisNode], Kwargs]] + @param nodes: Dictionary of nodes to be initiated. Keys are node names, values + are tuples of node class and node kwargs. + @rtype: L{nn.ModuleDict}[str, L{LuxonisNode}] + @return: Dictionary of initiated nodes. 
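+
+        Shape inference is done by passing dummy tensors (with batch size 2)
+        through the graph in topological order.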
+        """
+        initiated_nodes: dict[str, BaseNode] = {}
+
+        dummy_outputs: dict[str, Packet[Tensor]] = {
+            f"__{node_name}_input__": {
+                "features": [torch.zeros(2, *shape[1:]) for shape in shapes]
+            }
+            for node_name, shapes in self.input_shapes.items()
+        }
+
+        for node_name, (Node, node_kwargs), node_input_names, _ in traverse_graph(
+            self.graph, nodes
+        ):
+            node_input_shapes: list[Packet[Size]] = []
+            node_dummy_inputs: list[Packet[Tensor]] = []
+
+            if not node_input_names:
+                node_input_names = [f"__{node_name}_input__"]
+
+            for node_input_name in node_input_names:
+                dummy_output = dummy_outputs[node_input_name]
+                shape_packet = get_shape_packet(dummy_output)
+                node_input_shapes.append(shape_packet)
+                node_dummy_inputs.append(dummy_output)
+
+            node = Node(
+                input_shapes=node_input_shapes,
+                original_in_shape=self.original_in_shape,
+                dataset_metadata=self.dataset_metadata,
+                **node_kwargs,
+            )
+            node_outputs = node.run(node_dummy_inputs)
+
+            dummy_outputs[node_name] = node_outputs
+            initiated_nodes[node_name] = node
+
+        return nn.ModuleDict(initiated_nodes)
+
+    def forward(
+        self,
+        inputs: Tensor,
+        labels: Labels | None = None,
+        images: Tensor | None = None,
+        *,
+        compute_loss: bool = True,
+        compute_metrics: bool = False,
+        compute_visualizations: bool = False,
+    ) -> LuxonisOutput:
+        """Forward pass of the model.
+
+        Traverses the graph and step-by-step computes the outputs of each node. Each
+        next node is computed only when all of its predecessors are computed. Once the
+        outputs are not needed anymore, they are removed from the memory.
+
+        @type inputs: L{Tensor}
+        @param inputs: Input tensor.
+        @type labels: L{Labels} | None
+        @param labels: Labels dictionary. Defaults to C{None}.
+        @type images: L{Tensor} | None
+        @param images: Canvas tensor for visualizers. Defaults to C{None}.
+        @type compute_loss: bool
+        @param compute_loss: Whether to compute losses. Defaults to C{True}.
+        @type compute_metrics: bool
+        @param compute_metrics: Whether to update metrics. Defaults to C{False}.
+        @type compute_visualizations: bool
+        @param compute_visualizations: Whether to compute visualizations. Defaults to
+            C{False}.
+        @rtype: L{LuxonisOutput}
+        @return: Output of the model.
+        """
+        input_node_name = list(self.input_shapes.keys())[0]
+        input_dict = {input_node_name: [inputs]}
+
+        losses: dict[
+            str, dict[str, Tensor | tuple[Tensor, dict[str, Tensor]]]
+        ] = defaultdict(dict)
+        visualizations: dict[str, dict[str, Tensor]] = defaultdict(dict)
+
+        computed: dict[str, Packet[Tensor]] = {
+            f"__{node_name}_input__": {"features": input_tensors}
+            for node_name, input_tensors in input_dict.items()
+        }
+        for node_name, node, input_names, unprocessed in traverse_graph(
+            self.graph, cast(dict[str, BaseNode], self.nodes)
+        ):
+            # Special input for the first node. Will be changed once
+            # multiple inputs are supported in `luxonis-ml.data`. 
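+            # The dummy key follows the same "__<node_name>_input__" convention
+            # used to seed `computed` above.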
+ if not input_names: + input_names = [f"__{node_name}_input__"] + + node_inputs = [computed[pred] for pred in input_names] + outputs = node.run(node_inputs) + computed[node_name] = outputs + + if compute_loss and node_name in self.losses and labels is not None: + for loss_name, loss in self.losses[node_name].items(): + losses[node_name][loss_name] = loss.run(outputs, labels) + + if compute_metrics and node_name in self.metrics and labels is not None: + for metric in self.metrics[node_name].values(): + metric.run_update(outputs, labels) + + if ( + compute_visualizations + and node_name in self.visualizers + and images is not None + and labels is not None + ): + for viz_name, visualizer in self.visualizers[node_name].items(): + viz = combine_visualizations( + visualizer.run( + images, + images, + outputs, + labels, + ), + ) + visualizations[node_name][viz_name] = viz + + for computed_name in list(computed.keys()): + if computed_name in self.outputs: + continue + for node_name in unprocessed: + if computed_name in self.graph[node_name]: + break + else: + del computed[computed_name] + + outputs_dict = { + node_name: outputs + for node_name, outputs in computed.items() + if node_name in self.outputs + } + + return LuxonisOutput( + outputs=outputs_dict, losses=losses, visualizations=visualizations + ) + + def compute_metrics(self) -> dict[str, dict[str, Tensor]]: + """Computes metrics and returns their values. + + Goes through all metrics in the `metrics` attribute and computes their values. + After the computation, the metrics are reset. + + @rtype: dict[str, dict[str, L{Tensor}]] + @return: Dictionary of computed metrics. Each node can have multiple metrics + attached. The first key identifies the node, the second key identifies + the specific metric. + """ + metric_results: dict[str, dict[str, Tensor]] = defaultdict(dict) + for node_name, metrics in self.metrics.items(): + for metric_name, metric in metrics.items(): + match metric.compute(): + case (Tensor(data=metric_value), dict(submetrics)): + computed_submetrics = { + metric_name: metric_value, + } | submetrics + case Tensor(data=metric_value): + computed_submetrics = {metric_name: metric_value} + case dict(submetrics): + computed_submetrics = submetrics + case unknown: + raise ValueError( + f"Metric {metric_name} returned unexpected value of " + f"type {type(unknown)}." + ) + metric.reset() + metric_results[node_name] |= computed_submetrics + return metric_results + + def export_onnx(self, save_path: str, **kwargs) -> list[str]: + """Exports the model to ONNX format. + + @type save_path: str + @param save_path: Path where the exported model will be saved. + @type kwargs: Any + @param kwargs: Additional arguments for the L{torch.onnx.export} method. + @rtype: list[str] + @return: List of output names. 
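+            The names follow the C{"<node_name>/<output_name>/<index>"}
+            convention; they are also used as the ONNX output names unless
+            C{output_names} is overridden through C{kwargs}.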
+ """ + + inputs = { + name: [torch.zeros(shape).to(self.device) for shape in shapes] + for name, shapes in self.input_shapes.items() + } + + # TODO: multiple inputs + inp = list(inputs.values())[0][0] + + for module in self.modules(): + if isinstance(module, BaseNode): + module.set_export_mode() + + outputs = self.forward(inp.clone()).outputs + output_order = sorted( + [ + (node_name, output_name, i) + for node_name, outs in outputs.items() + for output_name, out in outs.items() + for i in range(len(out)) + ] + ) + output_names = [ + f"{node_name}/{output_name}/{i}" + for node_name, output_name, i in output_order + ] + + old_forward = self.forward + + def export_forward(inputs) -> tuple[Tensor, ...]: + outputs = old_forward( + inputs, + None, + compute_loss=False, + compute_metrics=False, + compute_visualizations=False, + ).outputs + return tuple( + outputs[node_name][output_name][i] + for node_name, output_name, i in output_order + ) + + self.forward = export_forward # type: ignore + if "output_names" not in kwargs: + kwargs["output_names"] = output_names + + self.to_onnx(save_path, inp, **kwargs) + + self.forward = old_forward # type: ignore + + for module in self.modules(): + if isinstance(module, BaseNode): + module.set_export_mode(False) + + logger.info(f"Model exported to {save_path}") + return output_names + + def process_losses( + self, + losses_dict: dict[str, dict[str, Tensor | tuple[Tensor, dict[str, Tensor]]]], + ) -> tuple[Tensor, dict[str, Tensor]]: + """Processes individual losses from the model run. + + Goes over the computed losses and computes the final loss as a weighted sum of + all the losses. + + @type losses_dict: dict[str, dict[str, Tensor | tuple[Tensor, dict[str, + Tensor]]]] + @param losses_dict: Dictionary of computed losses. Each node can have multiple + losses attached. The first key identifies the node, the second key + identifies the specific loss. Values are either single tensors or tuples of + tensors and sublosses. + @rtype: tuple[Tensor, dict[str, Tensor]] + @return: Tuple of final loss and dictionary of processed sublosses. The + dictionary is in a format of {loss_name: loss_value}. 
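+            The final loss is the weighted sum of the individual losses:
+            C{final_loss = sum(loss_weights[loss_name] * loss)}.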
+ """ + final_loss = torch.zeros(1, device=self.device) + training_step_output: dict[str, Tensor] = {} + for node_name, losses in losses_dict.items(): + for loss_name, loss_values in losses.items(): + if isinstance(loss_values, tuple): + loss, sublosses = loss_values + else: + loss = loss_values + sublosses = {} + + loss *= self.loss_weights[loss_name] + final_loss += loss + training_step_output[ + f"loss/{node_name}/{loss_name}" + ] = loss.detach().cpu() + if self.cfg.trainer.log_sub_losses and sublosses: + for subloss_name, subloss_value in sublosses.items(): + training_step_output[ + f"loss/{node_name}/{loss_name}/{subloss_name}" + ] = subloss_value.detach().cpu() + training_step_output["loss"] = final_loss.detach().cpu() + return final_loss, training_step_output + + def training_step(self, train_batch: tuple[Tensor, Labels]) -> Tensor: + """Performs one step of training with provided batch.""" + outputs = self.forward(*train_batch) + assert outputs.losses, "Losses are empty, check if you have defined any loss" + + loss, training_step_output = self.process_losses(outputs.losses) + self.training_step_outputs.append(training_step_output) + return loss + + def validation_step(self, val_batch: tuple[Tensor, Labels]) -> dict[str, Tensor]: + """Performs one step of validation with provided batch.""" + return self._evaluation_step("val", val_batch) + + def test_step(self, test_batch: tuple[Tensor, Labels]) -> dict[str, Tensor]: + """Performs one step of testing with provided batch.""" + return self._evaluation_step("test", test_batch) + + def on_train_epoch_end(self) -> None: + """Performs train epoch end operations.""" + epoch_train_losses = self._average_losses(self.training_step_outputs) + for module in self.modules(): + if isinstance(module, (BaseNode, BaseLoss)): + module._epoch = self.current_epoch + + for key, value in epoch_train_losses.items(): + self.log(f"train/{key}", value, sync_dist=True) + + self.training_step_outputs.clear() + + def on_validation_epoch_end(self) -> None: + """Performs validation epoch end operations.""" + return self._evaluation_epoch_end("val") + + def on_test_epoch_end(self) -> None: + """Performs test epoch end operations.""" + return self._evaluation_epoch_end("test") + + def get_status(self) -> tuple[int, int]: + """Returns current epoch and number of all epochs.""" + return self.current_epoch, self.cfg.trainer.epochs + + def get_status_percentage(self) -> float: + """Returns percentage of current training, takes into account early stopping.""" + if self._trainer.early_stopping_callback: + # model haven't yet stop from early stopping callback + if self._trainer.early_stopping_callback.stopped_epoch == 0: + return (self.current_epoch / self.cfg.trainer.epochs) * 100 + else: + return 100.0 + else: + return (self.current_epoch / self.cfg.trainer.epochs) * 100 + + def _evaluation_step( + self, mode: Literal["test", "val"], batch: tuple[Tensor, Labels] + ) -> dict[str, Tensor]: + inputs, labels = batch + images = None + if self._logged_images < self.cfg.trainer.num_log_images: + images = get_unnormalized_images(self.cfg, inputs) + outputs = self.forward( + inputs, + labels, + images=images, + compute_metrics=True, + compute_visualizations=True, + ) + + _, step_output = self.process_losses(outputs.losses) + self.validation_step_outputs.append(step_output) + + logged_images = self._logged_images + for node_name, visualizations in outputs.visualizations.items(): + for viz_name, viz_batch in visualizations.items(): + logged_images = self._logged_images + for viz in 
viz_batch: + if logged_images >= self.cfg.trainer.num_log_images: + break + self.logger.log_image( + f"{mode}/visualizations/{node_name}/{viz_name}/{logged_images}", + viz.detach().cpu().numpy().transpose(1, 2, 0), + step=self.current_epoch, + ) + logged_images += 1 + self._logged_images = logged_images + + return step_output + + def _evaluation_epoch_end(self, mode: Literal["test", "val"]) -> None: + epoch_val_losses = self._average_losses(self.validation_step_outputs) + + for key, value in epoch_val_losses.items(): + self.log(f"{mode}/{key}", value, sync_dist=True) + + metric_results: dict[str, dict[str, float]] = defaultdict(dict) + logger.info(f"Computing metrics on {mode} subset ...") + computed_metrics = self.compute_metrics() + logger.info("Metrics computed.") + for node_name, metrics in computed_metrics.items(): + for metric_name, metric_value in metrics.items(): + metric_results[node_name][metric_name] = metric_value.cpu().item() + self.log( + f"{mode}/metric/{node_name}/{metric_name}", + metric_value, + sync_dist=True, + ) + + if self.cfg.trainer.verbose: + self._print_results( + stage="Validation" if mode == "val" else "Test", + loss=epoch_val_losses["loss"], + metrics=metric_results, + ) + + self.validation_step_outputs.clear() + self._logged_images = 0 + + def configure_callbacks(self) -> list[pl.Callback]: + """Configures Pytorch Lightning callbacks.""" + self.min_val_loss_checkpoints_path = f"{self.save_dir}/min_val_loss" + self.best_val_metric_checkpoints_path = f"{self.save_dir}/best_val_metric" + model_name = self.cfg.model.name + + callbacks: list[pl.Callback] = [] + + callbacks.append( + ModelCheckpoint( + monitor="val/loss", + dirpath=self.min_val_loss_checkpoints_path, + filename=f"{model_name}_loss={{val/loss:.4f}}_{{epoch:02d}}", + auto_insert_metric_name=False, + save_top_k=self.cfg.trainer.save_top_k, + mode="min", + ) + ) + + if self.main_metric is not None: + main_metric = self.main_metric.replace("/", "_") + callbacks.append( + ModelCheckpoint( + monitor=f"val/metric/{self.main_metric}", + dirpath=self.best_val_metric_checkpoints_path, + filename=f"{model_name}_{main_metric}={{val/metric/{self.main_metric}:.4f}}" + f"_loss={{val/loss:.4f}}_{{epoch:02d}}", + auto_insert_metric_name=False, + save_top_k=self.cfg.trainer.save_top_k, + mode="max", + ) + ) + + if self.frozen_nodes: + callbacks.append(ModuleFreezer(self.frozen_nodes)) + + if self.cfg.use_rich_text: + callbacks.append(RichModelSummary(max_depth=2)) + + for callback in self.cfg.trainer.callbacks: + if callback.active: + callbacks.append(CALLBACKS.get(callback.name)(**callback.params)) + + return callbacks + + def configure_optimizers( + self, + ) -> tuple[list[torch.optim.Optimizer], list[nn.Module]]: + """Configures model optimizers and schedulers.""" + cfg_optimizer = self.cfg.trainer.optimizer + cfg_scheduler = self.cfg.trainer.scheduler + + optim_params = cfg_optimizer.params | { + "params": filter(lambda p: p.requires_grad, self.parameters()), + } + optimizer = OPTIMIZERS.get(cfg_optimizer.name)(**optim_params) + + scheduler_params = cfg_scheduler.params | {"optimizer": optimizer} + scheduler = SCHEDULERS.get(cfg_scheduler.name)(**scheduler_params) + + return [optimizer], [scheduler] + + def load_checkpoint(self, path: str | None) -> None: + """Loads checkpoint weights from provided path. + + Loads the checkpoints gracefully, ignoring keys that are not found in the model + state dict or in the checkpoint. + + @type path: str | None + @param path: Path to the checkpoint. 
If C{None}, no checkpoint will be loaded. + """ + if path is None: + return + checkpoint = torch.load(path, map_location=self.device) + if "state_dict" not in checkpoint: + raise ValueError("Checkpoint does not contain state_dict.") + state_dict = {} + self_state_dict = self.state_dict() + for key, value in checkpoint["state_dict"].items(): + if key not in self_state_dict.keys(): + logger.warning( + f"Key `{key}` from checkpoint not found in model state dict." + ) + else: + state_dict[key] = value + + for key in self_state_dict: + if key not in state_dict.keys(): + logger.warning(f"Key `{key}` was not found in checkpoint.") + else: + try: + self_state_dict[key].copy_(state_dict[key]) + except Exception: + logger.warning( + f"Key `{key}` from checkpoint could not be loaded into model." + ) + + logger.info(f"Loaded checkpoint from {path}.") + + def _init_attached_module( + self, + cfg: AttachedModuleConfig, + registry: Registry, + storage: Mapping[str, Mapping[str, BaseAttachedModule]], + ) -> tuple[str, str]: + Module = registry.get(cfg.name) + module_name = cfg.override_name or cfg.name + node_name = cfg.attached_to + module = Module(**cfg.params, node=self.nodes[node_name]) + storage[node_name][module_name] = module # type: ignore + return module_name, node_name + + @staticmethod + def _to_module_dict(modules: dict[str, dict[str, nn.Module]]) -> nn.ModuleDict: + return nn.ModuleDict( + { + node_name: nn.ModuleDict(node_modules) + for node_name, node_modules in modules.items() + } + ) + + @property + def _progress_bar(self) -> LuxonisProgressBar: + return cast(LuxonisProgressBar, self._trainer.progress_bar_callback) + + @rank_zero_only + def _print_results( + self, stage: str, loss: float, metrics: dict[str, dict[str, float]] + ) -> None: + """Prints validation metrics in the console.""" + + logger.info(f"{stage} loss: {loss:.4f}") + + if self.cfg.use_rich_text: + self._progress_bar.print_results(stage=stage, loss=loss, metrics=metrics) + else: + for node_name, node_metrics in metrics.items(): + for metric_name, metric_value in node_metrics.items(): + logger.info( + f"{stage} metric: {node_name}/{metric_name}: {metric_value:.4f}" + ) + + if self.main_metric is not None: + main_metric_node, main_metric_name = self.main_metric.split("/") + main_metric = metrics[main_metric_node][main_metric_name] + logger.info(f"{stage} main metric ({self.main_metric}): {main_metric:.4f}") + + def _is_train_eval_epoch(self) -> bool: + """Checks if train eval should be performed on current epoch based on configured + train_metrics_interval.""" + train_metrics_interval = self.cfg.trainer.train_metrics_interval + # add +1 to current_epoch because starting epoch is at 0 + return ( + train_metrics_interval != -1 + and (self.current_epoch + 1) % train_metrics_interval == 0 + ) + + def _average_losses( + self, step_outputs: list[Mapping[str, Tensor | float | int]] + ) -> dict[str, float]: + avg_losses: dict[str, float] = defaultdict(float) + + for step_output in step_outputs: + for key, value in step_output.items(): + avg_losses[key] += float(value) + + for key in avg_losses: + avg_losses[key] /= len(step_outputs) + return avg_losses diff --git a/luxonis_train/models/luxonis_output.py b/luxonis_train/models/luxonis_output.py new file mode 100644 index 00000000..e6b8e16c --- /dev/null +++ b/luxonis_train/models/luxonis_output.py @@ -0,0 +1,33 @@ +from dataclasses import dataclass, field +from pprint import pformat + +from torch import Tensor + +from luxonis_train.utils.general import get_shape_packet +from 
luxonis_train.utils.types import Packet + + +@dataclass +class LuxonisOutput: + outputs: dict[str, Packet[Tensor]] + losses: dict[str, dict[str, Tensor | tuple[Tensor, dict[str, Tensor]]]] + visualizations: dict[str, dict[str, Tensor]] = field(default_factory=dict) + metrics: dict[str, dict[str, Tensor]] = field(default_factory=dict) + + def __str__(self) -> str: + outputs = { + node_name: get_shape_packet(packet) + for node_name, packet in self.outputs.items() + } + viz = { + f"{node_name}.{viz_name}": viz_value.shape + for node_name, viz in self.visualizations.items() + for viz_name, viz_value in viz.items() + } + string = pformat( + {"outputs": outputs, "visualizations": viz, "losses": self.losses} + ) + return f"{self.__class__.__name__}(\n{string}\n)" + + def __repr__(self) -> str: + return str(self) diff --git a/luxonis_train/models/predefined_models/README.md b/luxonis_train/models/predefined_models/README.md new file mode 100644 index 00000000..ddf0b46d --- /dev/null +++ b/luxonis_train/models/predefined_models/README.md @@ -0,0 +1,132 @@ +# Predefined models + +In addition to defining the model by hand, we offer a list of simple predefined +models which can be used instead. + +## Table Of Contents + +- [SegmentationModel](#segmentationmodel) +- [DetectionModel](#detectionmodel) +- [KeypointDetectionModel](#keypointdetectionmodel) +- [ClassificationModel](#classificationmodel) + +**Params** + +| Key | Type | Default value | Description | +| ------------------- | ---------------- | ------------- | ---------------------------------------------------------------------- | +| name | str | | Name of the predefined architecture. See the available options below. | +| params | dict\[str, Any\] | {} | Additional parameters of the predefined model. | +| include_nodes | bool | True | Whether to include nodes of the model. | +| include_losses | bool | True | Whether to include loss functions. | +| include_metrics | bool | True | Whether to include metrics. | +| include_visualizers | bool | True | Whether to include visualizers. | + +## SegmentationModel + +See an example configuration file using this predefined model [here](../../../configs/segmentation_model.yaml) + +**Components** + +| Name | Alias | Function | +| --------------------------------------------------------------------------------------------- | -------------------------- | ----------------------------------------------------------------------- | +| [MicroNet](../../nodes/README.md#micronet) | segmentation_backbone | Backbone of the model. Can be changed. | +| [SegmentationHead](../../nodes/README.md#segmentationhead) | segmentation_head | Head of the model. | +| [BCEWithLogitsLoss](../../attached_modules/losses/README.md#bcewithlogitsloss) | segmentation_loss | Loss of the model when the task is set to "binary". | +| [CrossEntropyLoss](../../attached_modules/losses/README.md#crossentropyloss) | segmentation_loss | Loss of the model when the task is set to "multiclass" or "multilabel". | +| [JaccardIndex](../../attached_modules/metrics/README.md#torchmetrics) | segmentation_jaccard_index | Main metric of the model. | +| [F1Score](../../attached_modules/metrics/README.md#torchmetrics) | segmentation_f1_score | Secondary metric of the model. | +| [SegmentationVisualizer](../../attached_modules/visualizers/README.md#segmentationvisualizer) | segmentation_visualizer | Visualizer of the `SegmentationHead`.
| + +**Params** + +| Key | Type | Default value | Description | +| ----------------- | --------------------------------- | ------------- | ------------------------------------------ | +| task | Literal\["binary", "multiclass"\] | "binary" | Type of the task of the model. | +| backbone | str | "MicroNet" | Name of the node to be used as a backbone. | +| backbone_params | dict | {} | Additional parameters to the backbone. | +| head_params | dict | {} | Additional parameters to the head. | +| loss_params | dict | {} | Additional parameters to the loss. | +| visualizer_params | dict | {} | Additional parameters to the visualizer. | + +## DetectionModel + +See an example configuration file using this predefined model [here](../../../configs/detection_model.yaml) + +**Components** + +| Name | Alias | Function | +| -------------------------------------------------------------------------------------- | -------------------- | ----------------------------------- | +| [EfficientRep](../../nodes/README.md#efficientrep) | detection_backbone | Backbone of the model. | +| [RepPANNeck](../../nodes/README.md#reppanneck) | detection_neck | Neck of the model. | +| [EfficientBBoxHead](../../nodes/README.md#efficientbboxhead) | detection_head | Head of the model. | +| [AdaptiveDetectionLoss](../../attached_modules/losses/README.md#adaptivedetectionloss) | detection_loss | Loss of the model. | +| [MeanAveragePrecision](../../attached_modules/metrics/README.md#meanaverageprecision) | detection_map | Main metric of the model. | +| [BBoxVisualizer](../../attached_modules/visualizers/README.md#bboxvisualizer) | detection_visualizer | Visualizer of the `detection_head`. | + +**Params** + +| Key | Type | Default value | Description | +| ----------------- | ---- | ------------- | ----------------------------------------- | +| use_neck | bool | True | Whether to include the neck in the model. | +| backbone_params | dict | {} | Additional parameters to the backbone. | +| neck_params | dict | {} | Additional parameters to the neck. | +| head_params | dict | {} | Additional parameters to the head. | +| loss_params | dict | {} | Additional parameters to the loss. | +| visualizer_params | dict | {} | Additional parameters to the visualizer. | + +## KeypointDetectionModel + +See an example configuration file using this predefined model [here](../../../configs/keypoint_bbox_model.yaml) + +**Components** + +| Name | Alias | Function | +| ------------------------------------------------------------------------------------------------------- | ---------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------- | +| [EfficientRep](../../nodes/README.md#efficientrep) | kpt_detection_backbone | Backbone of the model. | +| [RepPANNeck](../../nodes/README.md#reppanneck) | kpt_detection_neck | Neck of the model. | +| [ImplicitKeypointBBoxHead](../../nodes/README.md#implicitkeypointbboxhead) | kpt_detection_head | Head of the model. | +| [ImplicitKeypointBBoxLoss](../../attached_modules/losses/README.md#implicitkeypointbboxloss) | kpt_detection_loss | Loss of the model. | +| [ObjectKeypointSimilarity](../../attached_modules/metrics/README.md#objectkeypointsimilarity) | kpt_detection_oks | Main metric of the model. | +| [MeanAveragePrecisionKeypoints](../../attached_modules/metrics/README.md#meanaverageprecisionkeypoints) | kpt_detection_map | Secondary metric of the model. 
| + | [BBoxVisualizer](../../attached_modules/visualizers/README.md#bboxvisualizer) | | Visualizer for bounding boxes. Combined with the keypoint visualizer in [MultiVisualizer](../../attached_modules/visualizers/README.md#multivisualizer). | + | [KeypointVisualizer](../../attached_modules/visualizers/README.md#keypointvisualizer) | | Visualizer for keypoints. Combined with the bounding box visualizer in [MultiVisualizer](../../attached_modules/visualizers/README.md#multivisualizer). | + +**Params** + +| Key | Type | Default value | Description | +| ---------------------- | ---- | ------------- | ------------------------------------------------- | +| use_neck | bool | True | Whether to include the neck in the model. | +| backbone_params | dict | {} | Additional parameters to the backbone. | +| neck_params | dict | {} | Additional parameters to the neck. | +| head_params | dict | {} | Additional parameters to the head. | +| loss_params | dict | {} | Additional parameters to the loss. | +| kpt_visualizer_params | dict | {} | Additional parameters to the keypoint visualizer. | +| bbox_visualizer_params | dict | {} | Additional parameters to the bbox visualizer. | + +## ClassificationModel + +Basic model for classification. Can be used for multiclass and multilabel tasks. + +See an example configuration file using this predefined model [here](../../../configs/classification_model.yaml) + +**Components** + +| Name | Alias | Function | +| ---------------------------------------------------------------------------- | ----------------------- | -------------------------------------- | +| [MicroNet](../../nodes/README.md#micronet) | classification_backbone | Backbone of the model. Can be changed. | +| [ClassificationHead](../../nodes/README.md#classificationhead) | classification_head | Head of the model. | +| [CrossEntropyLoss](../../attached_modules/losses/README.md#crossentropyloss) | classification_loss | Loss of the model. | +| [F1Score](../../attached_modules/metrics/README.md#torchmetrics) | classification_f1_score | Main metric of the model. | +| [Accuracy](../../attached_modules/metrics/README.md#torchmetrics) | classification_accuracy | Secondary metric of the model. | +| [Recall](../../attached_modules/metrics/README.md#torchmetrics) | classification_recall | Secondary metric of the model. | + +**Params** + +| Key | Type | Default value | Description | +| ----------------- | ------------------------------------- | ------------- | ------------------------------------------ | +| task | Literal\["multiclass", "multilabel"\] | "multilabel" | Type of the task of the model. | +| backbone | str | "MicroNet" | Name of the node to be used as a backbone. | +| backbone_params | dict | {} | Additional parameters to the backbone. | +| head_params | dict | {} | Additional parameters to the head. | +| loss_params | dict | {} | Additional parameters to the loss. | +| visualizer_params | dict | {} | Additional parameters to the visualizer.
| diff --git a/luxonis_train/models/predefined_models/__init__.py b/luxonis_train/models/predefined_models/__init__.py new file mode 100644 index 00000000..0e8fe8c0 --- /dev/null +++ b/luxonis_train/models/predefined_models/__init__.py @@ -0,0 +1,13 @@ +from .base_predefined_model import BasePredefinedModel +from .classification_model import ClassificationModel +from .detection_model import DetectionModel +from .keypoint_detection_model import KeypointDetectionModel +from .segmentation_model import SegmentationModel + +__all__ = [ + "BasePredefinedModel", + "SegmentationModel", + "DetectionModel", + "KeypointDetectionModel", + "ClassificationModel", +] diff --git a/luxonis_train/models/predefined_models/base_predefined_model.py b/luxonis_train/models/predefined_models/base_predefined_model.py new file mode 100644 index 00000000..33ababdc --- /dev/null +++ b/luxonis_train/models/predefined_models/base_predefined_model.py @@ -0,0 +1,53 @@ +from abc import ABC, abstractproperty + +from luxonis_ml.utils.registry import AutoRegisterMeta + +from luxonis_train.utils.config import ( + AttachedModuleConfig, + LossModuleConfig, + MetricModuleConfig, + ModelNodeConfig, +) +from luxonis_train.utils.registry import MODELS + + +class BasePredefinedModel( + ABC, + metaclass=AutoRegisterMeta, + registry=MODELS, + register=False, +): + @abstractproperty + def nodes(self) -> list[ModelNodeConfig]: + ... + + @abstractproperty + def losses(self) -> list[LossModuleConfig]: + ... + + @abstractproperty + def metrics(self) -> list[MetricModuleConfig]: + ... + + @abstractproperty + def visualizers(self) -> list[AttachedModuleConfig]: + ... + + def generate_model( + self, + include_nodes: bool = True, + include_losses: bool = True, + include_metrics: bool = True, + include_visualizers: bool = True, + ) -> tuple[ + list[ModelNodeConfig], + list[LossModuleConfig], + list[MetricModuleConfig], + list[AttachedModuleConfig], + ]: + nodes = self.nodes if include_nodes else [] + losses = self.losses if include_losses else [] + metrics = self.metrics if include_metrics else [] + visualizers = self.visualizers if include_visualizers else [] + + return nodes, losses, metrics, visualizers diff --git a/luxonis_train/models/predefined_models/classification_model.py b/luxonis_train/models/predefined_models/classification_model.py new file mode 100644 index 00000000..72a22186 --- /dev/null +++ b/luxonis_train/models/predefined_models/classification_model.py @@ -0,0 +1,86 @@ +from dataclasses import dataclass, field +from typing import Literal + +from luxonis_train.utils.config import ( + AttachedModuleConfig, + LossModuleConfig, + MetricModuleConfig, + ModelNodeConfig, +) +from luxonis_train.utils.types import Kwargs + +from .base_predefined_model import BasePredefinedModel + + +@dataclass +class ClassificationModel(BasePredefinedModel): + backbone: str = "MicroNet" + task: Literal["multiclass", "multilabel"] = "multilabel" + backbone_params: Kwargs = field(default_factory=dict) + head_params: Kwargs = field(default_factory=dict) + loss_params: Kwargs = field(default_factory=dict) + visualizer_params: Kwargs = field(default_factory=dict) + + @property + def nodes(self) -> list[ModelNodeConfig]: + return [ + ModelNodeConfig( + name=self.backbone, + override_name="classification_backbone", + frozen=self.backbone_params.pop("frozen", False), + params=self.backbone_params, + ), + ModelNodeConfig( + name="ClassificationHead", + override_name="classification_head", + inputs=["classification_backbone"], + 
frozen=self.head_params.pop("frozen", False), + params=self.head_params, + ), + ] + + @property + def losses(self) -> list[LossModuleConfig]: + return [ + LossModuleConfig( + name="CrossEntropyLoss", + override_name="classification_loss", + attached_to="classification_head", + params=self.loss_params, + weight=1.0, + ) + ] + + @property + def metrics(self) -> list[MetricModuleConfig]: + return [ + MetricModuleConfig( + name="F1Score", + override_name="classification_f1_score", + is_main_metric=True, + attached_to="classification_head", + params={"task": self.task}, + ), + MetricModuleConfig( + name="Accuracy", + override_name="classification_accuracy", + attached_to="classification_head", + params={"task": self.task}, + ), + MetricModuleConfig( + name="Recall", + override_name="classification_recall", + attached_to="classification_head", + params={"task": self.task}, + ), + ] + + @property + def visualizers(self) -> list[AttachedModuleConfig]: + return [ + AttachedModuleConfig( + name="ClassificationVisualizer", + attached_to="classification_head", + params=self.visualizer_params, + ) + ] diff --git a/luxonis_train/models/predefined_models/detection_model.py b/luxonis_train/models/predefined_models/detection_model.py new file mode 100644 index 00000000..8b248fc4 --- /dev/null +++ b/luxonis_train/models/predefined_models/detection_model.py @@ -0,0 +1,87 @@ +from dataclasses import dataclass, field + +from luxonis_train.utils.config import ( + AttachedModuleConfig, + LossModuleConfig, + MetricModuleConfig, + ModelNodeConfig, +) +from luxonis_train.utils.types import Kwargs + +from .base_predefined_model import BasePredefinedModel + + +@dataclass +class DetectionModel(BasePredefinedModel): + use_neck: bool = True + backbone_params: Kwargs = field(default_factory=dict) + neck_params: Kwargs = field(default_factory=dict) + head_params: Kwargs = field(default_factory=dict) + loss_params: Kwargs = field(default_factory=dict) + visualizer_params: Kwargs = field(default_factory=dict) + + @property + def nodes(self) -> list[ModelNodeConfig]: + nodes = [ + ModelNodeConfig( + name="EfficientRep", + override_name="detection_backbone", + frozen=self.backbone_params.pop("frozen", False), + params=self.backbone_params, + ), + ] + if self.use_neck: + nodes.append( + ModelNodeConfig( + name="RepPANNeck", + override_name="detection_neck", + inputs=["detection_backbone"], + frozen=self.neck_params.pop("frozen", False), + params=self.neck_params, + ) + ) + + nodes.append( + ModelNodeConfig( + name="EfficientBBoxHead", + override_name="detection_head", + frozen=self.head_params.pop("frozen", False), + inputs=["detection_neck"] if self.use_neck else ["detection_backbone"], + params=self.head_params, + ) + ) + return nodes + + @property + def losses(self) -> list[LossModuleConfig]: + return [ + LossModuleConfig( + name="AdaptiveDetectionLoss", + override_name="detection_loss", + attached_to="detection_head", + params=self.loss_params, + weight=1.0, + ) + ] + + @property + def metrics(self) -> list[MetricModuleConfig]: + return [ + MetricModuleConfig( + name="MeanAveragePrecision", + override_name="detection_map", + attached_to="detection_head", + is_main_metric=True, + ), + ] + + @property + def visualizers(self) -> list[AttachedModuleConfig]: + return [ + AttachedModuleConfig( + name="BBoxVisualizer", + override_name="detection_visualizer", + attached_to="detection_head", + params=self.visualizer_params, + ) + ] diff --git a/luxonis_train/models/predefined_models/keypoint_detection_model.py 
b/luxonis_train/models/predefined_models/keypoint_detection_model.py new file mode 100644 index 00000000..fb590eac --- /dev/null +++ b/luxonis_train/models/predefined_models/keypoint_detection_model.py @@ -0,0 +1,105 @@ +from dataclasses import dataclass, field + +from luxonis_train.utils.config import ( + AttachedModuleConfig, + LossModuleConfig, + MetricModuleConfig, + ModelNodeConfig, +) +from luxonis_train.utils.types import Kwargs + +from .base_predefined_model import BasePredefinedModel + + +@dataclass +class KeypointDetectionModel(BasePredefinedModel): + use_neck: bool = True + backbone_params: Kwargs = field(default_factory=dict) + neck_params: Kwargs = field(default_factory=dict) + head_params: Kwargs = field(default_factory=dict) + loss_params: Kwargs = field(default_factory=dict) + kpt_visualizer_params: Kwargs = field(default_factory=dict) + bbox_visualizer_params: Kwargs = field(default_factory=dict) + + @property + def nodes(self) -> list[ModelNodeConfig]: + nodes = [ + ModelNodeConfig( + name="EfficientRep", + override_name="kpt_detection_backbone", + frozen=self.backbone_params.pop("frozen", False), + params=self.backbone_params, + ), + ] + if self.use_neck: + nodes.append( + ModelNodeConfig( + name="RepPANNeck", + override_name="kpt_detection_neck", + inputs=["kpt_detection_backbone"], + frozen=self.neck_params.pop("frozen", False), + params=self.neck_params, + ) + ) + + nodes.append( + ModelNodeConfig( + name="ImplicitKeypointBBoxHead", + override_name="kpt_detection_head", + inputs=["kpt_detection_neck"] + if self.use_neck + else ["kpt_detection_backbone"], + frozen=self.head_params.pop("frozen", False), + params=self.head_params, + ) + ) + return nodes + + @property + def losses(self) -> list[LossModuleConfig]: + return [ + LossModuleConfig( + name="ImplicitKeypointBBoxLoss", + attached_to="kpt_detection_head", + params=self.loss_params, + weight=1.0, + ) + ] + + @property + def metrics(self) -> list[MetricModuleConfig]: + return [ + MetricModuleConfig( + name="ObjectKeypointSimilarity", + override_name="kpt_detection_oks", + attached_to="kpt_detection_head", + is_main_metric=True, + ), + MetricModuleConfig( + name="MeanAveragePrecisionKeypoints", + override_name="kpt_detection_map", + attached_to="kpt_detection_head", + ), + ] + + @property + def visualizers(self) -> list[AttachedModuleConfig]: + return [ + AttachedModuleConfig( + name="MultiVisualizer", + override_name="kpt_detection_visualizer", + attached_to="kpt_detection_head", + params={ + "visualizers": [ + { + "name": "KeypointVisualizer", + "params": self.kpt_visualizer_params, + }, + { + "name": "BBoxVisualizer", + "params": self.bbox_visualizer_params, + }, + ] + }, + ) + ] diff --git a/luxonis_train/models/predefined_models/segmentation_model.py b/luxonis_train/models/predefined_models/segmentation_model.py new file mode 100644 index 00000000..463099e5 --- /dev/null +++ b/luxonis_train/models/predefined_models/segmentation_model.py @@ -0,0 +1,83 @@ +from dataclasses import dataclass, field +from typing import Literal + +from luxonis_train.utils.config import ( + AttachedModuleConfig, + LossModuleConfig, + MetricModuleConfig, + ModelNodeConfig, +) +from luxonis_train.utils.types import Kwargs + +from .base_predefined_model import BasePredefinedModel + + +@dataclass +class SegmentationModel(BasePredefinedModel): + backbone: str = "MicroNet" + task: Literal["binary", "multiclass"] = "binary" + backbone_params: Kwargs = field(default_factory=dict) + head_params: Kwargs = field(default_factory=dict) + 
loss_params: Kwargs = field(default_factory=dict) + visualizer_params: Kwargs = field(default_factory=dict) + + @property + def nodes(self) -> list[ModelNodeConfig]: + return [ + ModelNodeConfig( + name=self.backbone, + override_name="segmentation_backbone", + frozen=self.backbone_params.pop("frozen", False), + params=self.backbone_params, + ), + ModelNodeConfig( + name="SegmentationHead", + override_name="segmentation_head", + inputs=["segmentation_backbone"], + frozen=self.head_params.pop("frozen", False), + params=self.head_params, + ), + ] + + @property + def losses(self) -> list[LossModuleConfig]: + return [ + LossModuleConfig( + name="BCEWithLogitsLoss" + if self.task == "binary" + else "CrossEntropyLoss", + override_name="segmentation_loss", + attached_to="segmentation_head", + params=self.loss_params, + weight=1.0, + ) + ] + + @property + def metrics(self) -> list[MetricModuleConfig]: + return [ + MetricModuleConfig( + name="JaccardIndex", + override_name="segmentation_jaccard_index", + attached_to="segmentation_head", + is_main_metric=True, + params={"task": self.task}, + ), + MetricModuleConfig( + name="F1Score", + override_name="segmentation_f1_score", + attached_to="segmentation_head", + params={"task": self.task}, + ), + ] + + @property + def visualizers(self) -> list[AttachedModuleConfig]: + return [ + AttachedModuleConfig( + name="SegmentationVisualizer", + override_name="segmentation_visualizer", + attached_to="segmentation_head", + params=self.visualizer_params, + ) + ] diff --git a/luxonis_train/nodes/README.md b/luxonis_train/nodes/README.md new file mode 100644 index 00000000..2c3758f9 --- /dev/null +++ b/luxonis_train/nodes/README.md @@ -0,0 +1,192 @@ +# Nodes + +Nodes are the basic building blocks of the model. They can be connected together +arbitrarily, as long as the two nodes are compatible with each other. + +## Table Of Contents + +- [ResNet18](#resnet18) +- [MicroNet](#micronet) +- [RepVGG](#repvgg) +- [EfficientRep](#efficientrep) +- [ReXNetV1_lite](#rexnetv1_lite) +- [MobileOne](#mobileone) +- [MobileNetV2](#mobilenetv2) +- [EfficientNet](#efficientnet) +- [ContextSpatial](#contextspatial) +- [RepPANNeck](#reppanneck) +- [ClassificationHead](#classificationhead) +- [SegmentationHead](#segmentationhead) +- [BiSeNetHead](#bisenethead) +- [EfficientBBoxHead](#efficientbboxhead) +- [ImplicitKeypointBBoxHead](#implicitkeypointbboxhead) + +Every node takes these parameters: + +| Key | Type | Default value | Description | +| ------------ | ----------- | ------------- | ------------------------------------------------------------------------------------------------------------------------------ | +| attach_index | int \| None | None | Index of the previous output that the node attaches to. Each node has a sensible default. Usually should not be manually set. | +| n_classes | int \| None | None | Number of classes in the dataset. Inferred from the dataset if not provided. | + +Additional parameters for specific nodes are listed below. + +## ResNet18 + +Adapted from [here](https://pytorch.org/vision/main/models/generated/torchvision.models.resnet18.html). + +**Params** + +| Key | Type | Default value | Description | +| ---------------- | ---- | ------------- | --------------------------------------------------- | +| download_weights | bool | False | If True, downloads weights pretrained on ImageNet. | + +## MicroNet + +Adapted from [here](https://github.com/liyunsheng13/micronet).
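+ +As a rough sketch of how such a node is used (illustrative only: the exact keyword arguments depend on the node's constructor, and the shape values below are dummies), node classes are looked up by name in the `NODES` registry and receive the config `params` as keyword arguments: + +```python +from torch import Size + +from luxonis_train.utils.registry import NODES + +Backbone = NODES.get("MicroNet")  # look up the node class by name +backbone = Backbone( +    variant="M2",  # documented MicroNet parameter +    input_shapes=[{"features": [Size([1, 3, 256, 256])]}],  # dummy shape, assumed +    original_in_shape=Size([1, 3, 256, 256]), +) +```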
+ +**Params** + +| Key | Type | Default value | Description | +| ------- | --------------------------- | ------------- | ----------------------- | +| variant | Literal\["M1", "M2", "M3"\] | "M1" | Variant of the network. | + +## RepVGG + +Adapted from [here](https://github.com/DingXiaoH/RepVGG). + +**Params** + +| Key | Type | Default value | Description | +| ------- | --------------------------- | ------------- | ----------------------- | +| variant | Literal\["A0", "A1", "A2"\] | "A0" | Variant of the network. | + +## EfficientRep + +Adapted from [here](https://arxiv.org/pdf/2209.02976.pdf). + +**Params** + +| Key | Type | Default value | Description | +| ------------- | ----------- | --------------------------- | ---------------------------------------------------- | +| channels_list | List\[int\] | \[64, 128, 256, 512, 1024\] | List of number of channels for each block | +| num_repeats | List\[int\] | \[1, 6, 12, 18, 6\] | List of number of repeats of RepVGGBlock | +| in_channels | int | 3 | Number of input channels, should be 3 in most cases | +| depth_mul | float | 0.33 | Depth multiplier | +| width_mul | float | 0.25 | Width multiplier | + +## ReXNetV1_lite + +Adapted from [here](https://github.com/clovaai/rexnet). + +**Params** + +| Key | Type | Default value | Description | +| --------------- | ----- | ------------- | -------------------------------------------------------------------- | +| fix_head_stem | bool | False | Whether to keep the head and stem channel counts fixed when scaling | +| divisible_value | int | 8 | Divisor used to round channel counts | +| input_ch | int | 16 | Starting channel dimension | +| final_ch | int | 164 | Final channel dimension | +| multiplier | float | 1.0 | Channel dimension multiplier | +| kernel_conf | str | '333333' | Kernel sizes encoded as string | + +## MobileOne + +Adapted from [here](https://github.com/apple/ml-mobileone). + +**Params** + +| Key | Type | Default value | Description | +| ------- | --------------------------------------- | ------------- | ----------------------- | +| variant | Literal\["s0", "s1", "s2", "s3", "s4"\] | "s0" | Variant of the network. | + +## MobileNetV2 + +Adapted from [here](https://pytorch.org/vision/main/models/generated/torchvision.models.mobilenet_v2.html). + +**Params** + +| Key | Type | Default value | Description | +| ---------------- | ---- | ------------- | --------------------------------------------------- | +| download_weights | bool | False | If True, downloads weights pretrained on ImageNet. | + +## EfficientNet + +Adapted from [here](https://github.com/rwightman/gen-efficientnet-pytorch). + +**Params** + +| Key | Type | Default value | Description | +| ---------------- | ---- | ------------- | --------------------------------------------------- | +| download_weights | bool | False | If True, downloads weights pretrained on ImageNet. | + +## ContextSpatial + +Adapted from [here](https://github.com/taveraantonio/BiseNetv1). + +**Params** + +| Key | Type | Default value | Description | +| ---------------- | ---- | ------------- | ------------------------------------ | +| context_backbone | str | "MobileNetV2" | Backbone used for the context path. | + +## RepPANNeck + +Adapted from [here](https://arxiv.org/pdf/2209.02976.pdf).
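+ +A minimal sketch of keeping `num_heads` consistent between the neck and the attached head, using the config dataclasses from this repository (names and values are illustrative): + +```python +from luxonis_train.utils.config import ModelNodeConfig + +# `num_heads` of RepPANNeck should in most cases match `num_heads` of the head. +neck = ModelNodeConfig( +    name="RepPANNeck", +    override_name="neck", +    inputs=["backbone"], +    params={"num_heads": 3}, +) +head = ModelNodeConfig( +    name="EfficientBBoxHead", +    override_name="head", +    inputs=["neck"], +    params={"num_heads": 3}, +) +```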
+ +**Params** + +| Key | Type | Default value | Description | +| ------------- | ------------------ | -------------------------------- | ------------------------------------------------------------------------- | +| num_heads | Literal\[2, 3, 4\] | 3 | Number of output heads. Should in most cases match `num_heads` of the head. | +| channels_list | List\[int\] | \[256, 128, 128, 256, 256, 512\] | List of number of channels for each block | +| num_repeats | List\[int\] | \[12, 12, 12, 12\] | List of number of repeats of RepVGGBlock | +| depth_mul | float | 0.33 | Depth multiplier | +| width_mul | float | 0.25 | Width multiplier | + +## ClassificationHead + +**Params** + +| Key | Type | Default value | Description | +| ---------- | ----- | ------------- | ----------------------------------------------- | +| fc_dropout | float | 0.2 | Dropout rate before last layer, range \[0, 1\] | + +## SegmentationHead + +Adapted from [here](https://github.com/pytorch/vision/blob/main/torchvision/models/segmentation/fcn.py). + +## BiSeNetHead + +Adapted from [here](https://github.com/taveraantonio/BiseNetv1). + +**Params** + +| Key | Type | Default value | Description | +| --------------------- | ---- | ------------- | -------------------------------- | +| intermediate_channels | int | 64 | Number of intermediate channels. | + +## EfficientBBoxHead + +Adapted from [here](https://arxiv.org/pdf/2209.02976.pdf). + +**Params** + +| Key | Type | Default value | Description | +| --------- | ---- | ------------- | ----------------------- | +| num_heads | int | 3 | Number of output heads. | + +## ImplicitKeypointBBoxHead + +Adapted from [here](https://arxiv.org/pdf/2207.02696.pdf). + +**Params** + +| Key | Type | Default value | Description | +| ---------------- | --------------------------- | ------------- | ------------------------------------------------------------------------------------------------------------ | +| n_keypoints | int \| None | None | Number of keypoints. | +| num_heads | int | 3 | Number of output heads. | +| anchors | List\[List\[int\]\] \| None | None | Anchors used for object detection. If set to `None`, the anchors are computed at runtime from the dataset.
| + | init_coco_biases | bool | True | Whether to use COCO bias and weight initialization. | + | conf_thres | float | 0.25 | Confidence threshold for NMS (used for evaluation). | + | iou_thres | float | 0.45 | IoU threshold for NMS (used for evaluation). | diff --git a/luxonis_train/nodes/__init__.py b/luxonis_train/nodes/__init__.py new file mode 100644 index 00000000..d7ec70d0 --- /dev/null +++ b/luxonis_train/nodes/__init__.py @@ -0,0 +1,33 @@ +from .base_node import BaseNode +from .bisenet_head import BiSeNetHead +from .classification_head import ClassificationHead +from .contextspatial import ContextSpatial +from .efficient_bbox_head import EfficientBBoxHead +from .efficientrep import EfficientRep +from .implicit_keypoint_bbox_head import ImplicitKeypointBBoxHead +from .micronet import MicroNet +from .mobilenetv2 import MobileNetV2 +from .mobileone import MobileOne +from .reppan_neck import RepPANNeck +from .repvgg import RepVGG +from .resnet18 import ResNet18 +from .rexnetv1 import ReXNetV1_lite +from .segmentation_head import SegmentationHead + +__all__ = [ + "BiSeNetHead", + "ClassificationHead", + "ContextSpatial", + "EfficientBBoxHead", + "EfficientRep", + "ImplicitKeypointBBoxHead", + "BaseNode", + "MicroNet", + "MobileNetV2", + "MobileOne", + "ReXNetV1_lite", + "RepPANNeck", + "RepVGG", + "ResNet18", + "SegmentationHead", +] diff --git a/luxonis_train/nodes/activations/__init__.py b/luxonis_train/nodes/activations/__init__.py new file mode 100644 index 00000000..37aea0fc --- /dev/null +++ b/luxonis_train/nodes/activations/__init__.py @@ -0,0 +1,3 @@ +from .activations import HSigmoid, HSwish + +__all__ = ["HSigmoid", "HSwish"] diff --git a/luxonis_train/nodes/activations/activations.py b/luxonis_train/nodes/activations/activations.py new file mode 100644 index 00000000..f3abedd6 --- /dev/null +++ b/luxonis_train/nodes/activations/activations.py @@ -0,0 +1,23 @@ +from torch import Tensor, nn + + +class HSigmoid(nn.Module): + def __init__(self): + """Hard-Sigmoid (approximated sigmoid) activation function from + U{Searching for MobileNetV3 <https://arxiv.org/abs/1905.02244>}.""" + super().__init__() + self.relu = nn.ReLU6(True) + + def forward(self, x: Tensor) -> Tensor: + return self.relu(x + 3) / 6 + + +class HSwish(nn.Module): + def __init__(self): + """H-Swish activation function from U{Searching for MobileNetV3 + <https://arxiv.org/abs/1905.02244>}.""" + super().__init__() + self.sigmoid = HSigmoid() + + def forward(self, x: Tensor) -> Tensor: + return x * self.sigmoid(x) diff --git a/luxonis_train/nodes/base_node.py b/luxonis_train/nodes/base_node.py new file mode 100644 index 00000000..6ec216fb --- /dev/null +++ b/luxonis_train/nodes/base_node.py @@ -0,0 +1,396 @@ +from abc import ABC, abstractmethod +from typing import Generic, TypeVar + +from luxonis_ml.utils.registry import AutoRegisterMeta +from pydantic import BaseModel, ValidationError +from torch import Size, Tensor, nn + +from luxonis_train.utils.general import DatasetMetadata, validate_packet +from luxonis_train.utils.registry import NODES +from luxonis_train.utils.types import ( + AttachIndexType, + FeaturesProtocol, + IncompatibleException, + LabelType, + Packet, +) + +ForwardOutputT = TypeVar("ForwardOutputT") +ForwardInputT = TypeVar("ForwardInputT") + + +class BaseNode( + nn.Module, + ABC, + Generic[ForwardInputT, ForwardOutputT], + metaclass=AutoRegisterMeta, + register=False, + registry=NODES, +): + """A base class for all model nodes. + + This class defines the basic interface for all nodes.
+ + Furthermore, it utilizes automatic registration of defined subclasses + to the L{NODES} registry. + + Inputs and outputs of nodes are defined as L{Packet}s. A L{Packet} is a dictionary + of lists of tensors. Each key in the dictionary represents a different output + from the previous node. Input to the node is a list of L{Packet}s, output is a single L{Packet}. + + Each node can define a list of L{BaseProtocol}s that the inputs must conform to. + L{BaseProtocol} is a pydantic model that defines the structure of the input. + When the node is called, the inputs are validated against the protocols and + then sent to the L{unwrap} method. The C{unwrap} method should return a valid + input to the L{forward} method. Outputs of the C{forward} method are then + sent to the L{wrap} method, which wraps the output into a C{Packet}; this is the + output of the node. + + The L{run} method combines the C{unwrap}, C{forward} and C{wrap} methods + together with input validation. + + + @type input_shapes: list[Packet[Size]] | None + @param input_shapes: List of input shapes for the module. + + @type original_in_shape: Size | None + @param original_in_shape: Original input shape of the model. Some + nodes won't function if not provided. + + @type dataset_metadata: L{DatasetMetadata} | None + @param dataset_metadata: Metadata of the dataset. + Some nodes won't function if not provided. + + @type attach_index: AttachIndexType + @param attach_index: Index of previous output that this node attaches to. + Can be a single integer to specify a single output, a tuple of + two or three integers to specify a range of outputs or `"all"` to + specify all outputs. Defaults to "all". Python indexing conventions apply. + + @type in_protocols: list[type[BaseModel]] + @param in_protocols: List of input protocols used to validate inputs to the node. + Defaults to [FeaturesProtocol]. + + @type n_classes: int | None + @param n_classes: Number of classes in the dataset. Provide only + in case `dataset_metadata` is not provided. Defaults to None. + + @type in_sizes: Size | list[Size] | None + @param in_sizes: List of input sizes for the node. + Provide only in case the `input_shapes` were not provided. + """ + + attach_index: AttachIndexType = "all" + + def __init__( + self, + *, + input_shapes: list[Packet[Size]] | None = None, + original_in_shape: Size | None = None, + dataset_metadata: DatasetMetadata | None = None, + attach_index: AttachIndexType | None = None, + in_protocols: list[type[BaseModel]] | None = None, + n_classes: int | None = None, + in_sizes: Size | list[Size] | None = None, + task_type: LabelType | None = None, + ): + super().__init__() + + # `attach_index or self.attach_index` would incorrectly discard a valid index of 0 + self.attach_index = ( + attach_index if attach_index is not None else self.attach_index + ) + self.in_protocols = in_protocols or [FeaturesProtocol] + self.task_type = task_type + + self._input_shapes = input_shapes + self._original_in_shape = original_in_shape + if n_classes is not None: + if dataset_metadata is not None: + raise ValueError("Cannot set both `dataset_metadata` and `n_classes`.") + dataset_metadata = DatasetMetadata(n_classes=n_classes) + self._dataset_metadata = dataset_metadata + self._export = False + self._epoch = 0 + self._in_sizes = in_sizes + + def _non_set_error(self, name: str) -> ValueError: + return ValueError( + f"{self.__class__.__name__} is trying to access `{name}`, " + "but it was not set during initialization. 
" + ) + + @property + def n_classes(self) -> int: + """Getter for the number of classes.""" + return self.dataset_metadata.n_classes(self.task_type) + + @property + def class_names(self) -> list[str]: + """Getter for the class names.""" + return self.dataset_metadata.class_names(self.task_type) + + @property + def input_shapes(self) -> list[Packet[Size]]: + """Getter for the input shapes.""" + if self._input_shapes is None: + raise self._non_set_error("input_shapes") + return self._input_shapes + + @property + def original_in_shape(self) -> Size: + """Getter for the original input shape.""" + if self._original_in_shape is None: + raise self._non_set_error("original_in_shape") + return self._original_in_shape + + @property + def dataset_metadata(self) -> DatasetMetadata: + """Getter for the dataset metadata. + + @type: L{DatasetMetadata} + @raises ValueError: If the C{dataset_metadata} is C{None}. + """ + if self._dataset_metadata is None: + raise ValueError( + f"{self._non_set_error('dataset_metadata')}" + "Either provide `dataset_metadata` or `n_classes`." + ) + return self._dataset_metadata + + @property + def in_sizes(self) -> Size | list[Size]: + """Simplified getter for the input shapes. + + Should work out of the box for most cases where the `input_shapes` are + sufficiently simple. Otherwise the `input_shapes` should be used directly. + + In case `in_sizes` were provided during initialization, they are returned + directly. + + Example: + + >>> input_shapes = [{"features": [Size(1, 64, 128, 128), Size(1, 3, 224, 224)]}] + >>> attach_index = -1 + >>> in_sizes = Size(1, 3, 224, 224) + + >>> input_shapes = [{"features": [Size(1, 64, 128, 128), Size(1, 3, 224, 224)]}] + >>> attach_index = "all" + >>> in_sizes = [Size(1, 64, 128, 128), Size(1, 3, 224, 224)] + + @type: Size | list[Size] + @raises IncompatibleException: If the C{input_shapes} are too complicated for + the default implementation. + """ + if self._in_sizes is not None: + return self._in_sizes + + features = self.input_shapes[0].get("features") + if features is None: + raise IncompatibleException( + f"Feature field is missing in {self.__class__.__name__}. " + "The default implementation of `in_sizes` cannot be used." + ) + shapes = self.get_attached(self.input_shapes[0]["features"]) + if isinstance(shapes, list) and len(shapes) == 1: + return shapes[0] + return shapes + + @property + def in_channels(self) -> int | list[int]: + """Simplified getter for the number of input channels. + + Should work out of the box for most cases where the C{input_shapes} are + sufficiently simple. Otherwise the C{input_shapes} should be used directly. If + C{attach_index} is set to "all" or is a slice, returns a list of input channels, + otherwise returns a single value. + + @type: int | list[int] + @raises IncompatibleException: If the C{input_shapes} are too complicated for + the default implementation. + """ + return self._get_nth_size(1) + + @property + def in_height(self) -> int | list[int]: + """Simplified getter for the input height. + + Should work out of the box for most cases where the `input_shapes` are + sufficiently simple. Otherwise the `input_shapes` should be used directly. + + @type: int | list[int] + @raises IncompatibleException: If the C{input_shapes} are too complicated for + the default implementation. + """ + return self._get_nth_size(2) + + @property + def in_width(self) -> int | list[int]: + """Simplified getter for the input width. 
+ + Should work out of the box for most cases where the `input_shapes` are + sufficiently simple. Otherwise the `input_shapes` should be used directly. + + @type: int | list[int] + @raises IncompatibleException: If the C{input_shapes} are too complicated for + the default implementation. + """ + return self._get_nth_size(3) + + @property + def export(self) -> bool: + """Getter for the export mode.""" + return self._export + + def set_export_mode(self, mode: bool = True) -> None: + """Sets the module to export mode. + + @type mode: bool + @param mode: Value to set the export mode to. Defaults to True. + """ + self._export = mode + + def unwrap(self, inputs: list[Packet[Tensor]]) -> ForwardInputT: + """Prepares inputs for the forward pass. + + Unwraps the inputs from the C{list[Packet[Tensor]]} input so they can be passed + to the forward call. The default implementation expects a single input with + C{features} key and returns the tensor or tensors at the C{attach_index} + position. + + For most cases the default implementation should be sufficient. Exceptions are + modules with multiple inputs or producing more complex outputs. This is + typically the case for output nodes. + + @type inputs: list[Packet[Tensor]] + @param inputs: Inputs to the node. + @rtype: ForwardInputT + @return: Prepared inputs, ready to be passed to the L{forward} method. + """ + return self.get_attached(inputs[0]["features"]) # type: ignore + + @abstractmethod + def forward(self, inputs: ForwardInputT) -> ForwardOutputT: + """Forward pass of the module. + + @type inputs: ForwardInputT + @param inputs: Inputs to the module. + @rtype: ForwardOutputT + @return: Result of the forward pass. + """ + ... + + def wrap(self, output: ForwardOutputT) -> Packet[Tensor]: + """Wraps the output of the forward pass into a `Packet[Tensor]`. + + The default implementation expects a single tensor or a list of tensors + and wraps them into a Packet with `features` key. + + @type output: ForwardOutputT + @param output: Output of the forward pass. + + @rtype: L{Packet}[Tensor] + @return: Wrapped output. + """ + + match output: + case Tensor(data=out): + outputs = [out] + case list(tensors) if all(isinstance(t, Tensor) for t in tensors): + outputs = tensors + case _: + raise IncompatibleException( + "Default `wrap` expects a single tensor or a list of tensors." + ) + return {"features": outputs} + + def run(self, inputs: list[Packet[Tensor]]) -> Packet[Tensor]: + """Combines the forward pass with the wrapping and unwrapping of the inputs. + + Additionally validates the inputs against `in_protocols`. + + @type inputs: list[Packet[Tensor]] + @param inputs: Inputs to the module. + + @rtype: L{Packet}[Tensor] + @return: Outputs of the module as a dictionary of list of tensors: + `{"features": [Tensor, ...], "segmentation": [Tensor]}` + + @raises IncompatibleException: If the inputs are not compatible with the node. + """ + unwrapped = self.unwrap(self.validate(inputs)) + outputs = self(unwrapped) + return self.wrap(outputs) + + def validate(self, data: list[Packet[Tensor]]) -> list[Packet[Tensor]]: + """Validates the inputs against `in_protocols`.""" + if len(data) != len(self.in_protocols): + raise IncompatibleException( + f"Node {self.__class__.__name__} expects {len(self.in_protocols)} inputs, " + f"but got {len(data)} inputs instead." 
+ ) + try: + return [ + validate_packet(d, protocol) + for d, protocol in zip(data, self.in_protocols) + ] + except ValidationError as e: + raise IncompatibleException.from_validation_error( + e, self.__class__.__name__ + ) from e + + T = TypeVar("T", Tensor, Size) + + def get_attached(self, lst: list[T]) -> list[T] | T: + """Gets the attached elements from a list. + + This method is used to get the attached elements from a list based on + the `attach_index` attribute. + + @type lst: list[T] + @param lst: List to get the attached elements from. Can be either + a list of tensors or a list of sizes. + + @rtype: list[T] | T + @return: Attached elements. If `attach_index` is set to `"all"` or is a slice, + returns a list of attached elements. + + @raises ValueError: If the `attach_index` is invalid. + """ + + def _normalize_index(index: int) -> int: + if index < 0: + index += len(lst) + return index + + def _normalize_slice(i: int, j: int) -> slice: + if i < 0 and j < 0: + return slice(len(lst) + i, len(lst) + j, -1 if i > j else 1) + if i < 0: + return slice(len(lst) + i, j, 1) + if j < 0: + return slice(i, len(lst) + j, 1) + if i > j: + return slice(i, j, -1) + return slice(i, j, 1) + + match self.attach_index: + case "all": + return lst + case int(i): + i = _normalize_index(i) + if i >= len(lst): + raise ValueError( + f"Attach index {i} is out of range for list of length {len(lst)}." + ) + return lst[_normalize_index(i)] + case (int(i), int(j)): + return lst[_normalize_slice(i, j)] + case (int(i), int(j), int(k)): + return lst[i:j:k] + case _: + raise ValueError(f"Invalid attach index: `{self.attach_index}`") + + def _get_nth_size(self, idx: int) -> int | list[int]: + match self.in_sizes: + case Size(sizes): + return sizes[idx] + case list(sizes): + return [size[idx] for size in sizes] diff --git a/luxonis_train/nodes/bisenet_head.py b/luxonis_train/nodes/bisenet_head.py new file mode 100644 index 00000000..99845177 --- /dev/null +++ b/luxonis_train/nodes/bisenet_head.py @@ -0,0 +1,50 @@ +"""BiSeNet segmentation head. + +Adapted from U{https://github.com/taveraantonio/BiseNetv1}. +License: NOT SPECIFIED. +""" + + +from torch import Tensor, nn + +from luxonis_train.nodes.blocks import ConvModule +from luxonis_train.utils.general import infer_upscale_factor +from luxonis_train.utils.types import LabelType, Packet + +from .base_node import BaseNode + + +class BiSeNetHead(BaseNode[Tensor, Tensor]): + attach_index: int = -1 + in_height: int + in_channels: int + + def __init__( + self, + intermediate_channels: int = 64, + **kwargs, + ): + """BiSeNet segmentation head. + TODO: Add more documentation. + + @type intermediate_channels: int + @param intermediate_channels: How many intermediate channels to use. + Defaults to C{64}. 
+ """ + super().__init__(task_type=LabelType.SEGMENTATION, **kwargs) + + original_height = self.original_in_shape[2] + upscale_factor = 2 ** infer_upscale_factor(self.in_height, original_height) + out_channels = self.n_classes * upscale_factor * upscale_factor + + self.conv_3x3 = ConvModule(self.in_channels, intermediate_channels, 3, 1, 1) + self.conv_1x1 = nn.Conv2d(intermediate_channels, out_channels, 1, 1, 0) + self.upscale = nn.PixelShuffle(upscale_factor) + + def wrap(self, output: Tensor) -> Packet[Tensor]: + return {"segmentation": [output]} + + def forward(self, inputs: Tensor) -> Tensor: + inputs = self.conv_3x3(inputs) + inputs = self.conv_1x1(inputs) + return self.upscale(inputs) diff --git a/luxonis_train/nodes/blocks/__init__.py b/luxonis_train/nodes/blocks/__init__.py new file mode 100644 index 00000000..a87c336e --- /dev/null +++ b/luxonis_train/nodes/blocks/__init__.py @@ -0,0 +1,37 @@ +from .blocks import ( + AttentionRefinmentBlock, + BlockRepeater, + ConvModule, + EfficientDecoupledBlock, + FeatureFusionBlock, + KeypointBlock, + LearnableAdd, + LearnableMulAddConv, + LearnableMultiply, + RepDownBlock, + RepUpBlock, + RepVGGBlock, + SpatialPyramidPoolingBlock, + SqueezeExciteBlock, + UpBlock, + autopad, +) + +__all__ = [ + "autopad", + "EfficientDecoupledBlock", + "ConvModule", + "UpBlock", + "RepDownBlock", + "SqueezeExciteBlock", + "RepVGGBlock", + "BlockRepeater", + "AttentionRefinmentBlock", + "SpatialPyramidPoolingBlock", + "FeatureFusionBlock", + "LearnableAdd", + "LearnableMultiply", + "LearnableMulAddConv", + "KeypointBlock", + "RepUpBlock", +] diff --git a/luxonis_train/nodes/blocks/blocks.py b/luxonis_train/nodes/blocks/blocks.py new file mode 100644 index 00000000..f4bd0172 --- /dev/null +++ b/luxonis_train/nodes/blocks/blocks.py @@ -0,0 +1,728 @@ +# TODO: cleanup, document +# Check if some blocks could be merged togetner. + +import math +from typing import TypeVar + +import numpy as np +import torch +from torch import Tensor, nn + +from luxonis_train.nodes.activations import HSigmoid + + +class EfficientDecoupledBlock(nn.Module): + def __init__(self, n_classes: int, in_channels: int): + """Efficient Decoupled block used for class and regression predictions. + + @type n_classes: int + @param n_classes: Number of classes. + @type in_channels: int + @param in_channels: Number of input channels. 
+ """ + super().__init__() + + self.decoder = ConvModule( + in_channels=in_channels, + out_channels=in_channels, + kernel_size=1, + stride=1, + activation=nn.SiLU(), + ) + + self.class_branch = nn.Sequential( + ConvModule( + in_channels=in_channels, + out_channels=in_channels, + kernel_size=3, + stride=1, + padding=1, + activation=nn.SiLU(), + ), + nn.Conv2d(in_channels=in_channels, out_channels=n_classes, kernel_size=1), + ) + self.regression_branch = nn.Sequential( + ConvModule( + in_channels=in_channels, + out_channels=in_channels, + kernel_size=3, + stride=1, + padding=1, + activation=nn.SiLU(), + ), + nn.Conv2d(in_channels=in_channels, out_channels=4, kernel_size=1), + ) + + prior_prob = 1e-2 + self._initialize_weights_and_biases(prior_prob) + + def forward(self, x: Tensor) -> tuple[Tensor, Tensor, Tensor]: + out_feature = self.decoder(x) + + out_cls = self.class_branch(out_feature) + out_reg = self.regression_branch(out_feature) + + return out_feature, out_cls, out_reg + + def _initialize_weights_and_biases(self, prior_prob: float): + data = [ + (self.class_branch[-1], -math.log((1 - prior_prob) / prior_prob)), + (self.regression_branch[-1], 1.0), + ] + for module, fill_value in data: + assert module.bias is not None + b = module.bias.view(-1) + b.data.fill_(fill_value) + module.bias = nn.Parameter(b.view(-1), requires_grad=True) + + w = module.weight + w.data.fill_(0.0) + module.weight = nn.Parameter(w, requires_grad=True) + + +class ConvModule(nn.Sequential): + def __init__( + self, + in_channels: int, + out_channels: int, + kernel_size: int, + stride: int = 1, + padding: int = 0, + dilation: int = 1, + groups: int = 1, + bias: bool = False, + activation: nn.Module | None = None, + ): + """Conv2d + BN + Activation. + + @type in_channels: int + @param in_channels: Number of input channels. + @type out_channels: int + @param out_channels: Number of output channels. + @type kernel_size: int + @param kernel_size: Kernel size. + @type stride: int + @param stride: Stride. Defaults to 1. + @type padding: int + @param padding: Padding. Defaults to 0. + @type dilation: int + @param dilation: Dilation. Defaults to 1. + @type groups: int + @param groups: Groups. Defaults to 1. + @type bias: bool + @param bias: Whether to use bias. Defaults to False. + @type activation: L{nn.Module} | None + @param activation: Activation function. Defaults to None. + """ + super().__init__( + nn.Conv2d( + in_channels, + out_channels, + kernel_size, + stride, + padding, + dilation, + groups, + bias, + ), + nn.BatchNorm2d(out_channels), + activation or nn.ReLU(), + ) + + +class UpBlock(nn.Sequential): + def __init__( + self, + in_channels: int, + out_channels: int, + kernel_size: int = 2, + stride: int = 2, + ): + """Upsampling with ConvTranspose2D (similar to U-Net Up block). + + @type in_channels: int + @param in_channels: Number of input channels. + @type out_channels: int + @param out_channels: Number of output channels. + @type kernel_size: int + @param kernel_size: Kernel size. Defaults to C{2}. + @type stride: int + @param stride: Stride. Defaults to C{2}. 
+ """ + + super().__init__( + nn.ConvTranspose2d( + in_channels, out_channels, kernel_size=kernel_size, stride=stride + ), + ConvModule(out_channels, out_channels, kernel_size=3, padding=1), + ) + + +class SqueezeExciteBlock(nn.Module): + def __init__( + self, + in_channels: int, + intermediate_channels: int, + approx_sigmoid: bool = False, + activation: nn.Module | None = None, + ): + """Squeeze and Excite block, + Adapted from U{Squeeze-and-Excitation Networks}. + Code adapted from U{https://github.com/apple/ml-mobileone/blob/main/mobileone.py}. + + @type in_channels: int + @param in_channels: Number of input channels. + @type intermediate_channels: int + @param intermediate_channels: Number of intermediate channels. + @type approx_sigmoid: bool + @param approx_sigmoid: Whether to use approximated sigmoid function. Defaults to False. + @type activation: L{nn.Module} | None + @param activation: Activation function. Defaults to L{nn.ReLU}. + """ + super().__init__() + + activation = activation or nn.ReLU() + self.pool = nn.AdaptiveAvgPool2d(output_size=1) + self.conv_down = nn.Conv2d( + in_channels=in_channels, + out_channels=intermediate_channels, + kernel_size=1, + bias=True, + ) + self.activation = activation + self.conv_up = nn.Conv2d( + in_channels=intermediate_channels, + out_channels=in_channels, + kernel_size=1, + bias=True, + ) + self.sigmoid = HSigmoid() if approx_sigmoid else nn.Sigmoid() + + def forward(self, x: Tensor) -> Tensor: + weights = self.pool(x) + weights = self.conv_down(weights) + weights = self.activation(weights) + weights = self.conv_up(weights) + weights = self.sigmoid(weights) + x = x * weights + return x + + +class RepVGGBlock(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + kernel_size: int = 3, + stride: int = 1, + padding: int = 1, + dilation: int = 1, + groups: int = 1, + padding_mode: str = "zeros", + deploy: bool = False, + use_se: bool = False, + ): + """RepVGGBlock is a basic rep-style block, including training and deploy status + This code is based on U{https://github.com/DingXiaoH/RepVGG/blob/main/repvgg.py}. + + + @type in_channels: int + @param in_channels: Number of input channels. + @type out_channels: int + @param out_channels: Number of output channels. + @type kernel_size: int + @param kernel_size: Kernel size. Defaults to C{3}. + @type stride: int + @param stride: Stride. Defaults to C{1}. + @type padding: int + @param padding: Padding. Defaults to C{1}. + @type dilation: int + @param dilation: Dilation. Defaults to C{1}. + @type groups: int + @param groups: Groups. Defaults to C{1}. + @type padding_mode: str + @param padding_mode: Padding mode. Defaults to C{"zeros"}. + @type deploy: bool + @param deploy: Whether to use deploy mode. Defaults to C{False}. + @type use_se: bool + @param use_se: Whether to use SqueezeExciteBlock. Defaults to C{False}. + """ + super().__init__() + + self.deploy = deploy + self.groups = groups + self.in_channels = in_channels + self.out_channels = out_channels + + assert kernel_size == 3 + assert padding == 1 + + padding_11 = padding - kernel_size // 2 + + self.nonlinearity = nn.ReLU() + + if use_se: + # Note that RepVGG-D2se uses SE before nonlinearity. But RepVGGplus models uses SqueezeExciteBlock after nonlinearity. 
+ self.se = SqueezeExciteBlock( + out_channels, intermediate_channels=int(out_channels // 16) + ) + else: + self.se = nn.Identity() # type: ignore + + if deploy: + self.rbr_reparam = nn.Conv2d( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=kernel_size, + stride=stride, + padding=padding, + dilation=dilation, + groups=groups, + bias=True, + padding_mode=padding_mode, + ) + else: + self.rbr_identity = ( + nn.BatchNorm2d(num_features=in_channels) + if out_channels == in_channels and stride == 1 + else None + ) + self.rbr_dense = ConvModule( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=kernel_size, + stride=stride, + padding=padding, + groups=groups, + activation=nn.Identity(), + ) + self.rbr_1x1 = ConvModule( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=1, + stride=stride, + padding=padding_11, + groups=groups, + activation=nn.Identity(), + ) + + def forward(self, x: Tensor): + if hasattr(self, "rbr_reparam"): + return self.nonlinearity(self.se(self.rbr_reparam(x))) + + if self.rbr_identity is None: + id_out = 0 + else: + id_out = self.rbr_identity(x) + + return self.nonlinearity(self.se(self.rbr_dense(x) + self.rbr_1x1(x) + id_out)) + + def reparametrize(self): + if hasattr(self, "rbr_reparam"): + return + kernel, bias = self._get_equivalent_kernel_bias() + self.rbr_reparam = nn.Conv2d( + in_channels=self.rbr_dense[0].in_channels, + out_channels=self.rbr_dense[0].out_channels, + kernel_size=self.rbr_dense[0].kernel_size, + stride=self.rbr_dense[0].stride, + padding=self.rbr_dense[0].padding, + dilation=self.rbr_dense[0].dilation, + groups=self.rbr_dense[0].groups, + bias=True, + ) + self.rbr_reparam.weight.data = kernel # type: ignore + self.rbr_reparam.bias.data = bias # type: ignore + self.__delattr__("rbr_dense") + self.__delattr__("rbr_1x1") + if hasattr(self, "rbr_identity"): + self.__delattr__("rbr_identity") + if hasattr(self, "id_tensor"): + self.__delattr__("id_tensor") + + def _get_equivalent_kernel_bias(self): + """Derives the equivalent kernel and bias in a DIFFERENTIABLE way.""" + kernel3x3, bias3x3 = self._fuse_bn_tensor(self.rbr_dense) + kernel1x1, bias1x1 = self._fuse_bn_tensor(self.rbr_1x1) + kernelid, biasid = self._fuse_bn_tensor(self.rbr_identity) + return ( + kernel3x3 + + self._pad_1x1_to_3x3_tensor(kernel1x1) + + kernelid.to(kernel3x3.device), + bias3x3 + bias1x1 + biasid.to(bias3x3.device), + ) + + def _pad_1x1_to_3x3_tensor(self, kernel1x1: Tensor | None) -> Tensor: + if kernel1x1 is None: + return torch.tensor(0) + else: + return torch.nn.functional.pad(kernel1x1, [1, 1, 1, 1]) + + def _fuse_bn_tensor(self, branch: nn.Module | None) -> tuple[Tensor, Tensor]: + if branch is None: + return torch.tensor(0), torch.tensor(0) + if isinstance(branch, nn.Sequential): + kernel = branch[0].weight + running_mean = branch[1].running_mean + running_var = branch[1].running_var + gamma = branch[1].weight + beta = branch[1].bias + eps = branch[1].eps + else: + assert isinstance(branch, nn.BatchNorm2d) + if not hasattr(self, "id_tensor"): + input_dim = self.in_channels // self.groups + kernel_value = np.zeros( + (self.in_channels, input_dim, 3, 3), dtype=np.float32 + ) + for i in range(self.in_channels): + kernel_value[i, i % input_dim, 1, 1] = 1 + self.id_tensor = torch.from_numpy(kernel_value) + kernel = self.id_tensor + running_mean = branch.running_mean + running_var = branch.running_var + gamma = branch.weight + beta = branch.bias + eps = branch.eps + assert running_var is not None + std = (running_var + 
eps).sqrt()
+        t = (gamma / std).reshape(-1, 1, 1, 1).to(kernel.device)
+        return kernel * t, beta - running_mean * gamma / std
+
+
+class BlockRepeater(nn.Module):
+    def __init__(
+        self,
+        block: type[nn.Module],
+        in_channels: int,
+        out_channels: int,
+        num_blocks: int = 1,
+    ):
+        """Module which repeats a C{block} C{num_blocks} times. The first block maps
+        C{in_channels} to C{out_channels}; every subsequent block maps C{out_channels}
+        to C{out_channels}.
+
+        @type block: type[nn.Module]
+        @param block: Block to repeat.
+        @type in_channels: int
+        @param in_channels: Number of input channels.
+        @type out_channels: int
+        @param out_channels: Number of output channels.
+        @type num_blocks: int
+        @param num_blocks: Number of blocks to repeat. Defaults to C{1}.
+        """
+        super().__init__()
+
+        self.blocks = nn.ModuleList()
+        for _ in range(num_blocks):
+            self.blocks.append(
+                block(in_channels=in_channels, out_channels=out_channels)
+            )
+            in_channels = out_channels
+
+    def forward(self, x):
+        for block in self.blocks:
+            x = block(x)
+        return x
+
+
+class SpatialPyramidPoolingBlock(nn.Module):
+    def __init__(self, in_channels: int, out_channels: int, kernel_size: int = 5):
+        """Spatial Pyramid Pooling block with ReLU activation on three different scales.
+
+        @type in_channels: int
+        @param in_channels: Number of input channels.
+        @type out_channels: int
+        @param out_channels: Number of output channels.
+        @type kernel_size: int
+        @param kernel_size: Kernel size. Defaults to C{5}.
+        """
+        super().__init__()
+
+        intermediate_channels = in_channels // 2  # hidden channels
+        self.conv1 = ConvModule(in_channels, intermediate_channels, 1, 1)
+        self.conv2 = ConvModule(intermediate_channels * 4, out_channels, 1, 1)
+        self.max_pool = nn.MaxPool2d(
+            kernel_size=kernel_size, stride=1, padding=kernel_size // 2
+        )
+
+    def forward(self, x):
+        x = self.conv1(x)
+        # apply max-pooling at three different scales
+        y1 = self.max_pool(x)
+        y2 = self.max_pool(y1)
+        y3 = self.max_pool(y2)
+
+        x = torch.cat([x, y1, y2, y3], dim=1)
+        x = self.conv2(x)
+        return x
+
+
+class AttentionRefinmentBlock(nn.Module):
+    def __init__(self, in_channels: int, out_channels: int):
+        """Attention Refinement block adapted from
+        U{https://github.com/taveraantonio/BiseNetv1}.
+
+        @type in_channels: int
+        @param in_channels: Number of input channels.
+        @type out_channels: int
+        @param out_channels: Number of output channels.
+        """
+        super().__init__()
+
+        self.conv_3x3 = ConvModule(in_channels, out_channels, 3, 1, 1)
+        self.attention = nn.Sequential(
+            nn.AdaptiveAvgPool2d(1),
+            ConvModule(
+                in_channels=out_channels,
+                out_channels=out_channels,
+                kernel_size=1,
+                activation=nn.Identity(),
+            ),
+            nn.Sigmoid(),
+        )
+
+    def forward(self, x):
+        x = self.conv_3x3(x)
+        attention = self.attention(x)
+        out = x * attention
+        return out
+
+
+class FeatureFusionBlock(nn.Module):
+    def __init__(self, in_channels: int, out_channels: int, reduction: int = 1):
+        """Feature Fusion block adapted from
+        U{https://github.com/taveraantonio/BiseNetv1}.
+
+        @type in_channels: int
+        @param in_channels: Number of input channels.
+        @type out_channels: int
+        @param out_channels: Number of output channels.
+        @type reduction: int
+        @param reduction: Reduction factor. Defaults to C{1}.
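+
+        Illustrative usage (sizes are made up; the two inputs are concatenated,
+        so C{in_channels} must equal the sum of their channel counts):
+
+            >>> ffm = FeatureFusionBlock(in_channels=256, out_channels=256)
+            >>> out = ffm(torch.rand(1, 128, 32, 32), torch.rand(1, 128, 32, 32))
+            >>> out.shape
+            torch.Size([1, 256, 32, 32])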
+        """
+
+        super().__init__()
+
+        self.conv_1x1 = ConvModule(in_channels, out_channels, 1, 1, 0)
+        self.attention = nn.Sequential(
+            nn.AdaptiveAvgPool2d(1),
+            ConvModule(
+                in_channels=out_channels,
+                out_channels=out_channels // reduction,
+                kernel_size=1,
+            ),
+            # the second 1x1 conv has to expand back to `out_channels`,
+            # otherwise the element-wise product in `forward` breaks for reduction > 1
+            ConvModule(
+                in_channels=out_channels // reduction,
+                out_channels=out_channels,
+                kernel_size=1,
+                activation=nn.Identity(),
+            ),
+            nn.Sigmoid(),
+        )
+
+    def forward(self, x1, x2):
+        fusion = torch.cat([x1, x2], dim=1)
+        x = self.conv_1x1(fusion)
+        attention = self.attention(x)
+        out = x + x * attention
+        return out
+
+
+class LearnableAdd(nn.Module):
+    """Implicit add block."""
+
+    def __init__(self, channel: int):
+        super().__init__()
+        self.channel = channel
+        self.implicit = nn.Parameter(torch.zeros(1, channel, 1, 1))
+        nn.init.normal_(self.implicit, std=0.02)
+
+    def forward(self, x: Tensor):
+        return self.implicit.expand_as(x) + x
+
+
+class LearnableMultiply(nn.Module):
+    """Implicit multiply block."""
+
+    def __init__(self, channel: int):
+        super().__init__()
+        self.channel = channel
+        self.implicit = nn.Parameter(torch.ones(1, channel, 1, 1))
+        nn.init.normal_(self.implicit, mean=1.0, std=0.02)
+
+    def forward(self, x: Tensor):
+        return self.implicit.expand_as(x) * x
+
+
+class LearnableMulAddConv(nn.Module):
+    def __init__(
+        self,
+        add_channel: int,
+        mul_channel: int,
+        conv_in_channel: int,
+        conv_out_channel: int,
+    ):
+        super().__init__()
+        self.add = LearnableAdd(add_channel)
+        self.mul = LearnableMultiply(mul_channel)
+        self.conv = nn.Conv2d(conv_in_channel, conv_out_channel, 1)
+
+    def forward(self, x: Tensor) -> Tensor:
+        return self.mul(self.conv(self.add(x)))
+
+
+class KeypointBlock(nn.Module):
+    """Keypoint head block for keypoint predictions."""
+
+    def __init__(self, in_channels: int, out_channels: int):
+        super().__init__()
+        layers: list[nn.Module] = []
+        for i in range(6):
+            depth_wise_conv = ConvModule(
+                in_channels,
+                in_channels,
+                kernel_size=3,
+                padding=autopad(3),
+                groups=in_channels,  # gcd(c, c) == c, i.e. a depthwise convolution
+                activation=nn.SiLU(),
+            )
+            conv = (
+                ConvModule(
+                    in_channels,
+                    in_channels,
+                    kernel_size=1,
+                    padding=autopad(1),
+                    activation=nn.SiLU(),
+                )
+                if i < 5
+                else nn.Conv2d(in_channels, out_channels, 1)
+            )
+
+            layers.append(depth_wise_conv)
+            layers.append(conv)
+
+        self.block = nn.Sequential(*layers)
+
+    def forward(self, x: Tensor):
+        out = self.block(x)
+        return out
+
+
+class RepUpBlock(nn.Module):
+    def __init__(
+        self,
+        in_channels: int,
+        in_channels_next: int,
+        out_channels: int,
+        num_repeats: int,
+    ):
+        """UpBlock used in RepPAN neck.
+
+        @type in_channels: int
+        @param in_channels: Number of input channels.
+        @type in_channels_next: int
+        @param in_channels_next: Number of input channels of the next input, which is
+            used in the concatenation.
+        @type out_channels: int
+        @param out_channels: Number of output channels.
+        @type num_repeats: int
+        @param num_repeats: Number of RepVGGBlock repeats.
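+
+        Shape sketch with made-up sizes (C{x0} is reduced and upsampled 2x, then
+        concatenated with C{x1}):
+
+            >>> up = RepUpBlock(
+            ...     in_channels=256, in_channels_next=128, out_channels=128, num_repeats=3
+            ... )
+            >>> conv_out, out = up(torch.rand(1, 256, 16, 16), torch.rand(1, 128, 32, 32))
+            >>> conv_out.shape, out.shape
+            (torch.Size([1, 128, 16, 16]), torch.Size([1, 128, 32, 32]))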
+ """ + + super().__init__() + + self.conv = ConvModule( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=1, + stride=1, + ) + self.upsample = torch.nn.ConvTranspose2d( + in_channels=out_channels, + out_channels=out_channels, + kernel_size=2, + stride=2, + bias=True, + ) + self.rep_block = BlockRepeater( + block=RepVGGBlock, + in_channels=in_channels_next + out_channels, + out_channels=out_channels, + num_blocks=num_repeats, + ) + + def forward(self, x0: Tensor, x1: Tensor) -> tuple[Tensor, Tensor]: + conv_out = self.conv(x0) + upsample_out = self.upsample(conv_out) + concat_out = torch.cat([upsample_out, x1], dim=1) + out = self.rep_block(concat_out) + return conv_out, out + + +class RepDownBlock(nn.Module): + def __init__( + self, + in_channels: int, + downsample_out_channels: int, + in_channels_next: int, + out_channels: int, + num_repeats: int, + ): + """DownBlock used in RepPAN neck. + + @type in_channels: int + @param in_channels: Number of input channels. + @type downsample_out_channels: int + @param downsample_out_channels: Number of output channels after downsample. + @type in_channels_next: int + @param in_channels_next: Number of input channels of next input which is used in + concat. + @type out_channels: int + @param out_channels: Number of output channels. + @type num_repeats: int + @param num_repeats: Number of RepVGGBlock repeats. + """ + super().__init__() + + self.downsample = ConvModule( + in_channels=in_channels, + out_channels=downsample_out_channels, + kernel_size=3, + stride=2, + padding=3 // 2, + ) + self.rep_block = BlockRepeater( + block=RepVGGBlock, + in_channels=downsample_out_channels + in_channels_next, + out_channels=out_channels, + num_blocks=num_repeats, + ) + + def forward(self, x0: Tensor, x1: Tensor) -> Tensor: + x = self.downsample(x0) + x = torch.cat([x, x1], dim=1) + x = self.rep_block(x) + return x + + +T = TypeVar("T", int, tuple[int, ...]) + + +def autopad(kernel_size: T, padding: T | None = None) -> T: + """Compute padding based on kernel size. + + @type kernel_size: int | tuple[int, ...] + @param kernel_size: Kernel size. + @type padding: int | tuple[int, ...] | None + @param padding: Padding. Defaults to None. + + @rtype: int | tuple[int, ...] + @return: Computed padding. The output type is the same as the type of the + C{kernel_size}. + """ + if padding is not None: + return padding + if isinstance(kernel_size, int): + return kernel_size // 2 + return tuple(x // 2 for x in kernel_size) diff --git a/luxonis_train/nodes/classification_head.py b/luxonis_train/nodes/classification_head.py new file mode 100644 index 00000000..10f9b3c9 --- /dev/null +++ b/luxonis_train/nodes/classification_head.py @@ -0,0 +1,36 @@ +from torch import Tensor, nn + +from luxonis_train.utils.types import LabelType, Packet + +from .base_node import BaseNode + + +class ClassificationHead(BaseNode[Tensor, Tensor]): + in_channels: int + attach_index: int = -1 + + def __init__( + self, + dropout_rate: float = 0.2, + **kwargs, + ): + """Simple classification head. + + @type dropout_rate: float + @param dropout_rate: Dropout rate before last layer, range C{[0, 1]}. Defaults + to C{0.2}. 
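+
+        The head applies global average pooling before the linear layer, so it
+        accepts feature maps of any spatial size:
+        C{(N, in_channels, H, W) -> (N, n_classes)}.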
+ """ + super().__init__(task_type=LabelType.CLASSIFICATION, **kwargs) + + self.head = nn.Sequential( + nn.AdaptiveAvgPool2d(1), + nn.Flatten(), + nn.Dropout(dropout_rate), + nn.Linear(self.in_channels, self.n_classes), + ) + + def forward(self, inputs: Tensor) -> Tensor: + return self.head(inputs) + + def wrap(self, output: Tensor) -> Packet[Tensor]: + return {"classes": [output]} diff --git a/luxonis_train/nodes/contextspatial.py b/luxonis_train/nodes/contextspatial.py new file mode 100644 index 00000000..adbb84bc --- /dev/null +++ b/luxonis_train/nodes/contextspatial.py @@ -0,0 +1,103 @@ +"""Implementation of Context Spatial backbone. + +Source: U{BiseNetV1} +""" + + +from torch import Tensor, nn +from torch.nn import functional as F + +from luxonis_train.nodes.blocks import ( + AttentionRefinmentBlock, + ConvModule, + FeatureFusionBlock, +) +from luxonis_train.utils.registry import NODES + +from .base_node import BaseNode + + +class ContextSpatial(BaseNode[Tensor, list[Tensor]]): + attach_index: int = -1 + + def __init__(self, context_backbone: str = "MobileNetV2", **kwargs): + """Context spatial backbone. + TODO: Add more documentation. + + + @type context_backbone: str + @param context_backbone: Backbone used. Defaults to C{MobileNetV2}. + """ + super().__init__(**kwargs) + + self.context_path = ContextPath(NODES.get(context_backbone)(**kwargs)) + self.spatial_path = SpatialPath(3, 128) + self.ffm = FeatureFusionBlock(256, 256) + + def forward(self, x: Tensor) -> list[Tensor]: + spatial_out = self.spatial_path(x) + context16, _ = self.context_path(x) + fm_fuse = self.ffm(spatial_out, context16) + outs = [fm_fuse] + return outs + + +class SpatialPath(nn.Module): + def __init__(self, in_channels: int, out_channels: int): + super().__init__() + intermediate_channels = 64 + self.conv_7x7 = ConvModule(in_channels, intermediate_channels, 7, 2, 3) + self.conv_3x3_1 = ConvModule( + intermediate_channels, intermediate_channels, 3, 2, 1 + ) + self.conv_3x3_2 = ConvModule( + intermediate_channels, intermediate_channels, 3, 2, 1 + ) + self.conv_1x1 = ConvModule(intermediate_channels, out_channels, 1, 1, 0) + + def forward(self, x: Tensor) -> Tensor: + x = self.conv_7x7(x) + x = self.conv_3x3_1(x) + x = self.conv_3x3_2(x) + return self.conv_1x1(x) + + +class ContextPath(nn.Module): + def __init__(self, backbone: BaseNode): + super().__init__() + self.backbone = backbone + + self.up16 = nn.Upsample(scale_factor=2.0, mode="bilinear", align_corners=True) + self.up32 = nn.Upsample(scale_factor=2.0, mode="bilinear", align_corners=True) + + self.refine16 = ConvModule(128, 128, 3, 1, 1) + self.refine32 = ConvModule(128, 128, 3, 1, 1) + + def forward(self, x: Tensor) -> list[Tensor]: + *_, down16, down32 = self.backbone.forward(x) + + if not hasattr(self, "arm16"): + self.arm16 = AttentionRefinmentBlock(down16.shape[1], 128) + self.arm32 = AttentionRefinmentBlock(down32.shape[1], 128) + + self.global_context = nn.Sequential( + nn.AdaptiveAvgPool2d(1), ConvModule(down32.shape[1], 128, 1, 1, 0) + ) + + arm_down16 = self.arm16(down16) + arm_down32 = self.arm32(down32) + + global_down32 = self.global_context(down32) + global_down32 = F.interpolate( + global_down32, size=down32.size()[2:], mode="bilinear", align_corners=True + ) + + arm_down32 = arm_down32 + global_down32 + arm_down32 = self.up32(arm_down32) + arm_down32 = self.refine32(arm_down32) + + arm_down16 = arm_down16 + arm_down32 + arm_down16 = self.up16(arm_down16) + arm_down16 = self.refine16(arm_down16) + + return [arm_down16, arm_down32] diff 
--git a/luxonis_train/nodes/efficient_bbox_head.py b/luxonis_train/nodes/efficient_bbox_head.py new file mode 100644 index 00000000..9f500cd4 --- /dev/null +++ b/luxonis_train/nodes/efficient_bbox_head.py @@ -0,0 +1,167 @@ +"""Head for object detection. + +Adapted from U{YOLOv6: A Single-Stage Object Detection Framework for Industrial +Applications}. +""" + +from typing import Literal + +import torch +from torch import Tensor, nn + +from luxonis_train.nodes.blocks import EfficientDecoupledBlock +from luxonis_train.utils.boxutils import ( + anchors_for_fpn_features, + dist2bbox, + non_max_suppression, +) +from luxonis_train.utils.types import LabelType, Packet + +from .base_node import BaseNode + + +class EfficientBBoxHead( + BaseNode[list[Tensor], tuple[list[Tensor], list[Tensor], list[Tensor]]] +): + in_channels: list[int] + + def __init__( + self, + n_heads: Literal[2, 3, 4] = 3, + conf_thres: float = 0.25, + iou_thres: float = 0.45, + **kwargs, + ): + """Head for object detection. + + TODO: add more documentation + + @type n_heads: Literal[2,3,4] + @param n_heads: Number of output heads. Defaults to 3. + ***Note:*** Should be same also on neck in most cases. + + @type conf_thres: float + @param conf_thres: Threshold for confidence. Defaults to C{0.25}. + + @type iou_thres: float + @param iou_thres: Threshold for IoU. Defaults to C{0.45}. + """ + super().__init__(task_type=LabelType.BOUNDINGBOX, **kwargs) + + self.n_heads = n_heads + + self.conf_thres = conf_thres + self.iou_thres = iou_thres + + self.stride = self._fit_stride_to_num_heads() + self.grid_cell_offset = 0.5 + self.grid_cell_size = 5.0 + + self.heads = nn.ModuleList() + for i in range(self.n_heads): + curr_head = EfficientDecoupledBlock( + n_classes=self.n_classes, + in_channels=self.in_channels[i], + ) + self.heads.append(curr_head) + + def forward( + self, inputs: list[Tensor] + ) -> tuple[list[Tensor], list[Tensor], list[Tensor]]: + features: list[Tensor] = [] + cls_score_list: list[Tensor] = [] + reg_distri_list: list[Tensor] = [] + + for i, module in enumerate(self.heads): + out_feature, out_cls, out_reg = module(inputs[i]) + features.append(out_feature) + out_cls = torch.sigmoid(out_cls) + cls_score_list.append(out_cls) + reg_distri_list.append(out_reg) + + return features, cls_score_list, reg_distri_list + + def wrap( + self, output: tuple[list[Tensor], list[Tensor], list[Tensor]] + ) -> Packet[Tensor]: + features, cls_score_list, reg_distri_list = output + + if self.export: + outputs = [] + for out_cls, out_reg in zip(cls_score_list, reg_distri_list, strict=True): + conf, _ = out_cls.max(1, keepdim=True) + out = torch.cat([out_reg, conf, out_cls], dim=1) + outputs.append(out) + return {"boxes": outputs} + + cls_tensor = torch.cat( + [cls_score_list[i].flatten(2) for i in range(len(cls_score_list))], dim=2 + ).permute(0, 2, 1) + reg_tensor = torch.cat( + [reg_distri_list[i].flatten(2) for i in range(len(reg_distri_list))], dim=2 + ).permute(0, 2, 1) + + if self.training: + return { + "features": features, + "class_scores": [cls_tensor], + "distributions": [reg_tensor], + } + + else: + boxes = self._process_to_bbox((features, cls_tensor, reg_tensor)) + return { + "boxes": boxes, + "features": features, + "class_scores": [cls_tensor], + "distributions": [reg_tensor], + } + + def _fit_stride_to_num_heads(self): + """Returns correct stride for number of heads and attach index.""" + stride = torch.tensor( + [ + self.original_in_shape[2] / x[2] # type: ignore + for x in self.in_sizes[: self.n_heads] + ], + dtype=torch.int, 
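+            # one stride per used feature level: input height / feature-map height,
+            # e.g. a 640 px input with 80/40/20 px maps gives strides 8/16/32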
+        )
+        return stride
+
+    def _process_to_bbox(
+        self, output: tuple[list[Tensor], Tensor, Tensor]
+    ) -> list[Tensor]:
+        """Performs post-processing of the output and returns bounding boxes after NMS."""
+        features, cls_score_list, reg_dist_list = output
+        _, anchor_points, _, stride_tensor = anchors_for_fpn_features(
+            features,
+            self.stride,
+            self.grid_cell_size,
+            self.grid_cell_offset,
+            multiply_with_stride=False,
+        )
+
+        pred_bboxes = dist2bbox(reg_dist_list, anchor_points, out_format="xyxy")
+
+        pred_bboxes *= stride_tensor
+        output_merged = torch.cat(
+            [
+                pred_bboxes,
+                torch.ones(
+                    (features[-1].shape[0], pred_bboxes.shape[1], 1),
+                    dtype=pred_bboxes.dtype,
+                    device=pred_bboxes.device,
+                ),
+                cls_score_list,
+            ],
+            dim=-1,
+        )
+
+        return non_max_suppression(
+            output_merged,
+            n_classes=self.n_classes,
+            conf_thres=self.conf_thres,
+            iou_thres=self.iou_thres,
+            bbox_format="xyxy",
+            predicts_objectness=False,
+        )
diff --git a/luxonis_train/nodes/efficientnet.py b/luxonis_train/nodes/efficientnet.py
new file mode 100644
index 00000000..0b0aedde
--- /dev/null
+++ b/luxonis_train/nodes/efficientnet.py
@@ -0,0 +1,40 @@
+"""Implementation of the EfficientNet backbone.
+
+Source: U{https://github.com/rwightman/gen-efficientnet-pytorch}
+@license: U{Apache 2.0}
+"""
+
+import torch
+from torch import Tensor
+
+from .base_node import BaseNode
+
+
+class EfficientNet(BaseNode[Tensor, list[Tensor]]):
+    def __init__(self, download_weights: bool = False, **kwargs):
+        """EfficientNet backbone.
+
+        @type download_weights: bool
+        @param download_weights: If C{True}, download weights pretrained on ImageNet.
+            Defaults to C{False}.
+        """
+        super().__init__(**kwargs)
+
+        efficientnet_lite0_model = torch.hub.load(
+            "rwightman/gen-efficientnet-pytorch",
+            "efficientnet_lite0",
+            pretrained=download_weights,
+        )
+        self.out_indices = [1, 2, 4, 6]
+        self.backbone = efficientnet_lite0_model
+
+    def forward(self, x: Tensor) -> list[Tensor]:
+        outs = []
+        x = self.backbone.conv_stem(x)
+        x = self.backbone.bn1(x)
+        x = self.backbone.act1(x)
+        for i, m in enumerate(self.backbone.blocks):
+            x = m(x)
+            if i in self.out_indices:
+                outs.append(x)
+        return outs
diff --git a/luxonis_train/nodes/efficientrep.py b/luxonis_train/nodes/efficientrep.py
new file mode 100644
index 00000000..e6a014af
--- /dev/null
+++ b/luxonis_train/nodes/efficientrep.py
@@ -0,0 +1,113 @@
+"""Implementation of the EfficientRep backbone.
+
+Adapted from U{YOLOv6: A Single-Stage Object Detection Framework for Industrial
+Applications}.
+"""
+
+import logging
+
+from torch import Tensor, nn
+
+from luxonis_train.nodes.blocks import (
+    BlockRepeater,
+    RepVGGBlock,
+    SpatialPyramidPoolingBlock,
+)
+from luxonis_train.utils.general import make_divisible
+
+from .base_node import BaseNode
+
+
+class EfficientRep(BaseNode[Tensor, list[Tensor]]):
+    attach_index: int = -1
+
+    def __init__(
+        self,
+        channels_list: list[int] | None = None,
+        num_repeats: list[int] | None = None,
+        depth_mul: float = 0.33,
+        width_mul: float = 0.25,
+        **kwargs,
+    ):
+        """EfficientRep backbone.
+
+        @type channels_list: list[int] | None
+        @param channels_list: List of number of channels for each block. Defaults to
+            C{[64, 128, 256, 512, 1024]}.
+        @type num_repeats: list[int] | None
+        @param num_repeats: List of number of repeats of RepVGGBlock. Defaults to C{[1,
+            6, 12, 18, 6]}.
+        @type depth_mul: float
+        @param depth_mul: Depth multiplier. Defaults to C{0.33}.
+        @type width_mul: float
+        @param width_mul: Width multiplier. Defaults to C{0.25}.
+ @type kwargs: Any + @param kwargs: Additional arguments to pass to L{BaseNode}. + """ + super().__init__(**kwargs) + + channels_list = channels_list or [64, 128, 256, 512, 1024] + num_repeats = num_repeats or [1, 6, 12, 18, 6] + channels_list = [make_divisible(i * width_mul, 8) for i in channels_list] + num_repeats = [ + (max(round(i * depth_mul), 1) if i > 1 else i) for i in num_repeats + ] + + in_channels = self.in_channels + if not isinstance(in_channels, int): + raise ValueError("EfficientRep module expects only one input.") + + self.repvgg_encoder = RepVGGBlock( + in_channels=in_channels, + out_channels=channels_list[0], + kernel_size=3, + stride=2, + ) + + self.blocks = nn.ModuleList() + for i in range(4): + curr_block = nn.Sequential( + RepVGGBlock( + in_channels=channels_list[i], + out_channels=channels_list[i + 1], + kernel_size=3, + stride=2, + ), + BlockRepeater( + block=RepVGGBlock, + in_channels=channels_list[i + 1], + out_channels=channels_list[i + 1], + num_blocks=num_repeats[i + 1], + ), + ) + self.blocks.append(curr_block) + + self.blocks[-1].append( + SpatialPyramidPoolingBlock( + in_channels=channels_list[4], + out_channels=channels_list[4], + kernel_size=5, + ) + ) + + def set_export_mode(self, mode: bool = True) -> None: + """Reparametrizes instances of `RepVGGBlock` in the network. + + @type mode: bool + @param mode: Whether to set the export mode. Defaults to C{True}. + """ + super().set_export_mode(mode) + logger = logging.getLogger(__name__) + if mode: + logger.info("Reparametrizing EfficientRep.") + for module in self.modules(): + if isinstance(module, RepVGGBlock): + module.reparametrize() + + def forward(self, x: Tensor) -> list[Tensor]: + outputs = [] + x = self.repvgg_encoder(x) + for block in self.blocks: + x = block(x) + outputs.append(x) + return outputs diff --git a/luxonis_train/nodes/implicit_keypoint_bbox_head.py b/luxonis_train/nodes/implicit_keypoint_bbox_head.py new file mode 100644 index 00000000..0fdca420 --- /dev/null +++ b/luxonis_train/nodes/implicit_keypoint_bbox_head.py @@ -0,0 +1,263 @@ +import logging +import math +from typing import Literal, cast + +import torch +from torch import Tensor, nn + +from luxonis_train.nodes.blocks import ( + KeypointBlock, + LearnableMulAddConv, +) +from luxonis_train.utils.boxutils import ( + non_max_suppression, + process_bbox_predictions, + process_keypoints_predictions, +) +from luxonis_train.utils.types import LabelType, Packet + +from .base_node import BaseNode + +logger = logging.getLogger(__name__) + + +class ImplicitKeypointBBoxHead(BaseNode): + attach_index: Literal["all"] = "all" + + def __init__( + self, + n_keypoints: int | None = None, + num_heads: int = 3, + anchors: list[list[float]] | None = None, + init_coco_biases: bool = True, + conf_thres: float = 0.25, + iou_thres: float = 0.45, + **kwargs, + ): + """Head for object and keypoint detection. + + Adapted from U{YOLOv7: Trainable bag-of-freebies sets new state-of-the-art for real-time + object detectors}. + + TODO: more technical documentation + + @type n_keypoints: int | None + @param n_keypoints: Number of keypoints. If not defined, inferred + from the dataset metadata (if provided). Defaults to C{None}. + @type num_heads: int + @param num_heads: Number of output heads. Defaults to C{3}. + B{Note:} Should be same also on neck in most cases. + @type anchors: list[list[float]] | None + @param anchors: Anchors used for object detection. 
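+            One flat list of C{[w, h, w, h, ...]} values per head; if C{None},
+            anchors are generated automatically from the dataset metadata.
+            Defaults to C{None}.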
+ @type init_coco_biases: bool + @param init_coco_biases: Whether to use COCO bias and weight + @type conf_thres: float + @param conf_thres: Threshold for confidence. Defaults to C{0.25}. + @type iou_thres: float + @param iou_thres: Threshold for IoU. Defaults to C{0.45}. + """ + super().__init__(task_type=LabelType.KEYPOINT, **kwargs) + + if anchors is None: + logger.info("No anchors provided, generating them automatically.") + anchors, recall = self.dataset_metadata.autogenerate_anchors(num_heads) + logger.info(f"Anchors generated. Best possible recall: {recall:.2f}") + + self.conf_thres = conf_thres + self.iou_thres = iou_thres + + n_keypoints = n_keypoints or self.dataset_metadata._n_keypoints + + if n_keypoints is None: + raise ValueError( + "Number of keypoints must be specified either in the constructor or " + "in the dataset metadata." + ) + self.n_keypoints = n_keypoints + self.num_heads = num_heads + + self.box_offset = 5 + self.n_det_out = self.n_classes + self.box_offset + self.n_kpt_out = 3 * self.n_keypoints + self.n_out = self.n_det_out + self.n_kpt_out + self.n_anchors = len(anchors[0]) // 2 + self.grid: list[Tensor] = [] + + self.anchors = torch.tensor(anchors).float().view(self.num_heads, -1, 2) + self.anchor_grid = self.anchors.clone().view(self.num_heads, 1, -1, 1, 1, 2) + + self.channel_list, self.stride = self._fit_to_num_heads( + cast(list[int], self.in_channels) + ) + + self.learnable_mul_add_conv = nn.ModuleList( + LearnableMulAddConv( + add_channel=in_channels, + mul_channel=self.n_det_out * self.n_anchors, + conv_in_channel=in_channels, + conv_out_channel=self.n_det_out * self.n_anchors, + ) + for in_channels in self.channel_list + ) + + self.kpt_heads = nn.ModuleList( + KeypointBlock( + in_channels=in_channels, + out_channels=self.n_kpt_out * self.n_anchors, + ) + for in_channels in self.channel_list + ) + + self.anchors /= self.stride.view(-1, 1, 1) + self._check_anchor_order() + + if init_coco_biases: + self._initialize_weights_and_biases() + + def forward(self, inputs: list[Tensor]) -> tuple[list[Tensor], Tensor]: + predictions: list[Tensor] = [] + features: list[Tensor] = [] + + self.anchor_grid = self.anchor_grid.to(inputs[0].device) + + for i in range(self.num_heads): + feat = cast( + Tensor, + torch.cat( + ( + self.learnable_mul_add_conv[i](inputs[i]), + self.kpt_heads[i](inputs[i]), + ), + axis=1, + ), # type: ignore + ) + + batch_size, _, feature_height, feature_width = feat.shape + if i >= len(self.grid): + self.grid.append( + self._construct_grid(feature_width, feature_height).to(feat.device) + ) + + feat = feat.reshape( + batch_size, self.n_anchors, self.n_out, feature_height, feature_width + ).permute(0, 1, 3, 4, 2) + + features.append(feat) + predictions.append( + self._build_predictions( + feat, self.anchor_grid[i], self.grid[i], self.stride[i] + ) + ) + + return features, torch.cat(predictions, dim=1) + + def wrap(self, outputs: tuple[list[Tensor], Tensor]) -> Packet[Tensor]: + features, predictions = outputs + + if self.export: + return {"boxes_and_keypoints": [predictions]} + + if self.training: + return {"features": features} + + nms = non_max_suppression( + predictions, + n_classes=self.n_classes, + conf_thres=self.conf_thres, + iou_thres=self.iou_thres, + bbox_format="cxcywh", + ) + + return { + "boxes": [detection[:, :6] for detection in nms], + "keypoints": [ + detection[:, 6:].reshape(-1, self.n_keypoints, 3) for detection in nms + ], + "features": features, + } + + def _build_predictions( + self, feat: Tensor, anchor_grid: Tensor, grid: 
Tensor, stride: Tensor + ) -> Tensor: + batch_size = feat.shape[0] + x_bbox = feat[..., : self.box_offset + self.n_classes] + x_keypoints = feat[..., self.box_offset + self.n_classes :] + + box_cxcy, box_wh, box_tail = process_bbox_predictions(x_bbox, anchor_grid) + grid = grid.to(box_cxcy.device) + stride = stride.to(box_cxcy.device) + box_cxcy = (box_cxcy + grid) * stride + out_bbox = torch.cat((box_cxcy, box_wh, box_tail), dim=-1) + + grid_x = grid[..., 0:1] + grid_y = grid[..., 1:2] + kpt_x, kpt_y, kpt_vis = process_keypoints_predictions(x_keypoints) + kpt_x = (kpt_x + grid_x) * stride + kpt_y = (kpt_y + grid_y) * stride + out_kpt = torch.stack([kpt_x, kpt_y, kpt_vis.sigmoid()], dim=-1).reshape( + *kpt_x.shape[:-1], -1 + ) + + out = torch.cat((out_bbox, out_kpt), dim=-1) + + return out.reshape(batch_size, -1, self.n_out) + + def _infer_bbox( + self, bbox: Tensor, stride: Tensor, grid: Tensor, anchor_grid: Tensor + ) -> Tensor: + out_bbox = bbox.sigmoid() + out_bbox_xy = (out_bbox[..., 0:2] * 2.0 - 0.5 + grid) * stride + out_bbox_wh = (out_bbox[..., 2:4] * 2) ** 2 * anchor_grid.view( + 1, self.n_anchors, 1, 1, 2 + ) + return torch.cat((out_bbox_xy, out_bbox_wh, out_bbox[..., 4:]), dim=-1) + + def _fit_to_num_heads(self, channel_list: list): + out_channel_list = channel_list[: self.num_heads] + stride = torch.tensor( + [ + self.original_in_shape[2] / h + for h in cast(list[int], self.in_height)[: self.num_heads] + ], + dtype=torch.int, + ) + return out_channel_list, stride + + def _initialize_weights_and_biases(self, class_freq: Tensor | None = None): + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode="fan_out", nonlinearity="relu") + elif isinstance(m, nn.BatchNorm2d): + m.eps = 1e-3 + m.momentum = 0.03 + elif isinstance(m, (nn.Hardswish, nn.LeakyReLU, nn.ReLU, nn.ReLU6)): + m.inplace = True + + for mi, s in zip(self.learnable_mul_add_conv, self.stride): + b = mi.conv.bias.view(self.n_anchors, -1) + b.data[:, 4] += math.log(8 / (640 / s) ** 2) + b.data[:, 5:] += ( + math.log(0.6 / (self.n_classes - 0.99)) + if class_freq is None + else torch.log(class_freq / class_freq.sum()) + ) + mi.conv.bias = torch.nn.Parameter(b.view(-1), requires_grad=True) + + def _construct_grid(self, feature_width: int, feature_height: int): + grid_y, grid_x = torch.meshgrid( + [torch.arange(feature_height), torch.arange(feature_width)], indexing="ij" + ) + return ( + torch.stack((grid_x, grid_y), 2) + .view((1, 1, feature_height, feature_width, 2)) + .float() + ) + + def _check_anchor_order(self): + a = self.anchor_grid.prod(-1).view(-1) + delta_a = a[-1] - a[0] + delta_s = self.stride[-1] - self.stride[0] + if delta_a.sign() != delta_s.sign(): + logger.warning("Reversing anchor order") + self.anchors[:] = self.anchors.flip(0) + self.anchor_grid[:] = self.anchor_grid.flip(0) diff --git a/luxonis_train/nodes/micronet.py b/luxonis_train/nodes/micronet.py new file mode 100644 index 00000000..03b43e1f --- /dev/null +++ b/luxonis_train/nodes/micronet.py @@ -0,0 +1,847 @@ +from typing import Literal + +import torch +from torch import Tensor, nn + +from luxonis_train.nodes.activations import HSigmoid, HSwish +from luxonis_train.nodes.blocks import ConvModule + +from .base_node import BaseNode + + +class MicroNet(BaseNode[Tensor, list[Tensor]]): + """ + + TODO: DOCS + """ + + attach_index: int = -1 + + def __init__(self, variant: Literal["M1", "M2", "M3"] = "M1", **kwargs): + """MicroNet backbone. 
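+
+        MicroNet is an extremely low-FLOP backbone built from micro-factorized
+        convolutions (L{DepthSpatialSepConv}) and dynamic activations
+        (L{DYShiftMax}).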
+ + @type variant: Literal["M1", "M2", "M3"] + @param variant: Model variant to use. Defaults to "M1". + """ + super().__init__(**kwargs) + + if variant not in MICRONET_VARIANTS_SETTINGS: + raise ValueError( + f"MicroNet model variant should be in {list(MICRONET_VARIANTS_SETTINGS.keys())}" + ) + + self.inplanes = 64 + ( + in_channels, + stem_groups, + _, + init_a, + init_b, + out_indices, + channels, + cfgs, + ) = MICRONET_VARIANTS_SETTINGS[variant] + self.out_indices = out_indices + self.channels = channels + + self.features = nn.ModuleList([Stem(3, 2, stem_groups)]) + + for ( + stride, + out_channels, + kernel_size, + c1, + c2, + g1, + g2, + _, + g3, + g4, + y1, + y2, + y3, + r, + ) in cfgs: + self.features.append( + MicroBlock( + in_channels, + out_channels, + kernel_size, + stride, + (c1, c2), + (g1, g2), + (g3, g4), + (y1, y2, y3), + r, + init_a, + init_b, + ) + ) + in_channels = out_channels + + def forward(self, x: Tensor) -> list[Tensor]: + outs = [] + for m in self.features: + x = m(x) + outs.append(x) + return outs + + +class MicroBlock(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + kernel_size: int = 3, + stride: int = 1, + t1: tuple[int, int] = (2, 2), + gs1: tuple[int, int] = (0, 6), + groups_1x1: tuple[int, int] = (1, 1), + dy: tuple[int, int, int] = (2, 0, 1), + r: int = 1, + init_a: tuple[float, float] = (1.0, 1.0), + init_b: tuple[float, float] = (0.0, 0.0), + ): + super().__init__() + + self.identity = stride == 1 and in_channels == out_channels + y1, y2, y3 = dy + g1, g2 = groups_1x1 + reduction = 8 * r + intermediate_channels = in_channels * t1[0] * t1[1] + + if gs1[0] == 0: + self.layers = nn.Sequential( + DepthSpatialSepConv(in_channels, t1, kernel_size, stride), + DYShiftMax( + intermediate_channels, + intermediate_channels, + init_a, + init_b, + True if y2 == 2 else False, + gs1[1], + reduction, + ) + if y2 > 0 + else nn.ReLU6(True), + ChannelShuffle(gs1[1]), + ChannelShuffle(intermediate_channels // 2) + if y2 != 0 + else nn.Sequential(), + ConvModule( + in_channels=intermediate_channels, + out_channels=out_channels, + kernel_size=1, + groups=g1, + activation=nn.Identity(), + ), + DYShiftMax( + out_channels, + out_channels, + (1.0, 0.0), + (0.0, 0.0), + False, + g2, + reduction // 2, + ) + if y3 > 0 + else nn.Sequential(), + ChannelShuffle(g2), + ChannelShuffle(out_channels // 2) + if out_channels % 2 == 0 and y3 != 0 + else nn.Sequential(), + ) + elif g2 == 0: + self.layers = nn.Sequential( + ConvModule( + in_channels=in_channels, + out_channels=intermediate_channels, + kernel_size=1, + groups=gs1[0], + activation=nn.Identity(), + ), + DYShiftMax( + intermediate_channels, + intermediate_channels, + (1.0, 0.0), + (0.0, 0.0), + False, + gs1[1], + reduction, + ) + if y3 > 0 + else nn.Sequential(), + ) + else: + self.layers = nn.Sequential( + ConvModule( + in_channels=in_channels, + out_channels=intermediate_channels, + kernel_size=1, + groups=gs1[0], + activation=nn.Identity(), + ), + DYShiftMax( + intermediate_channels, + intermediate_channels, + init_a, + init_b, + True if y1 == 2 else False, + gs1[1], + reduction, + ) + if y1 > 0 + else nn.ReLU6(True), + ChannelShuffle(gs1[1]), + DepthSpatialSepConv(intermediate_channels, (1, 1), kernel_size, stride), + nn.Sequential(), + DYShiftMax( + intermediate_channels, + intermediate_channels, + init_a, + init_b, + True if y2 == 2 else False, + gs1[1], + reduction, + True, + ) + if y2 > 0 + else nn.ReLU6(True), + ChannelShuffle(intermediate_channels // 4) + if y1 != 0 and y2 != 0 + else 
nn.Sequential()
+                if y1 == 0 and y2 == 0
+                else ChannelShuffle(intermediate_channels // 2),
+                ConvModule(
+                    in_channels=intermediate_channels,
+                    out_channels=out_channels,
+                    kernel_size=1,
+                    groups=g1,
+                    activation=nn.Identity(),
+                ),
+                DYShiftMax(
+                    out_channels,
+                    out_channels,
+                    (1.0, 0.0),
+                    (0.0, 0.0),
+                    False,
+                    g2,
+                    reduction=reduction // 2
+                    if out_channels < intermediate_channels
+                    else reduction,
+                )
+                if y3 > 0
+                else nn.Sequential(),
+                ChannelShuffle(g2),
+                ChannelShuffle(out_channels // 2) if y3 != 0 else nn.Sequential(),
+            )
+
+    def forward(self, x: Tensor):
+        identity = x
+        out = self.layers(x)
+        if self.identity:
+            out += identity
+        return out
+
+
+class ChannelShuffle(nn.Module):
+    def __init__(self, groups: int):
+        super().__init__()
+        self.groups = groups
+
+    def forward(self, x):
+        b, c, h, w = x.size()
+        channels_per_group = c // self.groups
+        # reshape
+        x = x.view(b, self.groups, channels_per_group, h, w)
+        x = torch.transpose(x, 1, 2).contiguous()
+        out = x.view(b, -1, h, w)
+        return out
+
+
+class DYShiftMax(nn.Module):
+    def __init__(
+        self,
+        in_channels: int,
+        out_channels: int,
+        init_a: tuple[float, float] = (0.0, 0.0),
+        init_b: tuple[float, float] = (0.0, 0.0),
+        act_relu: bool = True,
+        g: int = 6,
+        reduction: int = 4,
+        expansion: bool = False,
+    ):
+        super().__init__()
+        self.exp: Literal[2, 4] = 4 if act_relu else 2
+        self.init_a = init_a
+        self.init_b = init_b
+        self.out_channels = out_channels
+
+        self.avg_pool = nn.Sequential(nn.Sequential(), nn.AdaptiveAvgPool2d(1))
+
+        squeeze = self._make_divisible(in_channels // reduction, 4)
+
+        self.fc = nn.Sequential(
+            nn.Linear(in_channels, squeeze),
+            nn.ReLU(True),
+            nn.Linear(squeeze, out_channels * self.exp),
+            HSigmoid(),
+        )
+
+        if g != 1 and expansion:
+            g = in_channels // g
+
+        gc = in_channels // g
+        index = Tensor(range(in_channels)).view(1, in_channels, 1, 1)
+        index = index.view(1, g, gc, 1, 1)
+        indexgs = torch.split(index, [1, g - 1], dim=1)
+        indexgs = torch.cat([indexgs[1], indexgs[0]], dim=1)
+        indexs = torch.split(indexgs, [1, gc - 1], dim=2)
+        indexs = torch.cat([indexs[1], indexs[0]], dim=2)
+        self.index = indexs.view(in_channels).long()
+
+    def forward(self, x: Tensor):
+        B, C, _, _ = x.shape
+        x_out = x
+
+        y = self.avg_pool(x).view(B, C)
+        y = self.fc(y).view(B, -1, 1, 1)
+        y = (y - 0.5) * 4.0
+
+        x2 = x_out[:, self.index, :, :]
+
+        if self.exp == 4:
+            a1, b1, a2, b2 = torch.split(y, self.out_channels, dim=1)
+
+            a1 = a1 + self.init_a[0]
+            a2 = a2 + self.init_a[1]
+            b1 = b1 + self.init_b[0]
+            b2 = b2 + self.init_b[1]
+
+            z1 = x_out * a1 + x2 * b1
+            z2 = x_out * a2 + x2 * b2
+
+            out = torch.max(z1, z2)
+
+        elif self.exp == 2:
+            a1, b1 = torch.split(y, self.out_channels, dim=1)
+            a1 = a1 + self.init_a[0]
+            b1 = b1 + self.init_b[0]
+            out = x_out * a1 + x2 * b1
+        else:
+            raise RuntimeError("Expansion should be 2 or 4.")
+
+        return out
+
+    def _make_divisible(self, v, divisor, min_value=None):
+        if min_value is None:
+            min_value = divisor
+        new_v = max(min_value, int(v + divisor / 2) // divisor * divisor)
+        # Make sure that round down does not go down by more than 10%.
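+        # e.g. v=30, divisor=8: int(34) // 8 * 8 = 32 and 32 >= 0.9 * 30, so 32 is kept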
+ if new_v < 0.9 * v: + new_v += divisor + return new_v + + +class SwishLinear(nn.Module): + def __init__(self, in_channels: int, out_channels: int): + super().__init__() + self.linear = nn.Sequential( + nn.Linear(in_channels, out_channels), nn.BatchNorm1d(out_channels), HSwish() + ) + + def forward(self, x: Tensor): + return self.linear(x) + + +class SpatialSepConvSF(nn.Module): + def __init__( + self, in_channels: int, outs: tuple[int, int], kernel_size: int, stride: int + ): + super().__init__() + out_channels1, out_channels2 = outs + self.conv = nn.Sequential( + nn.Conv2d( + in_channels, + out_channels1, + (kernel_size, 1), + (stride, 1), + (kernel_size // 2, 0), + bias=False, + ), + nn.BatchNorm2d(out_channels1), + nn.Conv2d( + out_channels1, + out_channels1 * out_channels2, + (1, kernel_size), + (1, stride), + (0, kernel_size // 2), + groups=out_channels1, + bias=False, + ), + nn.BatchNorm2d(out_channels1 * out_channels2), + ChannelShuffle(out_channels1), + ) + + def forward(self, x: Tensor): + return self.conv(x) + + +class Stem(nn.Module): + def __init__(self, in_channels: int, stride: int, outs: tuple[int, int] = (4, 4)): + super().__init__() + self.stem = nn.Sequential( + SpatialSepConvSF(in_channels, outs, 3, stride), nn.ReLU6(True) + ) + + def forward(self, x: Tensor): + return self.stem(x) + + +class DepthSpatialSepConv(nn.Module): + def __init__( + self, in_channels: int, expand: tuple[int, int], kernel_size: int, stride: int + ): + super().__init__() + exp1, exp2 = expand + intermediate_channels = in_channels * exp1 + out_channels = in_channels * exp1 * exp2 + + self.conv = nn.Sequential( + nn.Conv2d( + in_channels, + intermediate_channels, + (kernel_size, 1), + (stride, 1), + (kernel_size // 2, 0), + groups=in_channels, + bias=False, + ), + nn.BatchNorm2d(intermediate_channels), + nn.Conv2d( + intermediate_channels, + out_channels, + (1, kernel_size), + (1, stride), + (0, kernel_size // 2), + groups=intermediate_channels, + bias=False, + ), + nn.BatchNorm2d(out_channels), + ) + + def forward(self, x: Tensor): + return self.conv(x) + + +MICRONET_VARIANTS_SETTINGS = { + "M1": [ + 6, # stem_ch + [3, 2], # stem_groups + 960, # out_ch + [1.0, 1.0], # init_a + [0.0, 0.0], # init_b + [1, 2, 4, 7], # out indices + [8, 16, 32, 576], + [ + # s, c, ks, c1, c2, g1, g2, c3, g3, g4, y1, y2, y3, r + [2, 8, 3, 2, 2, 0, 6, 8, 2, 2, 2, 0, 1, 1], + [2, 16, 3, 2, 2, 0, 8, 16, 4, 4, 2, 2, 1, 1], + [ + 2, + 16, + 5, + 2, + 2, + 0, + 16, + 16, + 4, + 4, + 2, + 2, + 1, + 1, + ], + [ + 1, + 32, + 5, + 1, + 6, + 4, + 4, + 32, + 4, + 4, + 2, + 2, + 1, + 1, + ], + [ + 2, + 64, + 5, + 1, + 6, + 8, + 8, + 64, + 8, + 8, + 2, + 2, + 1, + 1, + ], + [ + 1, + 96, + 3, + 1, + 6, + 8, + 8, + 96, + 8, + 8, + 2, + 2, + 1, + 2, + ], + [1, 576, 3, 1, 6, 12, 12, 0, 0, 0, 2, 2, 1, 2], # 96->96(4,24)->576 + ], + ], + "M2": [ + 8, + [4, 2], + 1024, + [1.0, 1.0], + [0.0, 0.0], + [1, 3, 6, 9], + [12, 24, 64, 768], + [ + # s, c, ks, c1, c2, g1, g2, c3, g3, g4, y1, y2, y3, r + [ + 2, + 12, + 3, + 2, + 2, + 0, + 8, + 12, + 4, + 4, + 2, + 0, + 1, + 1, + ], + [ + 2, + 16, + 3, + 2, + 2, + 0, + 12, + 16, + 4, + 4, + 2, + 2, + 1, + 1, + ], + [ + 1, + 24, + 3, + 2, + 2, + 0, + 16, + 24, + 4, + 4, + 2, + 2, + 1, + 1, + ], + [ + 2, + 32, + 5, + 1, + 6, + 6, + 6, + 32, + 4, + 4, + 2, + 2, + 1, + 1, + ], + [ + 1, + 32, + 5, + 1, + 6, + 8, + 8, + 32, + 4, + 4, + 2, + 2, + 1, + 2, + ], + [ + 1, + 64, + 5, + 1, + 6, + 8, + 8, + 64, + 8, + 8, + 2, + 2, + 1, + 2, + ], + [ + 2, + 96, + 5, + 1, + 6, + 8, + 8, + 96, + 8, + 8, + 2, + 2, + 
1, + 2, + ], + [ + 1, + 128, + 3, + 1, + 6, + 12, + 12, + 128, + 8, + 8, + 2, + 2, + 1, + 2, + ], + [1, 768, 3, 1, 6, 16, 16, 0, 0, 0, 2, 2, 1, 2], + ], + ], + "M3": [ + 12, + [4, 3], + 1024, + [1.0, 0.5], + [0.0, 0.5], + [1, 3, 8, 12], + [16, 24, 80, 864], + [ + # s, c, ks, c1, c2, g1, g2, c3, g3, g4, y1, y2, y3, r + [ + 2, + 16, + 3, + 2, + 2, + 0, + 12, + 16, + 4, + 4, + 0, + 2, + 0, + 1, + ], + [ + 2, + 24, + 3, + 2, + 2, + 0, + 16, + 24, + 4, + 4, + 0, + 2, + 0, + 1, + ], + [ + 1, + 24, + 3, + 2, + 2, + 0, + 24, + 24, + 4, + 4, + 0, + 2, + 0, + 1, + ], + [ + 2, + 32, + 5, + 1, + 6, + 6, + 6, + 32, + 4, + 4, + 0, + 2, + 0, + 1, + ], + [ + 1, + 32, + 5, + 1, + 6, + 8, + 8, + 32, + 4, + 4, + 0, + 2, + 0, + 2, + ], + [ + 1, + 64, + 5, + 1, + 6, + 8, + 8, + 48, + 8, + 8, + 0, + 2, + 0, + 2, + ], + [ + 1, + 80, + 5, + 1, + 6, + 8, + 8, + 80, + 8, + 8, + 0, + 2, + 0, + 2, + ], + [ + 1, + 80, + 5, + 1, + 6, + 10, + 10, + 80, + 8, + 8, + 0, + 2, + 0, + 2, + ], + [ + 2, + 120, + 5, + 1, + 6, + 10, + 10, + 120, + 10, + 10, + 0, + 2, + 0, + 2, + ], + [ + 1, + 120, + 5, + 1, + 6, + 12, + 12, + 120, + 10, + 10, + 0, + 2, + 0, + 2, + ], + [ + 1, + 144, + 3, + 1, + 6, + 12, + 12, + 144, + 12, + 12, + 0, + 2, + 0, + 2, + ], + [1, 864, 3, 1, 6, 12, 12, 0, 0, 0, 0, 2, 0, 2], + ], + ], +} diff --git a/luxonis_train/nodes/mobilenetv2.py b/luxonis_train/nodes/mobilenetv2.py new file mode 100644 index 00000000..27fe87ec --- /dev/null +++ b/luxonis_train/nodes/mobilenetv2.py @@ -0,0 +1,45 @@ +"""MobileNetV2 backbone. + +TODO: source? +""" + +import torchvision +from torch import Tensor + +from .base_node import BaseNode + + +class MobileNetV2(BaseNode[Tensor, list[Tensor]]): + """Implementation of the MobileNetV2 backbone. + + TODO: add more info + """ + + attach_index: int = -1 + + def __init__(self, download_weights: bool = False, **kwargs): + """Constructor of the MobileNetV2 backbone. + + @type download_weights: bool + @param download_weights: If True download weights from imagenet. Defaults to + False. + @type kwargs: Any + @param kwargs: Additional arguments to pass to L{BaseNode}. + """ + super().__init__(**kwargs) + + mobilenet_v2 = torchvision.models.mobilenet_v2( + weights="DEFAULT" if download_weights else None + ) + self.out_indices = [3, 6, 13, 17] + self.channels = [24, 32, 96, 320] + self.backbone = mobilenet_v2 + + def forward(self, x: Tensor) -> list[Tensor]: + outs = [] + for i, m in enumerate(self.backbone.features): + x = m(x) + if i in self.out_indices: + outs.append(x) + + return outs diff --git a/luxonis_train/nodes/mobileone.py b/luxonis_train/nodes/mobileone.py new file mode 100644 index 00000000..e92d3225 --- /dev/null +++ b/luxonis_train/nodes/mobileone.py @@ -0,0 +1,430 @@ +"""MobileOne backbone. 
+
+Source: U{https://github.com/apple/ml-mobileone}
+@license: U{Apple}
+"""
+
+
+from typing import Literal
+
+import torch
+from torch import Tensor, nn
+
+from luxonis_train.nodes.blocks import ConvModule, SqueezeExciteBlock
+
+from .base_node import BaseNode
+
+
+class MobileOne(BaseNode[Tensor, list[Tensor]]):
+    """Implementation of MobileOne backbone.
+
+    TODO: add more details
+    """
+
+    attach_index: int = -1
+    in_channels: int
+
+    VARIANTS_SETTINGS: dict[str, dict] = {
+        "s0": {"width_multipliers": (0.75, 1.0, 1.0, 2.0), "num_conv_branches": 4},
+        "s1": {"width_multipliers": (1.5, 1.5, 2.0, 2.5)},
+        "s2": {"width_multipliers": (1.5, 2.0, 2.5, 4.0)},
+        "s3": {"width_multipliers": (2.0, 2.5, 3.0, 4.0)},
+        "s4": {"width_multipliers": (3.0, 3.5, 3.5, 4.0), "use_se": True},
+    }
+
+    def __init__(self, variant: Literal["s0", "s1", "s2", "s3", "s4"] = "s0", **kwargs):
+        """Constructor for the MobileOne module.
+
+        @type variant: Literal["s0", "s1", "s2", "s3", "s4"]
+        @param variant: Specifies which variant of the MobileOne network to use. For
+            details, see TODO. Defaults to "s0".
+        """
+        super().__init__(**kwargs)
+
+        if variant not in MobileOne.VARIANTS_SETTINGS.keys():
+            raise ValueError(
+                f"MobileOne model variant should be in {list(MobileOne.VARIANTS_SETTINGS.keys())}"
+            )
+
+        variant_params = MobileOne.VARIANTS_SETTINGS[variant]
+        # TODO: make configurable
+        self.width_multipliers = variant_params["width_multipliers"]
+        self.num_conv_branches = variant_params.get("num_conv_branches", 1)
+        self.num_blocks_per_stage = [2, 8, 10, 1]
+        self.use_se = variant_params.get("use_se", False)
+
+        self.in_planes = min(64, int(64 * self.width_multipliers[0]))
+
+        self.stage0 = MobileOneBlock(
+            in_channels=self.in_channels,
+            out_channels=self.in_planes,
+            kernel_size=3,
+            stride=2,
+            padding=1,
+        )
+        self.cur_layer_idx = 1
+        self.stage1 = self._make_stage(
+            int(64 * self.width_multipliers[0]),
+            self.num_blocks_per_stage[0],
+            num_se_blocks=0,
+        )
+        self.stage2 = self._make_stage(
+            int(128 * self.width_multipliers[1]),
+            self.num_blocks_per_stage[1],
+            num_se_blocks=0,
+        )
+        self.stage3 = self._make_stage(
+            int(256 * self.width_multipliers[2]),
+            self.num_blocks_per_stage[2],
+            num_se_blocks=int(self.num_blocks_per_stage[2] // 2) if self.use_se else 0,
+        )
+        self.stage4 = self._make_stage(
+            int(512 * self.width_multipliers[3]),
+            self.num_blocks_per_stage[3],
+            num_se_blocks=self.num_blocks_per_stage[3] if self.use_se else 0,
+        )
+
+    def forward(self, x: Tensor) -> list[Tensor]:
+        outs = []
+        x = self.stage0(x)
+        outs.append(x)
+        x = self.stage1(x)
+        outs.append(x)
+        x = self.stage2(x)
+        outs.append(x)
+        x = self.stage3(x)
+        outs.append(x)
+
+        return outs
+
+    def export_mode(self, export: bool = True) -> None:
+        """Sets the module to export mode.
+
+        Reparameterizes the model to obtain a plain CNN-like structure for inference.
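+        Every L{MobileOneBlock} folds its parallel conv+BN branches into a single
+        convolution, so the multi-branch training graph disappears.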
+ TODO: add more details + + @warning: The reparametrization is destructive and cannot be reversed! + + @type export: bool + @param export: Whether to set the export mode to True or False. Defaults to True. + """ + if export: + for module in self.modules(): + if hasattr(module, "reparameterize"): + module.reparameterize() + + def _make_stage(self, planes: int, num_blocks: int, num_se_blocks: int): + """Build a stage of MobileOne model. + + @type planes: int + @param planes: Number of output channels. + @type num_blocks: int + @param num_blocks: Number of blocks in this stage. + @type num_se_blocks: int + @param num_se_blocks: Number of SE blocks in this stage. + @rtype: nn.Sequential + @return: A stage of MobileOne model. + """ + # Get strides for all layers + strides = [2] + [1] * (num_blocks - 1) + blocks = [] + for ix, stride in enumerate(strides): + use_se = False + if num_se_blocks > num_blocks: + raise ValueError( + "Number of SE blocks cannot " "exceed number of layers." + ) + if ix >= (num_blocks - num_se_blocks): + use_se = True + + # Depthwise conv + blocks.append( + MobileOneBlock( + in_channels=self.in_planes, + out_channels=self.in_planes, + kernel_size=3, + stride=stride, + padding=1, + groups=self.in_planes, + use_se=use_se, + num_conv_branches=self.num_conv_branches, + ) + ) + # Pointwise conv + blocks.append( + MobileOneBlock( + in_channels=self.in_planes, + out_channels=planes, + kernel_size=1, + stride=1, + padding=0, + groups=1, + use_se=use_se, + num_conv_branches=self.num_conv_branches, + ) + ) + self.in_planes = planes + self.cur_layer_idx += 1 + return nn.Sequential(*blocks) + + +class MobileOneBlock(nn.Module): + """MobileOne building block. + + This block has a multi-branched architecture at train-time and + plain-CNN style architecture at inference time For more details, + please refer to our paper: U{An Improved One millisecond Mobile + Backbone} + """ + + def __init__( + self, + in_channels: int, + out_channels: int, + kernel_size: int, + stride: int = 1, + padding: int = 0, + groups: int = 1, + use_se: bool = False, + num_conv_branches: int = 1, + ): + """Construct a MobileOneBlock module. + + @type in_channels: int + @param in_channels: Number of channels in the input. + @type out_channels: int + @param out_channels: Number of channels produced by the block. + @type kernel_size: int + @param kernel_size: Size of the convolution kernel. + @type stride: int + @param stride: Stride size. Defaults to 1. + @type padding: int + @param padding: Zero-padding size. Defaults to 0. + @type dilation: int + @param dilation: Kernel dilation factor. Defaults to 1. + @type groups: int + @param groups: Group number. Defaults to 1. + @type use_se: bool + @param use_se: Whether to use SE-ReLU activations. Defaults to False. + @type num_conv_branches: int + @param num_conv_branches: Number of linear conv branches. Defaults to 1. 
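+
+        In L{MobileOne._make_stage}, for example, the depthwise block uses a 3x3
+        kernel with C{groups=in_channels}, while the pointwise block that follows
+        it uses a 1x1 kernel with C{groups=1}.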
+ """ + super().__init__() + + self.groups = groups + self.stride = stride + self.kernel_size = kernel_size + self.in_channels = in_channels + self.out_channels = out_channels + self.num_conv_branches = num_conv_branches + self.inference_mode = False + + # Check if SE-ReLU is requested + if use_se: + self.se = SqueezeExciteBlock( + in_channels=out_channels, + intermediate_channels=int(out_channels * 0.0625), + ) + else: + self.se = nn.Identity() # type: ignore + self.activation = nn.ReLU() + + # Re-parameterizable skip connection + self.rbr_skip = ( + nn.BatchNorm2d(num_features=in_channels) + if out_channels == in_channels and stride == 1 + else None + ) + + # Re-parameterizable conv branches + rbr_conv = list() + for _ in range(self.num_conv_branches): + rbr_conv.append( + ConvModule( + in_channels=self.in_channels, + out_channels=self.out_channels, + kernel_size=kernel_size, + stride=self.stride, + padding=padding, + groups=self.groups, + activation=nn.Identity(), + ) + ) + self.rbr_conv: list[nn.Sequential] = nn.ModuleList(rbr_conv) # type: ignore + + # Re-parameterizable scale branch + self.rbr_scale = None + if kernel_size > 1: + self.rbr_scale = ConvModule( + in_channels=self.in_channels, + out_channels=self.out_channels, + kernel_size=1, + stride=self.stride, + padding=0, + groups=self.groups, + activation=nn.Identity(), + ) + + def forward(self, inputs: Tensor): + """Apply forward pass.""" + # Inference mode forward pass. + if self.inference_mode: + return self.activation(self.se(self.reparam_conv(inputs))) + + # Multi-branched train-time forward pass. + # Skip branch output + identity_out = 0 + if self.rbr_skip is not None: + identity_out = self.rbr_skip(inputs) + + # Scale branch output + scale_out = 0 + if self.rbr_scale is not None: + scale_out = self.rbr_scale(inputs) + + # Other branches + out = scale_out + identity_out + for ix in range(self.num_conv_branches): + out += self.rbr_conv[ix](inputs) + + return self.activation(self.se(out)) + + def reparameterize(self): + """Following works like U{RepVGG: Making VGG-style ConvNets Great Again + } + architecture used at training time to obtain a plain CNN-like structure + for inference. + """ + if self.inference_mode: + return + kernel, bias = self._get_kernel_bias() + self.reparam_conv = nn.Conv2d( + in_channels=self.rbr_conv[0][0].in_channels, + out_channels=self.rbr_conv[0][0].out_channels, + kernel_size=self.rbr_conv[0][0].kernel_size, + stride=self.rbr_conv[0][0].stride, + padding=self.rbr_conv[0][0].padding, + dilation=self.rbr_conv[0][0].dilation, + groups=self.rbr_conv[0][0].groups, + bias=True, + ) + self.reparam_conv.weight.data = kernel + assert self.reparam_conv.bias is not None + self.reparam_conv.bias.data = bias + + # Delete un-used branches + for para in self.parameters(): + para.detach_() + self.__delattr__("rbr_conv") + self.__delattr__("rbr_scale") + if hasattr(self, "rbr_skip"): + self.__delattr__("rbr_skip") + + self.inference_mode = True + + def _get_kernel_bias(self) -> tuple[Tensor, Tensor]: + """Method to obtain re-parameterized kernel and bias. + Reference: U{https://github.com/DingXiaoH/RepVGG/blob/main/repvgg.py#L83} + + @rtype: tuple[Tensor, Tensor] + @return: Tuple of (kernel, bias) after re-parameterization. + """ + # get weights and bias of scale branch + kernel_scale = torch.zeros(()) + bias_scale = torch.zeros(()) + if self.rbr_scale is not None: + kernel_scale, bias_scale = self._fuse_bn_tensor(self.rbr_scale) + # Pad scale branch kernel to match conv branch kernel size. 
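+            # e.g. with kernel_size=3 the 1x1 kernel is zero-padded by 1 on each
+            # side, turning it into an equivalent 3x3 kernel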
+ pad = self.kernel_size // 2 + kernel_scale = torch.nn.functional.pad(kernel_scale, [pad, pad, pad, pad]) + + # get weights and bias of skip branch + kernel_identity = torch.zeros(()) + bias_identity = torch.zeros(()) + if self.rbr_skip is not None: + kernel_identity, bias_identity = self._fuse_bn_tensor(self.rbr_skip) + + # get weights and bias of conv branches + kernel_conv = torch.zeros(()) + bias_conv = torch.zeros(()) + for ix in range(self.num_conv_branches): + _kernel, _bias = self._fuse_bn_tensor(self.rbr_conv[ix]) + kernel_conv = kernel_conv + _kernel + bias_conv = bias_conv + _bias + + kernel_final = kernel_conv + kernel_scale + kernel_identity + bias_final = bias_conv + bias_scale + bias_identity + return kernel_final, bias_final + + def _fuse_bn_tensor(self, branch) -> tuple[Tensor, Tensor]: + """Method to fuse batchnorm layer with preceeding conv layer. + Reference: U{https://github.com/DingXiaoH/RepVGG/blob/main/repvgg.py#L95} + + @rtype: tuple[Tensor, Tensor] + @return: Tuple of (kernel, bias) after fusing batchnorm. + """ + if isinstance(branch, nn.Sequential): + kernel = branch[0].weight + running_mean = branch[1].running_mean + running_var = branch[1].running_var + gamma = branch[1].weight + beta = branch[1].bias + eps = branch[1].eps + elif isinstance(branch, nn.BatchNorm2d): + if not hasattr(self, "id_tensor"): + input_dim = self.in_channels // self.groups + kernel_value = torch.zeros( + (self.in_channels, input_dim, self.kernel_size, self.kernel_size), + dtype=branch.weight.dtype, + device=branch.weight.device, + ) + for i in range(self.in_channels): + kernel_value[ + i, i % input_dim, self.kernel_size // 2, self.kernel_size // 2 + ] = 1 + self.id_tensor = kernel_value + kernel = self.id_tensor + running_mean = branch.running_mean + running_var = branch.running_var + gamma = branch.weight + beta = branch.bias + eps = branch.eps + else: + raise NotImplementedError( + "Only nn.BatchNorm2d and nn.Sequential " "are supported." + ) + assert running_var is not None + std = (running_var + eps).sqrt() + t = (gamma / std).reshape(-1, 1, 1, 1) + return kernel * t, beta - running_mean * gamma / std diff --git a/luxonis_train/nodes/reppan_neck.py b/luxonis_train/nodes/reppan_neck.py new file mode 100644 index 00000000..26fed274 --- /dev/null +++ b/luxonis_train/nodes/reppan_neck.py @@ -0,0 +1,164 @@ +"""Implementation of the RepPANNeck module. + +Adapted from U{YOLOv6: A Single-Stage Object Detection Framework for Industrial +Applications}. +It has the balance of feature fusion ability and hardware efficiency. +""" + + +from typing import Literal, cast + +from torch import Tensor, nn + +from luxonis_train.nodes.blocks import RepDownBlock, RepUpBlock +from luxonis_train.utils.general import make_divisible + +from .base_node import BaseNode + + +class RepPANNeck(BaseNode[list[Tensor], list[Tensor]]): + def __init__( + self, + num_heads: Literal[2, 3, 4] = 3, + channels_list: list[int] | None = None, + num_repeats: list[int] | None = None, + depth_mul: float = 0.33, + width_mul: float = 0.25, + **kwargs, + ): + """Constructor for the RepPANNeck module. + + @type num_heads: Literal[2,3,4] + @param num_heads: Number of output heads. Defaults to 3. ***Note: Should be same + also on head in most cases.*** + @type channels_list: list[int] | None + @param channels_list: List of number of channels for each block. Defaults to + C{[256, 128, 128, 256, 256, 512]}. + @type num_repeats: list[int] | None + @param num_repeats: List of number of repeats of RepVGGBlock. 
Defaults to C{[12, + 12, 12, 12]}. + @type depth_mul: float + @param depth_mul: Depth multiplier. Defaults to 0.33. + @type width_mul: float + @param width_mul: Width multiplier. Defaults to 0.25. + """ + + super().__init__(**kwargs) + + num_repeats = num_repeats or [12, 12, 12, 12] + channels_list = channels_list or [256, 128, 128, 256, 256, 512] + + self.num_heads = num_heads + + channels_list = [make_divisible(ch * width_mul, 8) for ch in channels_list] + num_repeats = [ + (max(round(i * depth_mul), 1) if i > 1 else i) for i in num_repeats + ] + channels_list, num_repeats = self._fit_to_num_heads(channels_list, num_repeats) + + self.up_blocks = nn.ModuleList() + + in_channels = cast(list[int], self.in_channels)[-1] + out_channels = channels_list[0] + in_channels_next = cast(list[int], self.in_channels)[-2] + curr_num_repeats = num_repeats[0] + up_out_channel_list = [in_channels] # used in DownBlocks + + for i in range(1, num_heads): + curr_up_block = RepUpBlock( + in_channels=in_channels, + in_channels_next=in_channels_next, + out_channels=out_channels, + num_repeats=curr_num_repeats, + ) + up_out_channel_list.append(out_channels) + self.up_blocks.append(curr_up_block) + if len(self.up_blocks) == (num_heads - 1): + up_out_channel_list.reverse() + break + + in_channels = out_channels + out_channels = channels_list[i] + in_channels_next = cast(list[int], self.in_channels)[-1 - (i + 1)] + curr_num_repeats = num_repeats[i] + + self.down_blocks = nn.ModuleList() + channels_list_down_blocks = channels_list[(num_heads - 1) :] + num_repeats_down_blocks = num_repeats[(num_heads - 1) :] + + in_channels = out_channels + downsample_out_channels = channels_list_down_blocks[0] + in_channels_next = up_out_channel_list[0] + out_channels = channels_list_down_blocks[1] + curr_num_repeats = num_repeats_down_blocks[0] + + for i in range(1, num_heads): + curr_down_block = RepDownBlock( + in_channels=in_channels, + downsample_out_channels=downsample_out_channels, + in_channels_next=in_channels_next, + out_channels=out_channels, + num_repeats=curr_num_repeats, + ) + self.down_blocks.append(curr_down_block) + if len(self.down_blocks) == (num_heads - 1): + break + + in_channels = out_channels + downsample_out_channels = channels_list_down_blocks[2 * i] + in_channels_next = up_out_channel_list[i] + out_channels = channels_list_down_blocks[2 * i + 1] + curr_num_repeats = num_repeats_down_blocks[i] + + def forward(self, inputs: list[Tensor]) -> list[Tensor]: + x0 = inputs[-1] + up_block_outs = [] + for i, up_block in enumerate(self.up_blocks): + conv_out, x0 = up_block(x0, inputs[-1 - (i + 1)]) + up_block_outs.append(conv_out) + up_block_outs.reverse() + + outs = [x0] + for i, down_block in enumerate(self.down_blocks): + x0 = down_block(x0, up_block_outs[i]) + outs.append(x0) + return outs + + def _fit_to_num_heads( + self, channels_list: list[int], num_repeats: list[int] + ) -> tuple[list[int], list[int]]: + """Fits channels_list and num_repeats to num_heads by removing or adding items. + + Also scales the numbers based on offset + """ + if self.num_heads == 3: + ... 
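+            # The default lists are laid out for 3 heads, so no change is needed.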
+        elif self.num_heads == 2:
+            channels_list = [channels_list[0], channels_list[4], channels_list[5]]
+            num_repeats = [num_repeats[0], num_repeats[3]]
+        elif self.num_heads == 4:
+            channels_list = [
+                channels_list[0],
+                channels_list[1],
+                channels_list[1] // 2,
+                channels_list[1] // 2,
+                channels_list[1],
+                channels_list[2],
+                channels_list[3],
+                channels_list[4],
+                channels_list[5],
+            ]
+            num_repeats = [
+                num_repeats[0],
+                num_repeats[1],
+                num_repeats[1],
+                num_repeats[2],
+                num_repeats[2],
+                num_repeats[3],
+            ]
+        else:
+            raise ValueError(
+                f"Specified number of heads ({self.num_heads}) not supported."
+            )
+
+        return channels_list, num_repeats
diff --git a/luxonis_train/nodes/repvgg.py b/luxonis_train/nodes/repvgg.py
new file mode 100644
index 00000000..44579fa5
--- /dev/null
+++ b/luxonis_train/nodes/repvgg.py
@@ -0,0 +1,144 @@
+from copy import deepcopy
+
+import torch.utils.checkpoint as checkpoint
+from torch import Tensor, nn
+
+from luxonis_train.nodes.blocks import RepVGGBlock
+
+from .base_node import BaseNode
+
+
+class RepVGG(BaseNode):
+    """Implementation of RepVGG backbone.
+
+    Source: U{https://github.com/DingXiaoH/RepVGG}
+    @license: U{MIT}.
+
+    @todo: technical documentation
+    """
+
+    in_channels: int
+
+    VARIANTS_SETTINGS = {
+        "A0": {
+            "num_blocks": [2, 4, 14, 1],
+            "num_classes": 1000,
+            "width_multiplier": [0.75, 0.75, 0.75, 2.5],
+        },
+        "A1": {
+            "num_blocks": [2, 4, 14, 1],
+            "num_classes": 1000,
+            "width_multiplier": [1, 1, 1, 2.5],
+        },
+        "A2": {
+            "num_blocks": [2, 4, 14, 1],
+            "num_classes": 1000,
+            "width_multiplier": [1.5, 1.5, 1.5, 2.75],
+        },
+    }
+
+    def __new__(cls, **kwargs):
+        variant = kwargs.pop("variant", "A0")
+
+        if variant not in RepVGG.VARIANTS_SETTINGS:
+            raise ValueError(
+                f"RepVGG model variant should be in {list(RepVGG.VARIANTS_SETTINGS.keys())}"
+            )
+
+        overrides = deepcopy(kwargs)
+        kwargs.clear()
+        kwargs.update(RepVGG.VARIANTS_SETTINGS[variant])
+        kwargs.update(overrides)
+        # NOTE: `cls.__new__(cls)` would re-enter this method and recurse
+        # forever; the instance must be allocated through the parent class.
+        return super().__new__(cls)
+
+    def __init__(
+        self,
+        deploy: bool = False,
+        override_groups_map: dict[int, int] | None = None,
+        use_se: bool = False,
+        use_checkpoint: bool = False,
+        num_blocks: list[int] | None = None,
+        width_multiplier: list[float] | None = None,
+        **kwargs,
+    ):
+        """Constructor for the RepVGG module.
+
+        @type deploy: bool
+        @param deploy: Whether to use the model in deploy mode.
+        @type override_groups_map: dict[int, int] | None
+        @param override_groups_map: Dictionary mapping layer index to number of groups.
+        @type use_se: bool
+        @param use_se: Whether to use Squeeze-and-Excitation blocks.
+        @type use_checkpoint: bool
+        @param use_checkpoint: Whether to use checkpointing.
+        @type num_blocks: list[int] | None
+        @param num_blocks: Number of blocks in each stage.
+        @type width_multiplier: list[float] | None
+        @param width_multiplier: Width multiplier for each stage. 
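+        @type kwargs: Any
+        @param kwargs: Additional arguments to pass to L{BaseNode}.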
+ """ + super().__init__(**kwargs) + num_blocks = num_blocks or [2, 4, 14, 1] + width_multiplier = width_multiplier or [0.75, 0.75, 0.75, 2.5] + self.deploy = deploy + self.override_groups_map = override_groups_map or {} + assert 0 not in self.override_groups_map + self.use_se = use_se + self.use_checkpoint = use_checkpoint + + self.in_planes = min(64, int(64 * width_multiplier[0])) + self.stage0 = RepVGGBlock( + in_channels=self.in_channels, + out_channels=self.in_planes, + kernel_size=3, + stride=2, + padding=1, + deploy=self.deploy, + use_se=self.use_se, + ) + self.cur_layer_idx = 1 + self.stage1 = self._make_stage( + int(64 * width_multiplier[0]), num_blocks[0], stride=2 + ) + self.stage2 = self._make_stage( + int(128 * width_multiplier[1]), num_blocks[1], stride=2 + ) + self.stage3 = self._make_stage( + int(256 * width_multiplier[2]), num_blocks[2], stride=2 + ) + self.stage4 = self._make_stage( + int(512 * width_multiplier[3]), num_blocks[3], stride=2 + ) + self.gap = nn.AdaptiveAvgPool2d(output_size=1) + + def forward(self, inputs: Tensor) -> list[Tensor]: + outputs = [] + out = self.stage0(inputs) + for stage in (self.stage1, self.stage2, self.stage3, self.stage4): + for block in stage: + if self.use_checkpoint: + out = checkpoint.checkpoint(block, out) + else: + out = block(out) + outputs.append(out) + return outputs + + def _make_stage(self, planes: int, num_blocks: int, stride: int): + strides = [stride] + [1] * (num_blocks - 1) + blocks = [] + for stride in strides: + cur_groups = self.override_groups_map.get(self.cur_layer_idx, 1) + blocks.append( + RepVGGBlock( + in_channels=self.in_planes, + out_channels=planes, + kernel_size=3, + stride=stride, + padding=1, + groups=cur_groups, + deploy=self.deploy, + use_se=self.use_se, + ) + ) + self.in_planes = planes + self.cur_layer_idx += 1 + return nn.ModuleList(blocks) diff --git a/luxonis_train/nodes/resnet18.py b/luxonis_train/nodes/resnet18.py new file mode 100644 index 00000000..9c38681a --- /dev/null +++ b/luxonis_train/nodes/resnet18.py @@ -0,0 +1,59 @@ +"""ResNet18 backbone. + +Source: U{https://pytorch.org/vision/main/models/generated/ +torchvision.models.resnet18.html} +@license: U{PyTorch} +""" + + +import torchvision +from torch import Tensor + +from .base_node import BaseNode + + +class ResNet18(BaseNode[Tensor, list[Tensor]]): + attach_index: int = -1 + + def __init__( + self, + channels_list: list[int] | None = None, + download_weights: bool = False, + **kwargs, + ): + """Implementation of the ResNet18 backbone. + + TODO: add more info + + @type channels_list: list[int] | None + @param channels_list: List of channels to return. + If unset, defaults to [64, 128, 256, 512]. + + @type download_weights: bool + @param download_weights: If True download weights from imagenet. + Defaults to False. 
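+        @type kwargs: Any
+        @param kwargs: Additional arguments to pass to L{BaseNode}.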
+ """ + super().__init__(**kwargs) + + self.backbone = torchvision.models.resnet18( + weights="DEFAULT" if download_weights else None + ) + self.channels_list = channels_list or [64, 128, 256, 512] + + def forward(self, x: Tensor) -> list[Tensor]: + outs = [] + x = self.backbone.conv1(x) + x = self.backbone.bn1(x) + x = self.backbone.relu(x) + x = self.backbone.maxpool(x) + + x = self.backbone.layer1(x) + outs.append(x) + x = self.backbone.layer2(x) + outs.append(x) + x = self.backbone.layer3(x) + outs.append(x) + x = self.backbone.layer4(x) + outs.append(x) + + return outs diff --git a/luxonis_train/nodes/rexnetv1.py b/luxonis_train/nodes/rexnetv1.py new file mode 100644 index 00000000..fb4de4b1 --- /dev/null +++ b/luxonis_train/nodes/rexnetv1.py @@ -0,0 +1,202 @@ +"""Implementation of the ReXNetV1 backbone. + +Source: U{https://github.com/clovaai/rexnet} +@license: U{MIT} +""" + + +import torch +from torch import Tensor, nn + +from luxonis_train.nodes.blocks import ( + ConvModule, +) +from luxonis_train.utils.general import make_divisible + +from .base_node import BaseNode + + +class ReXNetV1_lite(BaseNode[Tensor, list[Tensor]]): + attach_index: int = -1 + + def __init__( + self, + fix_head_stem: bool = False, + divisible_value: int = 8, + input_ch: int = 16, + final_ch: int = 164, + multiplier: float = 1.0, + kernel_sizes: int | list[int] = 3, + **kwargs, + ): + """ReXNetV1_lite backbone. + + @type fix_head_stem: bool + @param fix_head_stem: Whether to multiply head stem. Defaults to False. + @type divisible_value: int + @param divisible_value: Divisor used. Defaults to 8. + @type input_ch: int + @param input_ch: Starting channel dimension. Defaults to 16. + @type final_ch: int + @param final_ch: Final channel dimension. Defaults to 164. + @type multiplier: float + @param multiplier: Channel dimension multiplier. Defaults to 1.0. + @type kernel_sizes: int | list[int] + @param kernel_sizes: Kernel size for each block. Defaults to 3. 
+ """ + super().__init__(**kwargs) + + self.out_indices = [1, 4, 10, 16] + self.channels = [16, 48, 112, 184] + layers = [1, 2, 2, 3, 3, 5] + strides = [1, 2, 2, 2, 1, 2] + + kernel_sizes = ( + [kernel_sizes] * 6 if isinstance(kernel_sizes, int) else kernel_sizes + ) + + strides = sum( + [ + [element] + [1] * (layers[idx] - 1) + for idx, element in enumerate(strides) + ], + [], + ) + ts = [1] * layers[0] + [6] * sum(layers[1:]) + kernel_sizes = sum( + [[element] * layers[idx] for idx, element in enumerate(kernel_sizes)], [] + ) + self.num_convblocks = sum(layers[:]) + + features: list[nn.Module] = [] + inplanes = input_ch / multiplier if multiplier < 1.0 else input_ch + first_channel = 32 / multiplier if multiplier < 1.0 or fix_head_stem else 32 + first_channel = make_divisible( + int(round(first_channel * multiplier)), divisible_value + ) + + in_channels_group = [] + channels_group = [] + + features.append( + ConvModule( + 3, + first_channel, + kernel_size=3, + stride=2, + padding=1, + activation=nn.ReLU6(inplace=True), + ) + ) + + for i in range(self.num_convblocks): + inplanes_divisible = make_divisible( + int(round(inplanes * multiplier)), divisible_value + ) + if i == 0: + in_channels_group.append(first_channel) + channels_group.append(inplanes_divisible) + else: + in_channels_group.append(inplanes_divisible) + inplanes += final_ch / (self.num_convblocks - 1 * 1.0) + inplanes_divisible = make_divisible( + int(round(inplanes * multiplier)), divisible_value + ) + channels_group.append(inplanes_divisible) + + assert channels_group + for in_c, c, t, k, s in zip( + in_channels_group, channels_group, ts, kernel_sizes, strides, strict=True + ): + features.append( + LinearBottleneck( + in_channels=in_c, channels=c, t=t, kernel_size=k, stride=s + ) + ) + + pen_channels = ( + int(1280 * multiplier) if multiplier > 1 and not fix_head_stem else 1280 + ) + features.append( + ConvModule( + in_channels=c, # type: ignore + out_channels=pen_channels, + kernel_size=1, + activation=nn.ReLU6(inplace=True), + ) + ) + self.features = nn.Sequential(*features) + + def forward(self, x: Tensor) -> list[Tensor]: + outs = [] + for i, m in enumerate(self.features): + x = m(x) + if i in self.out_indices: + outs.append(x) + return outs + + +class LinearBottleneck(nn.Module): + def __init__( + self, + in_channels: int, + channels: int, + t: int, + kernel_size: int = 3, + stride: int = 1, + **kwargs, + ): + super(LinearBottleneck, self).__init__(**kwargs) + self.conv_shortcut = None + self.use_shortcut = stride == 1 and in_channels <= channels + self.in_channels = in_channels + self.out_channels = channels + out = [] + if t != 1: + dw_channels = in_channels * t + out.append( + ConvModule( + in_channels=in_channels, + out_channels=dw_channels, + kernel_size=1, + activation=nn.ReLU6(inplace=True), + ) + ) + else: + dw_channels = in_channels + out.append( + ConvModule( + in_channels=dw_channels, + out_channels=dw_channels * 1, + kernel_size=kernel_size, + stride=stride, + padding=(kernel_size // 2), + groups=dw_channels, + activation=nn.ReLU6(inplace=True), + ) + ) + out.append( + ConvModule( + in_channels=dw_channels, + out_channels=channels, + kernel_size=1, + activation=nn.Identity(), + ) + ) + + self.out = nn.Sequential(*out) + + def forward(self, x): + out = self.out(x) + + if self.use_shortcut: + # this results in a ScatterND node which isn't supported yet in myriad + # out[:, 0:self.in_channels] += x + a = out[:, : self.in_channels] + b = x + a = a + b + c = out[:, self.in_channels :] + d = torch.concat([a, c], 
dim=1)
+            return d
+
+        return out
diff --git a/luxonis_train/nodes/segmentation_head.py b/luxonis_train/nodes/segmentation_head.py
new file mode 100644
index 00000000..bdfe814d
--- /dev/null
+++ b/luxonis_train/nodes/segmentation_head.py
@@ -0,0 +1,53 @@
+"""Implementation of a basic segmentation head.
+
+Adapted from: U{https://github.com/pytorch/vision/blob/main/torchvision/models/segmentation/fcn.py}
+@license: U{BSD-3}
+"""
+
+
+import torch.nn as nn
+from torch import Tensor
+
+from luxonis_train.nodes.blocks import UpBlock
+from luxonis_train.utils.general import infer_upscale_factor
+from luxonis_train.utils.types import LabelType, Packet
+
+from .base_node import BaseNode
+
+
+class SegmentationHead(BaseNode[Tensor, Tensor]):
+    attach_index: int = -1
+    in_height: int
+    in_channels: int
+
+    def __init__(self, **kwargs):
+        """Basic segmentation FCN head.
+
+        Note that it does not ensure that the output is the same size as the input.
+
+        @type kwargs: Any
+        @param kwargs: Additional arguments to pass to L{BaseNode}.
+        """
+        super().__init__(task_type=LabelType.SEGMENTATION, **kwargs)
+
+        original_height = self.original_in_shape[2]
+        num_up = infer_upscale_factor(self.in_height, original_height, strict=False)
+
+        modules = []
+        in_channels = self.in_channels
+        for _ in range(int(num_up)):
+            modules.append(
+                UpBlock(in_channels=in_channels, out_channels=in_channels // 2)
+            )
+            in_channels //= 2
+
+        self.head = nn.Sequential(
+            *modules,
+            nn.Conv2d(in_channels, self.n_classes, kernel_size=1),
+        )
+
+    def wrap(self, output: Tensor) -> Packet[Tensor]:
+        return {"segmentation": [output]}
+
+    def forward(self, inputs: Tensor) -> Tensor:
+        return self.head(inputs)
diff --git a/luxonis_train/tools/__init__.py b/luxonis_train/tools/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/luxonis_train/tools/test_dataset.py b/luxonis_train/tools/test_dataset.py
new file mode 100644
index 00000000..33734214
--- /dev/null
+++ b/luxonis_train/tools/test_dataset.py
@@ -0,0 +1,135 @@
+import argparse
+import os
+
+import cv2
+import torch
+from luxonis_ml.data import (
+    LuxonisDataset,
+    TrainAugmentations,
+    ValAugmentations,
+)
+
+from luxonis_train.attached_modules.visualizers.utils import (
+    draw_bounding_box_labels,
+    draw_keypoint_labels,
+    draw_segmentation_labels,
+    get_unnormalized_images,
+)
+from luxonis_train.utils.config import Config
+from luxonis_train.utils.loaders import LuxonisLoaderTorch, collate_fn
+from luxonis_train.utils.types import LabelType
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "--config",
+        type=str,
+        required=True,
+        help="Configuration file to use",
+    )
+    parser.add_argument(
+        "--view",
+        type=str,
+        default="val",
+        help="Dataset view to use",
+    )
+    parser.add_argument(
+        "--no-display",
+        action="store_true",
+        help="Don't display images",
+    )
+    parser.add_argument(
+        "--save-dir",
+        type=str,
+        default=None,
+        help="Path to save directory; by default images are not saved",
+    )
+    parser.add_argument("opts", nargs=argparse.REMAINDER, help="Additional options")
+    args = parser.parse_args()
+
+    opts = args.opts or []
+    overrides = {}
+    if opts:
+        if len(opts) % 2 != 0:
+            raise ValueError("Override options should be a list of key-value pairs")
+        for i in range(0, len(opts), 2):
+            overrides[opts[i]] = opts[i + 1]
+
+    cfg = Config.get_config(args.config, overrides)
+
+    image_size = cfg.trainer.preprocessing.train_image_size
+
+    dataset = LuxonisDataset(
+        dataset_name=cfg.dataset.dataset_name,
+        team_id=cfg.dataset.team_id,
dataset_id=cfg.dataset.dataset_id, + bucket_type=cfg.dataset.bucket_type, + bucket_storage=cfg.dataset.bucket_storage, + ) + augmentations = ( + TrainAugmentations( + image_size=image_size, + augmentations=[ + i.model_dump() for i in cfg.trainer.preprocessing.augmentations + ], + train_rgb=cfg.trainer.preprocessing.train_rgb, + keep_aspect_ratio=cfg.trainer.preprocessing.keep_aspect_ratio, + ) + if args.view == "train" + else ValAugmentations( + image_size=image_size, + augmentations=[ + i.model_dump() for i in cfg.trainer.preprocessing.augmentations + ], + train_rgb=cfg.trainer.preprocessing.train_rgb, + keep_aspect_ratio=cfg.trainer.preprocessing.keep_aspect_ratio, + ) + ) + + loader_train = LuxonisLoaderTorch( + dataset, + view=args.view, + augmentations=augmentations, + ) + + pytorch_loader_train = torch.utils.data.DataLoader( + loader_train, + batch_size=4, + num_workers=1, + collate_fn=collate_fn, + ) + + save_dir = args.save_dir + if save_dir is not None: + os.makedirs(save_dir, exist_ok=True) + + counter = 0 + for data in pytorch_loader_train: + imgs, label_dict = data + images = get_unnormalized_images(cfg, imgs) + for i, img in enumerate(images): + for label_type, labels in label_dict.items(): + if label_type == LabelType.CLASSIFICATION: + continue + elif label_type == LabelType.BOUNDINGBOX: + img = draw_bounding_box_labels( + img, labels[labels[:, 0] == i][:, 2:], colors="yellow", width=1 + ) + elif label_type == LabelType.KEYPOINT: + img = draw_keypoint_labels( + img, labels[labels[:, 0] == i][:, 1:], colors="red" + ) + elif label_type == LabelType.SEGMENTATION: + img = draw_segmentation_labels( + img, labels[i], alpha=0.8, colors="#5050FF" + ) + + img_arr = img.permute(1, 2, 0).numpy() + img_arr = cv2.cvtColor(img_arr, cv2.COLOR_RGB2BGR) + if save_dir is not None: + counter += 1 + cv2.imwrite(os.path.join(save_dir, f"{counter}.png"), img_arr) + if not args.no_display: + cv2.imshow("img", img_arr) + if cv2.waitKey() == ord("q"): + exit() diff --git a/luxonis_train/utils/__init__.py b/luxonis_train/utils/__init__.py new file mode 100644 index 00000000..609304c3 --- /dev/null +++ b/luxonis_train/utils/__init__.py @@ -0,0 +1,5 @@ +from .assigners import * +from .config import * +from .loaders import * +from .optimizers import * +from .schedulers import * diff --git a/luxonis_train/utils/assigners/__init__.py b/luxonis_train/utils/assigners/__init__.py new file mode 100644 index 00000000..4d9bec9f --- /dev/null +++ b/luxonis_train/utils/assigners/__init__.py @@ -0,0 +1,4 @@ +from .atts_assigner import ATSSAssigner +from .tal_assigner import TaskAlignedAssigner + +__all__ = ["ATSSAssigner", "TaskAlignedAssigner"] diff --git a/luxonis_train/utils/assigners/atts_assigner.py b/luxonis_train/utils/assigners/atts_assigner.py new file mode 100644 index 00000000..26b4dc23 --- /dev/null +++ b/luxonis_train/utils/assigners/atts_assigner.py @@ -0,0 +1,261 @@ +import torch +import torch.nn.functional as F +from torch import Tensor, nn + +from .utils import ( + batch_iou, + bbox_iou, + candidates_in_gt, + fix_collisions, +) + + +class ATSSAssigner(nn.Module): + def __init__(self, n_classes: int, topk: int = 9): + """Adaptive Training Sample Selection Assigner, adapted + from U{Bridging the Gap Between Anchor-based and Anchor-free Detection via + Adaptive Training Sample Selection}. 
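+        Positive samples are selected from the top-k anchors closest to each GT
+        center and thresholded by the mean plus standard deviation of their IoUs.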
+        Code is adapted from: U{https://github.com/Nioolek/PPYOLOE_pytorch/blob/master/
+        ppyoloe/assigner/atss_assigner.py} and
+        U{https://github.com/fcjian/TOOD/blob/master/mmdet/core/bbox/
+        assigners/atss_assigner.py}
+
+        @type n_classes: int
+        @param n_classes: Number of classes in the dataset.
+        @type topk: int
+        @param topk: Number of anchors considered in selection. Defaults to 9.
+        """
+        super().__init__()
+
+        self.topk = topk
+        self.n_classes = n_classes
+
+    def forward(
+        self,
+        anchor_bboxes: Tensor,
+        n_level_bboxes: list[int],
+        gt_labels: Tensor,
+        gt_bboxes: Tensor,
+        mask_gt: Tensor,
+        pred_bboxes: Tensor,
+    ) -> tuple[Tensor, Tensor, Tensor, Tensor]:
+        """Assigner's forward method which generates final assignments.
+
+        @type anchor_bboxes: Tensor
+        @param anchor_bboxes: Anchor bboxes of shape [n_anchors, 4]
+        @type n_level_bboxes: list[int]
+        @param n_level_bboxes: Number of bboxes per level
+        @type gt_labels: Tensor
+        @param gt_labels: Initial GT labels [bs, n_max_boxes, 1]
+        @type gt_bboxes: Tensor
+        @param gt_bboxes: Initial GT bboxes [bs, n_max_boxes, 4]
+        @type mask_gt: Tensor
+        @param mask_gt: Mask for valid GTs [bs, n_max_boxes, 1]
+        @type pred_bboxes: Tensor
+        @param pred_bboxes: Predicted bboxes of shape [bs, n_anchors, 4]
+        @rtype: tuple[Tensor, Tensor, Tensor, Tensor]
+        @return: Assigned labels of shape [bs, n_anchors], assigned bboxes of shape [bs,
+            n_anchors, 4], assigned scores of shape [bs, n_anchors, n_classes] and
+            output positive mask of shape [bs, n_anchors].
+        """
+
+        self.n_anchors = anchor_bboxes.size(0)
+        self.bs = gt_bboxes.size(0)
+        self.n_max_boxes = gt_bboxes.size(1)
+
+        if self.n_max_boxes == 0:
+            device = gt_bboxes.device
+            return (
+                torch.full([self.bs, self.n_anchors], self.n_classes).to(device),
+                torch.zeros([self.bs, self.n_anchors, 4]).to(device),
+                torch.zeros([self.bs, self.n_anchors, self.n_classes]).to(device),
+                torch.zeros([self.bs, self.n_anchors]).to(device),
+            )
+
+        gt_bboxes_flat = gt_bboxes.reshape([-1, 4])
+
+        # Compute IoU between all gt and anchor bboxes
+        overlaps = bbox_iou(gt_bboxes_flat, anchor_bboxes)
+        overlaps = overlaps.reshape([self.bs, -1, self.n_anchors])
+
+        # Compute center distance between all gt and anchor bboxes
+        gt_centers = self._get_bbox_center(gt_bboxes_flat)
+        anchor_centers = self._get_bbox_center(anchor_bboxes)
+        distances = (
+            (gt_centers[:, None, :] - anchor_centers[None, :, :]).pow(2).sum(-1).sqrt()
+        )
+        distances = distances.reshape([self.bs, -1, self.n_anchors])
+
+        # Select candidates based on the center distance
+        is_in_topk, topk_idxs = self._select_topk_candidates(
+            distances, n_level_bboxes, mask_gt
+        )
+
+        # Compute threshold and selected positive candidates based on it
+        is_pos = self._get_positive_samples(is_in_topk, topk_idxs, overlaps)
+
+        # Select candidates inside GT
+        is_in_gts = candidates_in_gt(anchor_centers, gt_bboxes_flat)
+        is_in_gts = torch.reshape(is_in_gts, (self.bs, self.n_max_boxes, -1))
+
+        # Final positive candidates
+        mask_pos = is_pos * is_in_gts * mask_gt
+
+        # If an anchor box is assigned to multiple gts, the one with the highest IoU is selected
+        assigned_gt_idx, mask_pos_sum, mask_pos = fix_collisions(
+            mask_pos, overlaps, self.n_max_boxes
+        )
+
+        # Generate final assignments based on masks
+        assigned_labels, assigned_bboxes, assigned_scores = self._get_final_assignments(
+            gt_labels, gt_bboxes, assigned_gt_idx, mask_pos_sum
+        )
+
+        # Soft label with IoU
+        if pred_bboxes is not None:
+            ious = batch_iou(gt_bboxes, pred_bboxes) * mask_pos
+            ious = ious.max(dim=-2)[0].unsqueeze(-1)
+            assigned_scores *= ious
+
+        out_mask_positive = mask_pos_sum.bool()
+
+        return (
+            assigned_labels.long(),
+            assigned_bboxes,
+            assigned_scores,
+            out_mask_positive,
+        )
+
+    def _get_bbox_center(self, bbox: Tensor) -> Tensor:
+        """Computes centers of bbox with shape [N, 4]."""
+        cx = (bbox[:, 0] + bbox[:, 2]) / 2.0
+        cy = (bbox[:, 1] + bbox[:, 3]) / 2.0
+        return torch.stack((cx, cy), dim=1).to(bbox.device)
+
+    def _select_topk_candidates(
+        self, distances: Tensor, n_level_bboxes: list[int], mask_gt: Tensor
+    ) -> tuple[Tensor, Tensor]:
+        """Select k anchors whose centers are closest to GT.
+
+        @type distances: Tensor
+        @param distances: Distances between GT and anchor centers.
+        @type n_level_bboxes: list[int]
+        @param n_level_bboxes: list of number of bboxes per level.
+        @type mask_gt: Tensor
+        @param mask_gt: Mask for valid GT per image.
+        @rtype: tuple[Tensor, Tensor]
+        @return: Mask of selected anchors and indices of selected anchors.
+        """
+        mask_gt = mask_gt.repeat(1, 1, self.topk).bool()
+        level_distances = torch.split(distances, n_level_bboxes, dim=-1)
+        is_in_topk_list = []
+        topk_idxs = []
+        start_idx = 0
+        for per_level_distances, per_level_boxes in zip(
+            level_distances, n_level_bboxes
+        ):
+            end_idx = start_idx + per_level_boxes
+            selected_k = min(self.topk, per_level_boxes)
+            _, per_level_topk_idxs = per_level_distances.topk(
+                selected_k, dim=-1, largest=False
+            )
+            topk_idxs.append(per_level_topk_idxs + start_idx)
+            per_level_topk_idxs = torch.where(
+                mask_gt, per_level_topk_idxs, torch.zeros_like(per_level_topk_idxs)
+            )
+            is_in_topk = F.one_hot(per_level_topk_idxs, per_level_boxes).sum(dim=-2)
+            is_in_topk = torch.where(
+                is_in_topk > 1, torch.zeros_like(is_in_topk), is_in_topk
+            )
+            is_in_topk_list.append(is_in_topk.to(distances.dtype))
+            start_idx = end_idx
+
+        is_in_topk_list = torch.cat(is_in_topk_list, dim=-1)
+        topk_idxs = torch.cat(topk_idxs, dim=-1)
+        return is_in_topk_list, topk_idxs
+
+    def _get_positive_samples(
+        self,
+        is_in_topk: Tensor,
+        topk_idxs: Tensor,
+        overlaps: Tensor,
+    ) -> Tensor:
+        """Computes threshold and returns mask for samples over threshold.
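+
+        The threshold is computed per GT as the mean plus the standard
+        deviation of the IoUs of its top-k candidate anchors.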
+
+        @type is_in_topk: Tensor
+        @param is_in_topk: Mask of selected anchors [bs, n_max_boxes, n_anchors]
+        @type topk_idxs: Tensor
+        @param topk_idxs: Indices of selected anchors [bs, n_max_boxes, topk * n_levels]
+        @type overlaps: Tensor
+        @param overlaps: IoUs between GTs and anchors [bs, n_max_boxes, n_anchors]
+        @rtype: Tensor
+        @return: Mask of positive samples [bs, n_max_boxes, n_anchors]
+        """
+        n_bs_max_boxes = self.bs * self.n_max_boxes
+        _candidate_overlaps = torch.where(
+            is_in_topk > 0, overlaps, torch.zeros_like(overlaps)
+        )
+        topk_idxs = topk_idxs.reshape([n_bs_max_boxes, -1])
+        assist_idxs = self.n_anchors * torch.arange(
+            n_bs_max_boxes, device=topk_idxs.device
+        )
+        assist_idxs = assist_idxs[:, None]
+        flatten_idxs = topk_idxs + assist_idxs
+        candidate_overlaps = _candidate_overlaps.reshape(-1)[flatten_idxs]
+        candidate_overlaps = candidate_overlaps.reshape([self.bs, self.n_max_boxes, -1])
+
+        overlaps_mean_per_gt = candidate_overlaps.mean(dim=-1, keepdim=True)
+        overlaps_std_per_gt = candidate_overlaps.std(dim=-1, keepdim=True)
+        overlaps_thr_per_gt = overlaps_mean_per_gt + overlaps_std_per_gt
+
+        is_pos = torch.where(
+            _candidate_overlaps > overlaps_thr_per_gt.repeat([1, 1, self.n_anchors]),
+            is_in_topk,
+            torch.zeros_like(is_in_topk),
+        )
+        return is_pos
+
+    def _get_final_assignments(
+        self,
+        gt_labels: Tensor,
+        gt_bboxes: Tensor,
+        assigned_gt_idx: Tensor,
+        mask_pos_sum: Tensor,
+    ) -> tuple[Tensor, Tensor, Tensor]:
+        """Generate final assignments based on the mask.
+
+        @type gt_labels: Tensor
+        @param gt_labels: Initial GT labels [bs, n_max_boxes, 1]
+        @type gt_bboxes: Tensor
+        @param gt_bboxes: Initial GT bboxes [bs, n_max_boxes, 4]
+        @type assigned_gt_idx: Tensor
+        @param assigned_gt_idx: Indices of matched GTs [bs, n_anchors]
+        @type mask_pos_sum: Tensor
+        @param mask_pos_sum: Mask of matched GTs [bs, n_anchors]
+        @rtype: tuple[Tensor, Tensor, Tensor]
+        @return: Assigned labels of shape [bs, n_anchors], assigned bboxes of shape [bs,
+            n_anchors, 4], assigned scores of shape [bs, n_anchors, n_classes].
+        """
+        # assigned target labels
+        batch_idx = torch.arange(
+            self.bs, dtype=gt_labels.dtype, device=gt_labels.device
+        )
+        batch_idx = batch_idx[..., None]
+        assigned_gt_idx = (assigned_gt_idx + batch_idx * self.n_max_boxes).long()
+        assigned_labels = gt_labels.flatten()[assigned_gt_idx.flatten()]
+        assigned_labels = assigned_labels.reshape([self.bs, self.n_anchors])
+        assigned_labels = torch.where(
+            mask_pos_sum > 0,
+            assigned_labels,
+            torch.full_like(assigned_labels, self.n_classes),
+        )
+
+        # assigned target boxes
+        assigned_bboxes = gt_bboxes.reshape([-1, 4])[assigned_gt_idx.flatten()]
+        assigned_bboxes = assigned_bboxes.reshape([self.bs, self.n_anchors, 4])
+
+        # assigned target scores
+        assigned_scores = F.one_hot(assigned_labels.long(), self.n_classes + 1).float()
+        assigned_scores = assigned_scores[:, :, : self.n_classes]
+
+        return assigned_labels, assigned_bboxes, assigned_scores
diff --git a/luxonis_train/utils/assigners/tal_assigner.py b/luxonis_train/utils/assigners/tal_assigner.py
new file mode 100644
index 00000000..0765ad6a
--- /dev/null
+++ b/luxonis_train/utils/assigners/tal_assigner.py
@@ -0,0 +1,233 @@
+import torch
+import torch.nn.functional as F
+from torch import Tensor, nn
+
+from .utils import batch_iou, candidates_in_gt, fix_collisions
+
+
+class TaskAlignedAssigner(nn.Module):
+    def __init__(
+        self,
+        n_classes: int,
+        topk: int = 13,
+        alpha: float = 1.0,
+        beta: float = 6.0,
+        eps: float = 1e-9,
+    ):
+        """Task Aligned Assigner.
+
+        Adapted from: U{TOOD: Task-aligned One-stage Object Detection}.
+        Code is adapted from: U{https://github.com/Nioolek/PPYOLOE_pytorch/blob/master/ppyoloe/assigner/tal_assigner.py}.
+
+        @license: U{Apache License, Version 2.0}
+
+        @type n_classes: int
+        @param n_classes: Number of classes in the dataset.
+        @type topk: int
+        @param topk: Number of anchors considered in selection. Defaults to 13.
+        @type alpha: float
+        @param alpha: Exponent of the classification score in the alignment
+            metric. Defaults to 1.0.
+        @type beta: float
+        @param beta: Exponent of the IoU in the alignment metric. Defaults to 6.0.
+        @type eps: float
+        @param eps: Small constant for numerical stability. Defaults to 1e-9.
+        """
+        super().__init__()
+
+        self.n_classes = n_classes
+        self.topk = topk
+        self.alpha = alpha
+        self.beta = beta
+        self.eps = eps
+
+    @torch.no_grad()
+    def forward(
+        self,
+        pred_scores: Tensor,
+        pred_bboxes: Tensor,
+        anchor_points: Tensor,
+        gt_labels: Tensor,
+        gt_bboxes: Tensor,
+        mask_gt: Tensor,
+    ) -> tuple[Tensor, Tensor, Tensor, Tensor]:
+        """Assigner's forward method which generates final assignments.
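+
+        Candidates are ranked by the task-aligned metric, which combines the
+        predicted classification score with the IoU between predicted and GT boxes.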
+ + @type pred_scores: Tensor + @param pred_scores: Predicted scores [bs, n_anchors, 1] + @type pred_bboxes: Tensor + @param pred_bboxes: Predicted bboxes [bs, n_anchors, 4] + @type anchor_points: Tensor + @param anchor_points: Anchor points [n_anchors, 2] + @type gt_labels: Tensor + @param gt_labels: Initial GT labels [bs, n_max_boxes, 1] + @type gt_bboxes: Tensor + @param gt_bboxes: Initial GT bboxes [bs, n_max_boxes, 4] + @type mask_gt: Tensor + @param mask_gt: Mask for valid GTs [bs, n_max_boxes, 1] + @rtype: tuple[Tensor, Tensor, Tensor, Tensor] + @return: Assigned labels of shape [bs, n_anchors], assigned bboxes of shape [bs, + n_anchors, 4], assigned scores of shape [bs, n_anchors, n_classes] and + output mask of shape [bs, n_anchors] + """ + self.bs = pred_scores.size(0) + self.n_max_boxes = gt_bboxes.size(1) + + if self.n_max_boxes == 0: + device = gt_bboxes.device + return ( + torch.full_like(pred_scores[..., 0], self.n_classes).to(device), + torch.zeros_like(pred_bboxes).to(device), + torch.zeros_like(pred_scores).to(device), + torch.zeros_like(pred_scores[..., 0]).to(device), + ) + + # Compute alignment metric between all bboxes (bboxes of all pyramid levels) and GT + align_metric, overlaps = self._get_alignment_metric( + pred_scores, pred_bboxes, gt_labels, gt_bboxes + ) + + # Select top-k bboxes as candidates for each GT + is_in_gts = candidates_in_gt(anchor_points, gt_bboxes.reshape([-1, 4])) + is_in_gts = torch.reshape(is_in_gts, (self.bs, self.n_max_boxes, -1)) + is_in_topk = self._select_topk_candidates( + align_metric * is_in_gts, + topk_mask=mask_gt.repeat([1, 1, self.topk]).bool(), + ) + + # Final positive candidates + mask_pos = is_in_topk * is_in_gts * mask_gt + + # If an anchor box is assigned to multiple gts, the one with the highest IoU is selected + assigned_gt_idx, mask_pos_sum, mask_pos = fix_collisions( + mask_pos, overlaps, self.n_max_boxes + ) + + # Generate final targets based on masks + assigned_labels, assigned_bboxes, assigned_scores = self._get_final_assignments( + gt_labels, gt_bboxes, assigned_gt_idx, mask_pos_sum + ) + + # normalize + align_metric *= mask_pos + pos_align_metrics = align_metric.max(dim=-1, keepdim=True)[0] + pos_overlaps = (overlaps * mask_pos).max(dim=-1, keepdim=True)[0] + norm_align_metric = ( + (align_metric * pos_overlaps / (pos_align_metrics + self.eps)) + .max(-2)[0] + .unsqueeze(-1) + ) + assigned_scores = assigned_scores * norm_align_metric + + out_mask_positive = mask_pos_sum.bool() + + return assigned_labels, assigned_bboxes, assigned_scores, out_mask_positive + + def _get_alignment_metric( + self, + pred_scores: Tensor, + pred_bboxes: Tensor, + gt_labels: Tensor, + gt_bboxes: Tensor, + ): + """Calculates anchor alignment metric and IoU between GTs and predicted bboxes. 
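+
+        The metric is computed as C{bbox_scores ** alpha * overlaps ** beta}.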
+ + @type pred_scores: Tensor + @param pred_scores: Predicted scores [bs, n_anchors, 1] + @type pred_bboxes: Tensor + @param pred_bboxes: Predicted bboxes [bs, n_anchors, 4] + @type gt_labels: Tensor + @param gt_labels: Initial GT labels [bs, n_max_boxes, 1] + @type gt_bboxes: Tensor + @param gt_bboxes: Initial GT bboxes [bs, n_max_boxes, 4] + """ + pred_scores = pred_scores.permute(0, 2, 1) + gt_labels = gt_labels.to(torch.long) + ind = torch.zeros([2, self.bs, self.n_max_boxes], dtype=torch.long) + ind[0] = torch.arange(end=self.bs).view(-1, 1).repeat(1, self.n_max_boxes) + ind[1] = gt_labels.squeeze(-1) + bbox_scores = pred_scores[ind[0], ind[1]] + + overlaps = batch_iou(gt_bboxes, pred_bboxes) + align_metric = bbox_scores.pow(self.alpha) * overlaps.pow(self.beta) + + return align_metric, overlaps + + def _select_topk_candidates( + self, + metrics: Tensor, + largest: bool = True, + topk_mask: Tensor | None = None, + ): + """Selects k anchors based on provided metrics tensor. + + @type metrics: Tensor + @param metrics: Metrics tensor of shape [bs, n_max_boxes, n_anchors] + @type largest: bool + @param largest: Flag if should keep largest topK. Defaults to True. + @type topk_mask: Tensor + @param topk_mask: Mask for valid GTs of shape [bs, n_max_boxes, topk] + @rtype: Tensor + @return: Mask of selected anchors of shape [bs, n_max_boxes, n_anchors] + """ + num_anchors = metrics.shape[-1] + topk_metrics, topk_idxs = torch.topk( + metrics, self.topk, dim=-1, largest=largest + ) + if topk_mask is None: + topk_mask = (topk_metrics.max(dim=-1, keepdim=True)[0] > self.eps).tile( + [1, 1, self.topk] + ) + topk_idxs = torch.where(topk_mask, topk_idxs, torch.zeros_like(topk_idxs)) + is_in_topk = F.one_hot(topk_idxs, num_anchors).sum(dim=-2) + is_in_topk = torch.where( + is_in_topk > 1, torch.zeros_like(is_in_topk), is_in_topk + ) + return is_in_topk.to(metrics.dtype) + + def _get_final_assignments( + self, + gt_labels: Tensor, + gt_bboxes: Tensor, + assigned_gt_idx: Tensor, + mask_pos_sum: Tensor, + ) -> tuple[Tensor, Tensor, Tensor]: + """Generate final assignments based on the mask. + + @type gt_labels: Tensor + @param gt_labels: Initial GT labels [bs, n_max_boxes, 1] + @type gt_bboxes: Tensor + @param gt_bboxes: Initial GT bboxes [bs, n_max_boxes, 4] + @type assigned_gt_idx: Tensor + @param assigned_gt_idx: Indices of matched GTs [bs, n_anchors] + @type mask_pos_sum: Tensor + @param mask_pos_sum: Mask of matched GTs [bs, n_anchors] + @rtype: tuple[Tensor, Tensor, Tensor] + @return: Assigned labels of shape [bs, n_anchors], assigned bboxes of shape [bs, + n_anchors, 4], assigned scores of shape [bs, n_anchors, n_classes]. 
+ """ + # assigned target labels + batch_ind = torch.arange( + end=self.bs, dtype=torch.int64, device=gt_labels.device + )[..., None] + assigned_gt_idx = assigned_gt_idx + batch_ind * self.n_max_boxes + assigned_labels = gt_labels.long().flatten()[assigned_gt_idx] + + # assigned target boxes + assigned_bboxes = gt_bboxes.reshape([-1, 4])[assigned_gt_idx] + + # assigned target scores + assigned_labels[assigned_labels < 0] = 0 + assigned_scores = F.one_hot(assigned_labels, self.n_classes) + mask_pos_scores = mask_pos_sum[:, :, None].repeat(1, 1, self.n_classes) + assigned_scores = torch.where( + mask_pos_scores > 0, assigned_scores, torch.full_like(assigned_scores, 0) + ) + + assigned_labels = torch.where( + mask_pos_sum.bool(), + assigned_labels, + torch.full_like(assigned_labels, self.n_classes), + ) + + return assigned_labels, assigned_bboxes, assigned_scores diff --git a/luxonis_train/utils/assigners/utils.py b/luxonis_train/utils/assigners/utils.py new file mode 100644 index 00000000..fadf5f8e --- /dev/null +++ b/luxonis_train/utils/assigners/utils.py @@ -0,0 +1,73 @@ +import torch +import torch.nn.functional as F +from torch import Tensor + +from luxonis_train.utils.boxutils import bbox_iou + + +def candidates_in_gt( + anchor_centers: Tensor, gt_bboxes: Tensor, eps: float = 1e-9 +) -> Tensor: + """Check if anchor box's center is in any GT bbox. + + @type anchor_centers: Tensor + @param anchor_centers: Centers of anchor bboxes [n_anchors, 2] + @type gt_bboxes: Tensor + @param gt_bboxes: Ground truth bboxes [bs * n_max_boxes, 4] + @type eps: float + @param eps: Threshold for minimum delta. Defaults to 1e-9. + @rtype: Tensor + @return: Mask for anchors inside any GT bbox + """ + n_anchors = anchor_centers.size(0) + anchor_centers = anchor_centers.unsqueeze(0).repeat(gt_bboxes.size(0), 1, 1) + gt_bboxes_lt = gt_bboxes[:, :2].unsqueeze(1).repeat(1, n_anchors, 1) + gt_bboxes_rb = gt_bboxes[:, 2:].unsqueeze(1).repeat(1, n_anchors, 1) + bbox_delta_lt = anchor_centers - gt_bboxes_lt + bbox_delta_rb = gt_bboxes_rb - anchor_centers + bbox_delta = torch.cat([bbox_delta_lt, bbox_delta_rb], dim=-1) + candidates = (bbox_delta.min(dim=-1)[0] > eps).to(gt_bboxes.dtype) + return candidates + + +def fix_collisions( + mask_pos: Tensor, overlaps: Tensor, n_max_boxes: int +) -> tuple[Tensor, Tensor, Tensor]: + """If an anchor is assigned to multiple GTs, the one with highest IoU is selected. + + @type mask_pos: Tensor + @param mask_pos: Mask of assigned anchors [bs, n_max_boxes, n_anchors] + @type overlaps: Tensor + @param overlaps: IoUs between GTs and anchors [bx, n_max_boxes, n_anchors] + @type n_max_boxes: int + @param n_max_boxes: Number of maximum boxes per image + @rtype: tuple[Tensor, Tensor, Tensor] + @return: Assigned indices, sum of positive mask, positive mask + """ + mask_pos_sum = mask_pos.sum(dim=-2) + if mask_pos_sum.max() > 1: + mask_multi_gts = (mask_pos_sum.unsqueeze(1) > 1).repeat([1, n_max_boxes, 1]) + max_overlaps_idx = overlaps.argmax(dim=1) + is_max_overlaps = F.one_hot(max_overlaps_idx, n_max_boxes) + is_max_overlaps = is_max_overlaps.permute(0, 2, 1).to(overlaps.dtype) + mask_pos = torch.where(mask_multi_gts, is_max_overlaps, mask_pos) + mask_pos_sum = mask_pos.sum(dim=-2) + assigned_gt_idx = mask_pos.argmax(dim=-2) + return assigned_gt_idx, mask_pos_sum, mask_pos + + +def batch_iou(batch1: Tensor, batch2: Tensor) -> Tensor: + """Calculates IoU for each pair of bboxes in the batch. Bboxes must be in xyxy + format. 
+ + @type batch1: Tensor + @param batch1: Tensor of shape C{[bs, N, 4]} + @type batch2: Tensor + @param batch2: Tensor of shape C{[bs, M, 4]} + @rtype: Tensor + @return: Per image box IoU of shape C{[bs, N, M]} + """ + ious = torch.stack( + [bbox_iou(batch1[i], batch2[i]) for i in range(batch1.size(0))], dim=0 + ) + return ious diff --git a/luxonis_train/utils/boxutils.py b/luxonis_train/utils/boxutils.py new file mode 100644 index 00000000..0d708f79 --- /dev/null +++ b/luxonis_train/utils/boxutils.py @@ -0,0 +1,703 @@ +"""This module contains various utility functions for working with bounding boxes.""" + +import math +from typing import Literal, TypeAlias + +import torch +from scipy.cluster.vq import kmeans +from torch import Tensor +from torchvision.ops import ( + batched_nms, + box_convert, + box_iou, + distance_box_iou, + generalized_box_iou, +) + +from luxonis_train.utils.types import LabelType + +IoUType: TypeAlias = Literal["none", "giou", "diou", "ciou", "siou"] +BBoxFormatType: TypeAlias = Literal["xyxy", "xywh", "cxcywh"] + +__all__ = [ + "anchors_for_fpn_features", + "anchors_from_dataset", + "bbox2dist", + "bbox_iou", + "compute_iou_loss", + "dist2bbox", + "match_to_anchor", + "non_max_suppression", + "process_bbox_predictions", + "process_keypoints_predictions", +] + + +def match_to_anchor( + targets: Tensor, + anchor: Tensor, + xy_shifts: Tensor, + scale_width: int, + scale_height: int, + n_keypoints: int, + anchor_threshold: float, + bias: float, + box_offset: int = 5, +) -> tuple[Tensor, Tensor]: + """Matches targets to anchors. + + 1. Scales the targets to the size of the feature map + 2. Matches the targets to the anchor, filtering out targets whose aspect + ratio is too far from the anchor's aspect ratio. + + @type targets: Tensor + @param targets: Targets in xyxy format + @type anchor: Tensor + @param anchor: Anchor boxes + @type xy_shifts: Tensor + @param xy_shifts: Shifts in x and y direction + @type scale_width: int + @param scale_width: Width of the feature map + @type scale_height: int + @param scale_height: Height of the feature map + @type n_keypoints: int + @param n_keypoints: Number of keypoints + @type anchor_threshold: float + @param anchor_threshold: Threshold for anchor filtering + @type bias: float + @param bias: Bias for anchor filtering + @type box_offset: int + @param box_offset: Offset for box. Defaults to 5. + + @rtype: tuple[Tensor, Tensor] + @return: Scaled targets and shifts. + """ + + # The boxes and keypoints need to be scaled to the size of the features + # First two indices are batch index and class label, + # last index is anchor index. Those are not scaled. 
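+    # scales therefore holds ones at those positions and alternates
+    # (scale_width, scale_height) for all box and keypoint coordinates.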
+    scale_length = 2 * n_keypoints + box_offset + 2
+    scales = torch.ones(scale_length, device=targets.device)
+    scales[2 : scale_length - 1] = torch.tensor(
+        [scale_width, scale_height] * (n_keypoints + 2)
+    )
+    scaled_targets = targets * scales
+    if targets.size(1) == 0:
+        return targets[0], torch.zeros(1, device=targets.device)
+
+    wh_to_anchor_ratio = scaled_targets[:, :, 4:6] / anchor.unsqueeze(1)
+    ratio_mask = (
+        torch.max(wh_to_anchor_ratio, 1.0 / wh_to_anchor_ratio).max(2)[0]
+        < anchor_threshold
+    )
+
+    filtered_targets = scaled_targets[ratio_mask]
+
+    box_xy = filtered_targets[:, 2:4]
+    box_wh = torch.tensor([scale_width, scale_height]) - box_xy
+
+    def decimal_part(x: Tensor) -> Tensor:
+        return x % 1.0
+
+    x, y = ((decimal_part(box_xy) < bias) & (box_xy > 1.0)).T
+    w, h = ((decimal_part(box_wh) < bias) & (box_wh > 1.0)).T
+    mask = torch.stack((torch.ones_like(x), x, y, w, h))
+    final_targets = filtered_targets.repeat((len(xy_shifts), 1, 1))[mask]
+
+    shifts = xy_shifts.unsqueeze(1).repeat((1, len(box_xy), 1))[mask]
+    return final_targets, shifts
+
+
+def dist2bbox(
+    distance: Tensor,
+    anchor_points: Tensor,
+    out_format: BBoxFormatType = "xyxy",
+) -> Tensor:
+    """Transform distance (ltrb) to box ("xyxy", "xywh" or "cxcywh").
+
+    @type distance: Tensor
+    @param distance: Distance predictions
+    @type anchor_points: Tensor
+    @param anchor_points: Head's anchor points
+    @type out_format: BBoxFormatType
+    @param out_format: BBox output format. Defaults to "xyxy".
+    @rtype: Tensor
+    @return: BBoxes in correct format
+    """
+    lt, rb = torch.split(distance, 2, -1)
+    x1y1 = anchor_points - lt
+    x2y2 = anchor_points + rb
+    bbox = torch.cat([x1y1, x2y2], -1)
+    if out_format in ["xyxy", "xywh", "cxcywh"]:
+        bbox = box_convert(bbox, in_fmt="xyxy", out_fmt=out_format)
+    else:
+        raise ValueError(f"Out format `{out_format}` for bbox not supported")
+    return bbox
+
+
+def bbox2dist(bbox: Tensor, anchor_points: Tensor, reg_max: float) -> Tensor:
+    """Transform bbox(xyxy) to distance(ltrb).
+
+    @type bbox: Tensor
+    @param bbox: Bboxes in "xyxy" format
+    @type anchor_points: Tensor
+    @param anchor_points: Head's anchor points
+    @type reg_max: float
+    @param reg_max: Maximum regression distance
+    @rtype: Tensor
+    @return: BBoxes in distance(ltrb) format
+    """
+    x1y1, x2y2 = torch.split(bbox, 2, -1)
+    lt = anchor_points - x1y1
+    rb = x2y2 - anchor_points
+    dist = torch.cat([lt, rb], -1).clip(0, reg_max - 0.01)
+    return dist
+
+
+def bbox_iou(
+    bbox1: Tensor,
+    bbox2: Tensor,
+    bbox_format: BBoxFormatType = "xyxy",
+    iou_type: IoUType = "none",
+    element_wise: bool = False,
+) -> Tensor:
+    """Computes IoU between two sets of bounding boxes.
+
+    @type bbox1: Tensor
+    @param bbox1: First set of bboxes [N, 4].
+    @type bbox2: Tensor
+    @param bbox2: Second set of bboxes [M, 4].
+    @type bbox_format: BBoxFormatType
+    @param bbox_format: Input bbox format. Defaults to "xyxy".
+    @type iou_type: IoUType
+    @param iou_type: IoU type. Defaults to "none".
+    @type element_wise: bool
+    @param element_wise: If True returns element wise IoUs. Defaults to False.
+    @rtype: Tensor
+    @return: IoU between bbox1 and bbox2. If element_wise is True returns [N]
+        tensor, otherwise returns [N, M] tensor.
+ """ + if bbox_format != "xyxy": + bbox1 = box_convert(bbox1, in_fmt=bbox_format, out_fmt="xyxy") + bbox2 = box_convert(bbox2, in_fmt=bbox_format, out_fmt="xyxy") + + if iou_type == "none": + iou = box_iou(bbox1, bbox2) + elif iou_type == "giou": + iou = generalized_box_iou(bbox1, bbox2) + elif iou_type == "diou": + iou = distance_box_iou(bbox1, bbox2) + elif iou_type == "ciou": + # CIoU from `Enhancing Geometric Factors in Model Learning and Inference for + # Object Detection and Instance Segmentation`, https://arxiv.org/pdf/2005.03572.pdf. + # Implementation adapted from torchvision complete_box_iou with added eps for stability + eps = 1e-7 + + iou = bbox_iou(bbox1, bbox2, iou_type="none") + diou = bbox_iou(bbox1, bbox2, iou_type="diou") + + w1 = bbox1[:, None, 2] - bbox1[:, None, 0] + h1 = bbox1[:, None, 3] - bbox1[:, None, 1] + eps + w2 = bbox2[:, 2] - bbox2[:, 0] + h2 = bbox2[:, 3] - bbox2[:, 1] + eps + + v = (4 / (torch.pi**2)) * torch.pow( + torch.atan(w1 / h1) - torch.atan(w2 / h2), 2 + ) + with torch.no_grad(): + alpha = v / (1 - iou + v + eps) + iou = diou - alpha * v + + elif iou_type == "siou": + # SIoU from `SIoU Loss: More Powerful Learning for Bounding Box Regression`, + # https://arxiv.org/pdf/2205.12740.pdf + + eps = 1e-7 + bbox1_xywh = box_convert(bbox1, in_fmt="xyxy", out_fmt="xywh") + w1, h1 = bbox1_xywh[:, 2], bbox1_xywh[:, 3] + bbox2_xywh = box_convert(bbox2, in_fmt="xyxy", out_fmt="xywh") + w2, h2 = bbox2_xywh[:, 2], bbox2_xywh[:, 3] + + # enclose area + enclose_x1y1 = torch.min(bbox1[:, None, :2], bbox2[:, :2]) + enclose_x2y2 = torch.max(bbox1[:, None, 2:], bbox2[:, 2:]) + enclose_wh = (enclose_x2y2 - enclose_x1y1).clamp(min=eps) + cw = enclose_wh[..., 0] + ch = enclose_wh[..., 1] + + # angle cost + s_cw = ( + bbox2[:, None, 0] + bbox2[:, None, 2] - bbox1[:, 0] - bbox1[:, 2] + ) * 0.5 + eps + s_ch = ( + bbox2[:, None, 1] + bbox2[:, None, 3] - bbox1[:, 1] - bbox1[:, 3] + ) * 0.5 + eps + + sigma = torch.pow(s_cw**2 + s_ch**2, 0.5) + + sin_alpha_1 = torch.abs(s_cw) / sigma + sin_alpha_2 = torch.abs(s_ch) / sigma + threshold = pow(2, 0.5) / 2 + sin_alpha = torch.where(sin_alpha_1 > threshold, sin_alpha_2, sin_alpha_1) + angle_cost = torch.cos(torch.arcsin(sin_alpha) * 2 - math.pi / 2) + + # distance cost + rho_x = (s_cw / cw) ** 2 + rho_y = (s_ch / ch) ** 2 + gamma = angle_cost - 2 + distance_cost = 2 - torch.exp(gamma * rho_x) - torch.exp(gamma * rho_y) + + # shape cost + omega_w = torch.abs(w1 - w2) / torch.max(w1, w2) + omega_h = torch.abs(h1 - h2) / torch.max(h1, h2) + shape_cost = torch.pow(1 - torch.exp(-1 * omega_w), 4) + torch.pow( + 1 - torch.exp(-1 * omega_h), 4 + ) + + iou = box_iou(bbox1, bbox2) - 0.5 * (distance_cost + shape_cost) + else: + raise ValueError(f"IoU type `{iou_type}` not supported.") + + iou = torch.nan_to_num(iou, 0) + + if element_wise: + return iou.diag() + else: + return iou + + +def non_max_suppression( + preds: Tensor, + n_classes: int, + conf_thres: float = 0.25, + iou_thres: float = 0.45, + keep_classes: list[int] | None = None, + agnostic: bool = False, + multi_label: bool = False, + bbox_format: BBoxFormatType = "xyxy", + max_det: int = 300, + predicts_objectness: bool = True, +) -> list[Tensor]: + """Non-maximum suppression on model's predictions to keep only best instances. + + @type preds: Tensor + @param preds: Model's prediction tensor of shape [bs, N, M]. + @type n_classes: int + @param n_classes: Number of model's classes. + @type conf_thres: float + @param conf_thres: Boxes with confidence higher than this will be kept. 
Defaults to + 0.25. + @type iou_thres: float + @param iou_thres: Boxes with IoU higher than this will be discarded. Defaults to + 0.45. + @type keep_classes: list[int] | None + @param keep_classes: Subset of classes to keep, if None then keep all of them. + Defaults to None. + @type agnostic: bool + @param agnostic: Whether perform NMS per class or treat all classes the same. + Defaults to False. + @type multi_label: bool + @param multi_label: Whether one prediction can have multiple labels. Defaults to + False. + @type bbox_format: BBoxFormatType + @param bbox_format: Input bbox format. Defaults to "xyxy". + @type max_det: int + @param max_det: Number of maximum output detections. Defaults to 300. + @type predicts_objectness: bool + @param predicts_objectness: Whether head predicts objectness confidence. Defaults to + True. + @rtype: list[Tensor] + @return: list of kept detections for each image, boxes in "xyxy" format. Tensors + with shape [n_kept, M] + """ + if not (0 <= conf_thres <= 1): + raise ValueError( + f"Confidence threshold must be in range [0,1] but set to {conf_thres}." + ) + if not (0 <= iou_thres <= 1): + raise ValueError( + f"IoU threshold must be in range [0,1] but set to {iou_thres}." + ) + + multi_label &= n_classes > 1 + + # If any data after bboxes are present. + has_additional = preds.size(-1) > (4 + 1 + n_classes) + + candidate_mask = preds[..., 4] > conf_thres + if not predicts_objectness: + candidate_mask = torch.logical_and( + candidate_mask, + torch.max(preds[..., 5 : 5 + n_classes], dim=-1)[0] > conf_thres, + ) + + output = [torch.zeros((0, preds.size(-1)), device=preds.device)] * preds.size(0) + + for i, x in enumerate(preds): + curr_out = x[candidate_mask[i]] + + if curr_out.size(0) == 0: + continue + + if predicts_objectness: + if n_classes == 1: + curr_out[:, 5 : 5 + n_classes] = curr_out[:, 4:5] + else: + curr_out[:, 5 : 5 + n_classes] *= curr_out[:, 4:5] + else: + curr_out[:, 5 : 5 + n_classes] *= curr_out[:, 4:5] + + bboxes = curr_out[:, :4] + keep_mask = torch.zeros(bboxes.size(0)).bool() + if bbox_format != "xyxy": + bboxes = box_convert(bboxes, in_fmt=bbox_format, out_fmt="xyxy") + + if multi_label: + box_idx, class_idx = ( + (curr_out[:, 5 : 5 + n_classes] > conf_thres).nonzero(as_tuple=False).T + ) + keep_mask[box_idx] = True + curr_out = torch.cat( + ( + bboxes[keep_mask], + curr_out[keep_mask, class_idx + 5, None], + class_idx[:, None].float(), + ), + 1, + ) + else: + conf, class_idx = curr_out[:, 5 : 5 + n_classes].max(1, keepdim=True) + keep_mask[conf.view(-1) > conf_thres] = True + curr_out = torch.cat((bboxes, conf, class_idx.float()), 1)[keep_mask] + + if has_additional: + curr_out = torch.hstack( + [curr_out, x[candidate_mask[i]][keep_mask, 5 + n_classes :]] + ) + + if keep_classes is not None: + curr_out = curr_out[ + ( + curr_out[:, 5:6] + == torch.tensor(keep_classes, device=curr_out.device) + ).any(1) + ] + + if not curr_out.size(0): + continue + + keep_indices = batched_nms( + boxes=curr_out[:, :4], + scores=curr_out[:, 4], + iou_threshold=iou_thres, + idxs=curr_out[:, 5].int() * (0 if agnostic else 1), + ) + keep_indices = keep_indices[:max_det] + + output[i] = curr_out[keep_indices] + + return output + + +def anchors_from_dataset( + loader: torch.utils.data.DataLoader, + n_anchors: int = 9, + n_generations: int = 1000, + ratio_threshold: float = 4.0, +) -> tuple[Tensor, float]: + """Generates anchors based on bounding box annotations present in provided data + loader. 
It uses K-Means for initial proposals which are then refined with genetic + algorithm. + + @type loader: L{torch.utils.data.DataLoader} + @param loader: Data loader. + @type n_anchors: int + @param n_anchors: Number of anchors, this is normally num_heads * 3 which generates + 3 anchors per layer. Defaults to 9. + @type n_generations: int + @param n_generations: Number of iterations for anchor improvement with genetic + algorithm. Defaults to 1000. + @type ratio_threshold: float + @param ratio_threshold: Minimum threshold for ratio. Defaults to 4.0. + @rtype: tuple[Tensor, float] + @return: Proposed anchors and the best possible recall. + """ + + widths = [] + inputs = None + for inp, labels in loader: + boxes = labels[LabelType.BOUNDINGBOX] + curr_wh = boxes[:, 4:] + widths.append(curr_wh) + inputs = inp + assert inputs is not None, "No inputs found in data loader" + _, _, h, w = inputs.shape # assuming all images are same size + img_size = torch.tensor([w, h]) + wh = torch.vstack(widths) * img_size + + # filter out small objects (w or h < 2 pixels) + wh = wh[(wh >= 2).any(1)] + + try: + assert n_anchors <= len( + wh + ), "More requested anchors than number of bounding boxes." + std = wh.std(0) + proposed_anchors = kmeans(wh / std, n_anchors, iter=30) + proposed_anchors = torch.tensor(proposed_anchors[0]) * std + assert n_anchors == len( + proposed_anchors + ), "KMeans returned insufficient number of points" + except Exception: + print("Fallback to random anchor init") + proposed_anchors = ( + torch.sort(torch.rand(n_anchors * 2))[0].reshape(n_anchors, 2) * img_size + ) + + proposed_anchors = proposed_anchors[ + torch.argsort(proposed_anchors.prod(1)) + ] # sort small to large + + def calc_best_anchor_ratio(anchors: Tensor, wh: Tensor) -> Tensor: + """Calculate how well most suitable anchor box matches each target bbox.""" + symmetric_size_ratios = torch.min( + wh[:, None] / anchors[None], anchors[None] / wh[:, None] + ) + worst_side_size_ratio = symmetric_size_ratios.min(-1).values + best_anchor_ratio = worst_side_size_ratio.max(-1).values + return best_anchor_ratio + + def calc_best_possible_recall(anchors: Tensor, wh: Tensor) -> Tensor: + """Calculate best possible recall if every bbox is matched to an appropriate + anchor.""" + best_anchor_ratio = calc_best_anchor_ratio(anchors, wh) + best_possible_recall = (best_anchor_ratio > 1 / ratio_threshold).float().mean() + return best_possible_recall + + def anchor_fitness(anchors: Tensor, wh: Tensor) -> Tensor: + """Fitness function used for anchor evolve.""" + best_anchor_ratio = calc_best_anchor_ratio(anchors, wh) + return ( + best_anchor_ratio * (best_anchor_ratio > 1 / ratio_threshold).float() + ).mean() + + # Genetic algorithm + best_fitness = anchor_fitness(proposed_anchors, wh) + anchor_shape = proposed_anchors.shape + mutation_probability = 0.9 + mutation_noise_mean = 1 + mutation_noise_std = 0.1 + for _ in range(n_generations): + anchor_mutation = torch.ones(anchor_shape) + anchor_mutation = ( + (torch.rand(anchor_shape) < mutation_probability) + * torch.randn(anchor_shape) + * mutation_noise_std + + mutation_noise_mean + ).clip(0.3, 3.0) + + mutated_anchors = (proposed_anchors.clone() * anchor_mutation).clip(min=2.0) + mutated_fitness = anchor_fitness(mutated_anchors, wh) + if mutated_fitness > best_fitness: + best_fitness = mutated_fitness + proposed_anchors = mutated_anchors.clone() + + proposed_anchors = proposed_anchors[ + torch.argsort(proposed_anchors.prod(1)) + ] # sort small to large + recall = 
calc_best_possible_recall(proposed_anchors, wh) + + return proposed_anchors, recall.item() + + +def anchors_for_fpn_features( + features: list[Tensor], + strides: Tensor, + grid_cell_size: float = 5.0, + grid_cell_offset: float = 0.5, + multiply_with_stride: bool = False, +) -> tuple[Tensor, Tensor, list[int], Tensor]: + """Generates anchor boxes, points and strides based on FPN feature shapes and + strides. + + @type features: list[Tensor] + @param features: List of FPN features. + @type strides: Tensor + @param strides: Strides of FPN features. + @type grid_cell_size: float + @param grid_cell_size: Cell size in respect to input image size. Defaults to 5.0. + @type grid_cell_offset: float + @param grid_cell_offset: Percent grid cell center's offset. Defaults to 0.5. + @type multiply_with_stride: bool + @param multiply_with_stride: Whether to multiply per FPN values with its stride. + Defaults to False. + @rtype: tuple[Tensor, Tensor, list[int], Tensor] + @return: BBox anchors, center anchors, number of anchors, strides + """ + anchors: list[Tensor] = [] + anchor_points: list[Tensor] = [] + n_anchors_list: list[int] = [] + stride_tensor: list[Tensor] = [] + for feature, stride in zip(features, strides): + _, _, h, w = feature.shape + cell_half_size = grid_cell_size * stride * 0.5 + shift_x = torch.arange(end=w) + grid_cell_offset + shift_y = torch.arange(end=h) + grid_cell_offset + if multiply_with_stride: + shift_x *= stride + shift_y *= stride + shift_y, shift_x = torch.meshgrid(shift_y, shift_x, indexing="ij") + + anchor = ( + torch.stack( + [ + shift_x - cell_half_size, + shift_y - cell_half_size, + shift_x + cell_half_size, + shift_y + cell_half_size, + ], + dim=-1, + ) + .reshape(-1, 4) + .to(feature.dtype) + ) + anchors.append(anchor) + + anchor_point = ( + torch.stack([shift_x, shift_y], dim=-1).reshape(-1, 2).to(feature.dtype) + ) + anchor_points.append(anchor_point) + + curr_n_anchors = len(anchor) + n_anchors_list.append(curr_n_anchors) + stride_tensor.append( + torch.full((curr_n_anchors, 1), stride, dtype=feature.dtype) # type: ignore + ) + + device = features[0].device + return ( + torch.cat(anchors).to(device), + torch.cat(anchor_points).to(device), + n_anchors_list, + torch.cat(stride_tensor).to(device), + ) + + +def process_keypoints_predictions(keypoints: Tensor) -> tuple[Tensor, Tensor, Tensor]: + """Extracts x, y and visibility from keypoints predictions. + + @type keypoints: Tensor + @param keypoints: Keypoints predictions. The last dimension must be divisible by 3 + and is expected to be in format [x1, y1, v1, x2, y2, v2, ...]. + + @rtype: tuple[Tensor, Tensor, Tensor] + @return: x, y and visibility tensors. + """ + x = keypoints[..., ::3] * 2.0 - 0.5 + y = keypoints[..., 1::3] * 2.0 - 0.5 + visibility = keypoints[..., 2::3] + return ( + x, + y, + visibility, + ) + + +def process_bbox_predictions( + bbox: Tensor, anchor: Tensor +) -> tuple[Tensor, Tensor, Tensor]: + """Transforms bbox predictions to correct format. + + @type bbox: Tensor + @param bbox: Bbox predictions + @type anchor: Tensor + @param anchor: Anchor boxes + @rtype: tuple[Tensor, Tensor, Tensor] + @return: xy and wh predictions and tail. The tail is anything after xywh. 
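+
+    Decoding follows the YOLOv5-style convention: C{xy = sigmoid(xy) * 2 - 0.5}
+    and C{wh = (sigmoid(wh) * 2) ** 2 * anchor}.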
+ """ + out_bbox = bbox.sigmoid() + out_bbox_xy = out_bbox[..., 0:2] * 2.0 - 0.5 + out_bbox_wh = (out_bbox[..., 2:4] * 2) ** 2 * anchor + out_bbox_tail = out_bbox[..., 4:] + return out_bbox_xy, out_bbox_wh, out_bbox_tail + + +def compute_iou_loss( + pred_bboxes: Tensor, + target_bboxes: Tensor, + target_scores: Tensor | None = None, + mask_positive: Tensor | None = None, + *, + iou_type: IoUType = "giou", + bbox_format: BBoxFormatType = "xyxy", + reduction: Literal["sum", "mean"] = "mean", +) -> tuple[Tensor, Tensor]: + """Computes an IoU loss between 2 sets of bounding boxes. + + @type pred_bboxes: Tensor + @param pred_bboxes: Predicted bounding boxes. + @type target_bboxes: Tensor + @param target_bboxes: Target bounding boxes. + @type target_scores: Tensor | None + @param target_scores: Target scores. Defaults to None. + @type mask_positive: Tensor | None + @param mask_positive: Mask for positive samples. Defaults to None. + @type iou_type: L{IoUType} + @param iou_type: IoU type. Defaults to "giou". + @type bbox_format: L{BBoxFormatType} + @param bbox_format: BBox format. Defaults to "xyxy". + @type reduction: Literal["sum", "mean"] + @param reduction: Reduction type. Defaults to "mean". + @rtype: tuple[Tensor, Tensor] + @return: IoU loss and IoU values. + """ + device = pred_bboxes.device + target_bboxes = target_bboxes.to(device) + if mask_positive is None or mask_positive.sum() > 0: + if target_scores is not None: + bbox_weight = torch.masked_select( + target_scores.sum(-1), + mask_positive + if mask_positive is not None + else torch.ones_like(target_scores.sum(-1)), + ).unsqueeze(-1) + else: + bbox_weight = torch.tensor(1.0) + + if mask_positive is not None: + bbox_mask = mask_positive.unsqueeze(-1).repeat([1, 1, 4]) + else: + bbox_mask = torch.ones_like(pred_bboxes, dtype=torch.bool) + + pred_bboxes_pos = torch.masked_select(pred_bboxes, bbox_mask).reshape([-1, 4]) + target_bboxes_pos = torch.masked_select(target_bboxes, bbox_mask).reshape( + [-1, 4] + ) + + iou = bbox_iou( + pred_bboxes_pos, + target_bboxes_pos, + iou_type=iou_type, + bbox_format=bbox_format, + element_wise=True, + ).unsqueeze(-1) + loss_iou = (1 - iou) * bbox_weight + + if reduction == "mean": + loss_iou = loss_iou.mean() + + elif reduction == "sum": + if target_scores is None: + raise NotImplementedError( + "Sum reduction is not supported when `target_scores` is None" + ) + loss_iou = loss_iou.sum() + if target_scores.sum() > 1: + loss_iou /= target_scores.sum() + else: + raise ValueError(f"Unknown reduction type `{reduction}`") + else: + loss_iou = torch.tensor(0.0).to(pred_bboxes.device) + iou = torch.zeros([len(target_bboxes)]).to(pred_bboxes.device) + + return loss_iou, iou.detach().clamp(0) diff --git a/luxonis_train/utils/config.py b/luxonis_train/utils/config.py new file mode 100644 index 00000000..9a1552a1 --- /dev/null +++ b/luxonis_train/utils/config.py @@ -0,0 +1,343 @@ +import logging +import sys +from enum import Enum +from typing import Annotated, Any, Literal + +from luxonis_ml.data import BucketStorage, BucketType +from luxonis_ml.utils import Environ, LuxonisConfig, LuxonisFileSystem, setup_logging +from pydantic import BaseModel, Field, field_serializer, model_validator + +from luxonis_train.utils.general import is_acyclic +from luxonis_train.utils.registry import MODELS + +logger = logging.getLogger(__name__) + + +class AttachedModuleConfig(BaseModel): + name: str + attached_to: str + override_name: str | None = None + params: dict[str, Any] = {} + + +class 
LossModuleConfig(AttachedModuleConfig): + weight: float = 1.0 + + +class MetricModuleConfig(AttachedModuleConfig): + is_main_metric: bool = False + + +class ModelNodeConfig(BaseModel): + name: str + override_name: str | None = None + inputs: list[str] = [] + params: dict[str, Any] = {} + frozen: bool = False + + +class PredefinedModelConfig(BaseModel): + name: str + params: dict[str, Any] = {} + include_nodes: bool = True + include_losses: bool = True + include_metrics: bool = True + include_visualizers: bool = True + + +class ModelConfig(BaseModel): + name: str + predefined_model: PredefinedModelConfig | None = None + weights: str | None = None + nodes: list[ModelNodeConfig] = [] + losses: list[LossModuleConfig] = [] + metrics: list[MetricModuleConfig] = [] + visualizers: list[AttachedModuleConfig] = [] + outputs: list[str] = [] + + @model_validator(mode="after") + def check_predefined_model(self): + if self.predefined_model: + logger.info(f"Using predefined model: `{self.predefined_model.name}`") + model = MODELS.get(self.predefined_model.name)( + **self.predefined_model.params + ) + nodes, losses, metrics, visualizers = model.generate_model( + include_nodes=self.predefined_model.include_nodes, + include_losses=self.predefined_model.include_losses, + include_metrics=self.predefined_model.include_metrics, + include_visualizers=self.predefined_model.include_visualizers, + ) + self.nodes += nodes + self.losses += losses + self.metrics += metrics + self.visualizers += visualizers + + return self + + @model_validator(mode="after") + def check_graph(self): + graph = {node.override_name or node.name: node.inputs for node in self.nodes} + if not is_acyclic(graph): + raise ValueError("Model graph is not acyclic.") + if not self.outputs: + outputs: list[str] = [] # nodes which are not inputs to any nodes + inputs = set(node_name for node in self.nodes for node_name in node.inputs) + for node in self.nodes: + name = node.override_name or node.name + if name not in inputs: + outputs.append(name) + self.outputs = outputs + if self.nodes and not self.outputs: + raise ValueError("No outputs specified.") + return self + + model_config = { + "json_schema_extra": { + "if": {"properties": {"predefined_model": {"type": "null"}}}, + "then": {"properties": {"nodes": {"type": "array"}}}, + } + } + + +class TrackerConfig(BaseModel): + project_name: str | None = None + project_id: str | None = None + run_name: str | None = None + run_id: str | None = None + save_directory: str = "output" + is_tensorboard: bool = True + is_wandb: bool = False + wandb_entity: str | None = None + is_mlflow: bool = False + + +class DatasetConfig(BaseModel): + dataset_name: str | None = None + dataset_id: str | None = None + team_name: str | None = None + team_id: str | None = None + bucket_type: BucketType = BucketType.INTERNAL + bucket_storage: BucketStorage = BucketStorage.LOCAL + json_mode: bool = False + train_view: str = "train" + val_view: str = "val" + test_view: str = "test" + + @field_serializer("bucket_storage", "bucket_type") + def get_enum_value(self, v: Enum, _) -> str: + return str(v.value) + + model_config = { + "json_schema_extra": { + "anyOf": [ + { + "allOf": [ + {"required": ["dataset_name"]}, + {"properties": {"dataset_name": {"type": "string"}}}, + ] + }, + { + "allOf": [ + {"required": ["dataset_id"]}, + {"properties": {"dataset_id": {"type": "string"}}}, + ] + }, + ] + }, + } + + +class NormalizeAugmentationConfig(BaseModel): + active: bool = True + params: dict[str, Any] = { + "mean": [0.485, 0.456, 0.406], + 
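+        # NOTE: these defaults (together with `std` below) are the standard
+        # ImageNet normalization statistics.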
"std": [0.229, 0.224, 0.225], + } + + +class AugmentationConfig(BaseModel): + name: str + params: dict[str, Any] = {} + + +class PreprocessingConfig(BaseModel): + train_image_size: Annotated[ + list[int], Field(default=[256, 256], min_length=2, max_length=2) + ] = [256, 256] + keep_aspect_ratio: bool = True + train_rgb: bool = True + normalize: NormalizeAugmentationConfig = NormalizeAugmentationConfig() + augmentations: list[AugmentationConfig] = [] + + @model_validator(mode="after") + def check_normalize(self): + if self.normalize.active: + self.augmentations.append( + AugmentationConfig(name="Normalize", params=self.normalize.params) + ) + return self + + +class CallbackConfig(BaseModel): + name: str + active: bool = True + params: dict[str, Any] = {} + + +class OptimizerConfig(BaseModel): + name: str = "Adam" + params: dict[str, Any] = {} + + +class SchedulerConfig(BaseModel): + name: str = "ConstantLR" + params: dict[str, Any] = {} + + +class TrainerConfig(BaseModel): + preprocessing: PreprocessingConfig = PreprocessingConfig() + + accelerator: Literal["auto", "cpu", "gpu"] = "auto" + devices: int | list[int] | str = "auto" + strategy: Literal["auto", "ddp"] = "auto" + num_sanity_val_steps: int = 2 + profiler: Literal["simple", "advanced"] | None = None + verbose: bool = True + + batch_size: int = 32 + accumulate_grad_batches: int = 1 + use_weighted_sampler: bool = False + epochs: int = 100 + num_workers: int = 2 + train_metrics_interval: int = -1 + validation_interval: int = 1 + num_log_images: int = 4 + skip_last_batch: bool = True + log_sub_losses: bool = True + save_top_k: int = 3 + + callbacks: list[CallbackConfig] = [] + + optimizer: OptimizerConfig = OptimizerConfig() + scheduler: SchedulerConfig = SchedulerConfig() + + @model_validator(mode="after") + def check_num_workes_platform(self): + if ( + sys.platform == "win32" or sys.platform == "darwin" + ) and self.num_workers != 0: + self.num_workers = 0 + logger.warning( + "Setting `num_workers` to 0 because of platform compatibility." 
+            )
+        return self
+
+
+class OnnxExportConfig(BaseModel):
+    opset_version: int = 12
+    dynamic_axes: dict[str, Any] | None = None
+
+
+class BlobconverterExportConfig(BaseModel):
+    active: bool = False
+    shaves: int = 6
+
+
+class ExportConfig(BaseModel):
+    export_save_directory: str = "output_export"
+    input_shape: list[int] | None = None
+    export_model_name: str = "model"
+    data_type: Literal["INT8", "FP16", "FP32"] = "FP16"
+    reverse_input_channels: bool = True
+    scale_values: list[float] | None = None
+    mean_values: list[float] | None = None
+    onnx: OnnxExportConfig = OnnxExportConfig()
+    blobconverter: BlobconverterExportConfig = BlobconverterExportConfig()
+    upload_url: str | None = None
+
+    @model_validator(mode="after")
+    def check_values(self):
+        def pad_values(values: float | list[float] | None):
+            if values is None:
+                return None
+            if isinstance(values, float):
+                return [values] * 3
+            # pass lists through unchanged instead of implicitly returning None
+            return values
+
+        self.scale_values = pad_values(self.scale_values)
+        self.mean_values = pad_values(self.mean_values)
+        return self
+
+
+class StorageConfig(BaseModel):
+    active: bool = True
+    storage_type: Literal["local", "remote"] = "local"
+
+
+class TunerConfig(BaseModel):
+    study_name: str = "test-study"
+    use_pruner: bool = True
+    n_trials: int | None = 15
+    timeout: int | None = None
+    storage: StorageConfig = StorageConfig()
+    params: Annotated[
+        dict[str, list[str | int | float | bool]], Field(default={}, min_length=1)
+    ] = {}
+
+    model_config = {"json_schema_extra": {"required": ["params"]}}
+
+
+class Config(LuxonisConfig):
+    use_rich_text: bool = True
+    model: ModelConfig
+    dataset: DatasetConfig = DatasetConfig()
+    tracker: TrackerConfig = TrackerConfig()
+    trainer: TrainerConfig = TrainerConfig()
+    exporter: ExportConfig = ExportConfig()
+    tuner: TunerConfig = TunerConfig()
+    ENVIRON: Environ = Field(Environ(), exclude=True)
+
+    @model_validator(mode="before")
+    @classmethod
+    def check_tuner_init(cls, data: Any) -> Any:
+        if isinstance(data, dict):
+            if data.get("tuner") and not data.get("tuner", {}).get("params"):
+                del data["tuner"]
+                logger.warning(
+                    "`tuner` block specified but no `tuner.params` given. "
+                    "If you want to tune values, you have to specify "
+                    "at least one parameter."
+                )
+        return data
+
+    @model_validator(mode="before")
+    @classmethod
+    def check_environment(cls, data: Any) -> Any:
+        if "ENVIRON" in data:
+            logger.warning(
+                "Specifying `ENVIRON` section in config file is not recommended. "
+                "Please use environment variables or a .env file instead."
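+                # Editor's aside (usage sketch for the `pad_values` helper
+                # defined in `ExportConfig.check_values` above): a scalar is
+                # broadcast to one value per RGB channel, lists pass through:
+                #
+                #     pad_values(255.0)            -> [255.0, 255.0, 255.0]
+                #     pad_values([0.1, 0.2, 0.3])  -> [0.1, 0.2, 0.3]
+                #     pad_values(None)             -> None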
+            )
+        return data
+
+    @model_validator(mode="before")
+    @classmethod
+    def setup_logging(cls, data: Any) -> Any:
+        if isinstance(data, dict):
+            if data.get("use_rich_text", True):
+                setup_logging(use_rich=True)
+        return data
+
+    @classmethod
+    def get_config(
+        cls,
+        cfg: str | dict[str, Any] | None = None,
+        overrides: dict[str, Any] | None = None,
+    ):
+        instance = super().get_config(cfg, overrides)
+        if not isinstance(cfg, str):
+            return instance
+        fs = LuxonisFileSystem(cfg)
+        if fs.is_mlflow:
+            logger.info("Setting `project_id` and `run_id` to config's MLFlow run")
+            instance.tracker.project_id = fs.experiment_id
+            instance.tracker.run_id = fs.run_id
+        return instance
diff --git a/luxonis_train/utils/general.py b/luxonis_train/utils/general.py
new file mode 100644
index 00000000..9ea5884d
--- /dev/null
+++ b/luxonis_train/utils/general.py
@@ -0,0 +1,299 @@
+import logging
+import math
+from typing import Generator, TypeVar
+
+from luxonis_ml.data import LuxonisDataset
+from pydantic import BaseModel
+from torch import Size, Tensor
+from torch.utils.data import DataLoader
+
+from luxonis_train.utils.boxutils import anchors_from_dataset
+from luxonis_train.utils.types import LabelType, Packet
+
+
+# TODO: could be moved to luxonis-ml?
+# TODO: support multiclass keypoints
+class DatasetMetadata:
+    """Metadata about the dataset."""
+
+    def __init__(
+        self,
+        *,
+        classes: dict[LabelType, list[str]] | None = None,
+        n_classes: int | None = None,
+        n_keypoints: int | None = None,
+        keypoint_names: list[str] | None = None,
+        connectivity: list[tuple[int, int]] | None = None,
+        loader: DataLoader | None = None,
+    ):
+        """An object containing metadata about the dataset. Used to infer the number of
+        classes, number of keypoints, I{etc.} instead of passing them as arguments to
+        the model.
+
+        @type classes: dict[LabelType, list[str]] | None
+        @param classes: Dictionary mapping label types to lists of class names. If not
+            provided, will be inferred from the dataset loader.
+        @type n_classes: int | None
+        @param n_classes: Number of classes for each label type.
+        @type n_keypoints: int | None
+        @param n_keypoints: Number of keypoints in the dataset.
+        @type keypoint_names: list[str] | None
+        @param keypoint_names: List of keypoint names.
+        @type connectivity: list[tuple[int, int]] | None
+        @param connectivity: List of edges in the skeleton graph.
+        @type loader: DataLoader | None
+        @param loader: Dataset loader.
+        """
+        if classes is None and n_classes is not None:
+            classes = {
+                LabelType(lbl): [str(i) for i in range(n_classes)]
+                for lbl in LabelType.__members__
+            }
+        self._classes = classes
+        self._keypoint_names = keypoint_names
+        self._connectivity = connectivity
+        self._n_keypoints = n_keypoints
+        if self._n_keypoints is None and self._keypoint_names is not None:
+            self._n_keypoints = len(self._keypoint_names)
+        self._loader = loader
+
+    @property
+    def classes(self) -> dict[LabelType, list[str]]:
+        """Dictionary mapping label types to lists of class names.
+
+        @type: dict[LabelType, list[str]]
+        @raises ValueError: If classes were not provided during initialization.
+        """
+        if self._classes is None:
+            raise ValueError(
+                "Trying to access `classes`, but they were not "
+                "provided during initialization."
+            )
+        return self._classes
+
+    def n_classes(self, label_type: LabelType | None) -> int:
+        """Gets the number of classes for the specified label type.
+
+        @type label_type: L{LabelType} | None
+        @param label_type: Label type to get the number of classes for.
+        @rtype: int
+        @return: Number of classes for the specified label type.
+        @raises ValueError: If the specified label type is not present in the
+            dataset.
+        @raises ValueError: If the dataset contains a different number of classes
+            for different label types.
+        """
+        if label_type is not None:
+            if label_type not in self.classes:
+                raise ValueError(
+                    f"Task type {label_type.name} is not present in the dataset."
+                )
+            return len(self.classes[label_type])
+        n_classes = len(list(self.classes.values())[0])
+        for classes in self.classes.values():
+            if len(classes) != n_classes:
+                raise ValueError(
+                    "The dataset contains a different number of classes "
+                    "for different tasks."
+                )
+        return n_classes
+
+    def class_names(self, label_type: LabelType | None) -> list[str]:
+        """Gets the class names for the specified label type.
+
+        @type label_type: L{LabelType} | None
+        @param label_type: Label type to get the class names for.
+        @rtype: list[str]
+        @return: List of class names for the specified label type.
+        @raises ValueError: If the specified label type is not present in the
+            dataset.
+        @raises ValueError: If the dataset contains different class names for
+            different label types.
+        """
+        if label_type is not None:
+            if label_type not in self.classes:
+                raise ValueError(
+                    f"Task type {label_type.name} is not present in the dataset."
+                )
+            return self.classes[label_type]
+        class_names = list(self.classes.values())[0]
+        for classes in self.classes.values():
+            if classes != class_names:
+                raise ValueError(
+                    "The dataset contains different class names for different tasks."
+                )
+        return class_names
+
+    def autogenerate_anchors(self, n_heads: int) -> tuple[list[list[float]], float]:
+        """Automatically generates anchors for the provided dataset.
+
+        @type n_heads: int
+        @param n_heads: Number of heads to generate anchors for.
+        @rtype: tuple[list[list[float]], float]
+        @return: List of anchors of shape (n_heads, 6) and the best possible
+            recall of the anchors.
+        @raises ValueError: If the dataset loader was not provided during
+            initialization.
+        """
+        if self._loader is None:
+            raise ValueError(
+                "Cannot generate anchors without a dataset loader. "
+                "Please provide a dataset loader to the constructor "
+                "or call the `set_loader` method."
+            )
+
+        proposed_anchors, recall = anchors_from_dataset(
+            self._loader, n_anchors=n_heads * 3
+        )
+        return proposed_anchors.reshape(-1, 6).tolist(), recall
+
+    def set_loader(self, loader: DataLoader) -> None:
+        """Sets the dataset loader.
+
+        @type loader: DataLoader
+        @param loader: Dataset loader.
+        """
+        self._loader = loader
+
+    @classmethod
+    def from_dataset(cls, dataset: LuxonisDataset) -> "DatasetMetadata":
+        """Creates a L{DatasetMetadata} object from a L{LuxonisDataset}.
+
+        @type dataset: LuxonisDataset
+        @param dataset: Dataset to create the metadata from.
+        @rtype: DatasetMetadata
+        @return: Instance of L{DatasetMetadata} created from the provided dataset.
+        """
+        _, classes = dataset.get_classes()
+        skeletons = dataset.get_skeletons()
+
+        keypoint_names = None
+        connectivity = None
+
+        if len(skeletons) == 1:
+            name = list(skeletons.keys())[0]
+            keypoint_names = skeletons[name]["labels"]
+            connectivity = skeletons[name]["edges"]
+
+        elif len(skeletons) > 1:
+            raise NotImplementedError(
+                "The dataset defines multiclass keypoint detection. "
+                "This is not yet supported."
+ ) + + return cls( + classes=classes, + keypoint_names=keypoint_names, + connectivity=connectivity, + ) + + +def make_divisible(x: int | float, divisor: int) -> int: + """Upward revision the value x to make it evenly divisible by the divisor.""" + return math.ceil(x / divisor) * divisor + + +def infer_upscale_factor( + in_height: int, orig_height: int, strict: bool = True, warn: bool = True +) -> int: + """Infer the upscale factor from the input height and original height.""" + num_up = math.log2(orig_height) - math.log2(in_height) + if num_up.is_integer(): + return int(num_up) + elif not strict: + if warn: + logging.getLogger(__name__).warning( + f"Upscale factor is not an integer: {num_up}. " + "Output shape will not be the same as input shape." + ) + return round(num_up) + else: + raise ValueError( + f"Upscale factor is not an integer: {num_up}. " + "Output shape will not be the same as input shape." + ) + + +def get_shape_packet(packet: Packet[Tensor]) -> Packet[Size]: + shape_packet: Packet[Size] = {} + for name, value in packet.items(): + shape_packet[name] = [x.shape for x in value] + return shape_packet + + +def is_acyclic(graph: dict[str, list[str]]) -> bool: + """Tests if graph is acyclic. + + @type graph: dict[str, list[str]] + @param graph: Graph in a format of a dictionary of predecessors. Keys are node + names, values are inputs to the node (list of node names). + @rtype: bool + @return: True if graph is acyclic, False otherwise. + """ + graph = graph.copy() + + def dfs(node: str, visited: set[str], recursion_stack: set[str]): + visited.add(node) + recursion_stack.add(node) + + for predecessor in graph.get(node, []): + if predecessor in recursion_stack: + return True + if predecessor not in visited: + if dfs(predecessor, visited, recursion_stack): + return True + + recursion_stack.remove(node) + return False + + visited: set[str] = set() + recursion_stack: set[str] = set() + + for node in graph.keys(): + if node not in visited: + if dfs(node, visited, recursion_stack): + return False + + return True + + +def validate_packet(data: Packet[Tensor], protocol: type[BaseModel]) -> Packet[Tensor]: + return protocol(**data).model_dump() + + +T = TypeVar("T") + + +# TEST: +def traverse_graph( + graph: dict[str, list[str]], nodes: dict[str, T] +) -> Generator[tuple[str, T, list[str], set[str]], None, None]: + """Traverses the graph in topological order. + + @type graph: dict[str, list[str]] + @param graph: Graph in a format of a dictionary of predecessors. Keys are node + names, values are inputs to the node (list of node names). + @type nodes: dict[str, T] + @param nodes: Dictionary mapping node names to node objects. + @rtype: Generator[tuple[str, T, list[str], set[str]], None, None] + @return: Generator of tuples containing node name, node object, node dependencies + and unprocessed nodes. + @raises RuntimeError: If the graph is malformed. + """ + unprocessed_nodes = set(nodes.keys()) + processed: set[str] = set() + + while unprocessed_nodes: + unprocessed_nodes_copy = unprocessed_nodes.copy() + for node_name in unprocessed_nodes_copy: + node_dependencies = graph[node_name] + if not node_dependencies or all( + dependency in processed for dependency in node_dependencies + ): + yield node_name, nodes[node_name], node_dependencies, unprocessed_nodes + processed.add(node_name) + unprocessed_nodes.remove(node_name) + + if unprocessed_nodes_copy == unprocessed_nodes: + raise RuntimeError( + "Malformed graph. " + "Please check that all nodes are connected in a directed acyclic graph." 
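+                # Editor's aside (hedged usage sketch, assuming the helpers in
+                # this module): for the predecessor dictionary
+                #
+                #     graph = {"backbone": [], "neck": ["backbone"], "head": ["neck"]}
+                #     nodes = {name: object() for name in graph}
+                #
+                #     assert is_acyclic(graph)
+                #     order = [name for name, *_ in traverse_graph(graph, nodes)]
+                #     # order == ["backbone", "neck", "head"]
+                #
+                # a node is yielded only after all of its predecessors have been
+                # processed.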
+ ) diff --git a/luxonis_train/utils/loaders/__init__.py b/luxonis_train/utils/loaders/__init__.py new file mode 100644 index 00000000..fe5cc4e8 --- /dev/null +++ b/luxonis_train/utils/loaders/__init__.py @@ -0,0 +1,4 @@ +from .base_loader import collate_fn +from .luxonis_loader_torch import LuxonisLoaderTorch + +__all__ = ["LuxonisLoaderTorch", "collate_fn"] diff --git a/luxonis_train/utils/loaders/base_loader.py b/luxonis_train/utils/loaders/base_loader.py new file mode 100644 index 00000000..93f3fd0c --- /dev/null +++ b/luxonis_train/utils/loaders/base_loader.py @@ -0,0 +1,95 @@ +from abc import ABC, abstractmethod, abstractproperty + +import torch +from luxonis_ml.utils.registry import AutoRegisterMeta +from torch import Size, Tensor +from torch.utils.data import Dataset + +from luxonis_train.utils.registry import LOADERS +from luxonis_train.utils.types import Labels, LabelType + +LuxonisLoaderTorchOutput = tuple[Tensor, Labels] +"""LuxonisLoaderTorchOutput is a tuple of images and corresponding labels.""" + + +class BaseLoaderTorch( + Dataset[LuxonisLoaderTorchOutput], + ABC, + metaclass=AutoRegisterMeta, + register=False, + registry=LOADERS, +): + """Base abstract loader class that enforces LuxonisLoaderTorchOutput output label + structure.""" + + @abstractproperty + def input_shape(self) -> Size: + """Input shape in [N,C,H,W] format.""" + ... + + @abstractmethod + def __len__(self) -> int: + """Returns length of the dataset.""" + ... + + @abstractmethod + def __getitem__(self, idx: int) -> LuxonisLoaderTorchOutput: + """Loads sample from dataset. + + @type idx: int + @param idx: Sample index. + @rtype: L{LuxonisLoaderTorchOutput} + @return: Sample's data in L{LuxonisLoaderTorchOutput} format + """ + ... + + +def collate_fn( + batch: list[LuxonisLoaderTorchOutput], +) -> tuple[Tensor, dict[LabelType, Tensor]]: + """Default collate function used for training. + + @type batch: list[LuxonisLoaderTorchOutput] + @param batch: List of images and their annotations in the LuxonisLoaderTorchOutput + format. + @rtype: tuple[Tensor, dict[LabelType, Tensor]] + @return: Tuple of images and annotations in the format expected by the model. 
+ """ + zipped = zip(*batch) + imgs, anno_dicts = zipped + imgs = torch.stack(imgs, 0) + + present_annotations = anno_dicts[0].keys() + out_annotations: dict[LabelType, Tensor] = { + anno: torch.empty(0) for anno in present_annotations + } + + if LabelType.CLASSIFICATION in present_annotations: + class_annos = [anno[LabelType.CLASSIFICATION] for anno in anno_dicts] + out_annotations[LabelType.CLASSIFICATION] = torch.stack(class_annos, 0) + + if LabelType.SEGMENTATION in present_annotations: + seg_annos = [anno[LabelType.SEGMENTATION] for anno in anno_dicts] + out_annotations[LabelType.SEGMENTATION] = torch.stack(seg_annos, 0) + + if LabelType.BOUNDINGBOX in present_annotations: + bbox_annos = [anno[LabelType.BOUNDINGBOX] for anno in anno_dicts] + label_box: list[Tensor] = [] + for i, box in enumerate(bbox_annos): + l_box = torch.zeros((box.shape[0], 6)) + l_box[:, 0] = i # add target image index for build_targets() + l_box[:, 1:] = box + label_box.append(l_box) + out_annotations[LabelType.BOUNDINGBOX] = torch.cat(label_box, 0) + + if LabelType.KEYPOINT in present_annotations: + keypoint_annos = [anno[LabelType.KEYPOINT] for anno in anno_dicts] + label_keypoints: list[Tensor] = [] + for i, points in enumerate(keypoint_annos): + l_kps = torch.zeros((points.shape[0], points.shape[1] + 1)) + l_kps[:, 0] = i # add target image index for build_targets() + l_kps[:, 1:] = points + label_keypoints.append(l_kps) + out_annotations[LabelType.KEYPOINT] = torch.cat(label_keypoints, 0) + + return imgs, out_annotations diff --git a/luxonis_train/utils/loaders/luxonis_loader_torch.py b/luxonis_train/utils/loaders/luxonis_loader_torch.py new file mode 100644 index 00000000..a0e1f324 --- /dev/null +++ b/luxonis_train/utils/loaders/luxonis_loader_torch.py @@ -0,0 +1,39 @@ +import numpy as np +from luxonis_ml.data import Augmentations, LuxonisDataset, LuxonisLoader +from torch import Size, Tensor + +from .base_loader import BaseLoaderTorch, LuxonisLoaderTorchOutput + + +class LuxonisLoaderTorch(BaseLoaderTorch): + def __init__( + self, + dataset: LuxonisDataset, + view: str = "train", + stream: bool = False, + augmentations: Augmentations | None = None, + ): + self.base_loader = LuxonisLoader( + dataset=dataset, + view=view, + stream=stream, + augmentations=augmentations, + ) + + def __len__(self) -> int: + return len(self.base_loader) + + @property + def input_shape(self) -> Size: + img, _ = self[0] + return Size([1, *img.shape]) + + def __getitem__(self, idx: int) -> LuxonisLoaderTorchOutput: + img, annotations = self.base_loader[idx] + + img = np.transpose(img, (2, 0, 1)) # HWC to CHW + tensor_img = Tensor(img) + for key in annotations: + annotations[key] = Tensor(annotations[key]) # type: ignore + + return tensor_img, annotations diff --git a/luxonis_train/utils/optimizers.py b/luxonis_train/utils/optimizers.py new file mode 100644 index 00000000..7583cef9 --- /dev/null +++ b/luxonis_train/utils/optimizers.py @@ -0,0 +1,19 @@ +from torch import optim + +from luxonis_train.utils.registry import OPTIMIZERS + +for optimizer in [ + optim.Adadelta, + optim.Adagrad, + optim.Adam, + optim.AdamW, + optim.SparseAdam, + optim.Adamax, + optim.ASGD, + optim.LBFGS, + optim.NAdam, + optim.RAdam, + optim.RMSprop, + optim.SGD, +]: + OPTIMIZERS.register_module(module=optimizer) diff --git a/luxonis_train/utils/registry.py b/luxonis_train/utils/registry.py new file mode 100644 index 00000000..7f76df7c --- /dev/null +++ b/luxonis_train/utils/registry.py @@ -0,0 +1,31 @@ +"""This module implements a metaclass for automatic 
registration of classes.""" + + +from luxonis_ml.utils.registry import Registry + +CALLBACKS = Registry(name="callbacks") +"""Registry for all callbacks.""" + +LOADERS = Registry(name="loaders") +"""Registry for all loaders.""" + +LOSSES = Registry(name="losses") +"""Registry for all losses.""" + +METRICS = Registry(name="metrics") +"""Registry for all metrics.""" + +MODELS = Registry(name="models") +"""Registry for all models.""" + +NODES = Registry(name="nodes") +"""Registry for all nodes.""" + +OPTIMIZERS = Registry(name="optimizers") +"""Registry for all optimizers.""" + +SCHEDULERS = Registry(name="schedulers") +"""Registry for all schedulers.""" + +VISUALIZERS = Registry(name="visualizers") +"""Registry for all visualizers.""" diff --git a/luxonis_train/utils/schedulers.py b/luxonis_train/utils/schedulers.py new file mode 100644 index 00000000..488a7498 --- /dev/null +++ b/luxonis_train/utils/schedulers.py @@ -0,0 +1,22 @@ +from torch.optim import lr_scheduler + +from luxonis_train.utils.registry import SCHEDULERS + +for scheduler in [ + lr_scheduler.LambdaLR, + lr_scheduler.MultiplicativeLR, + lr_scheduler.StepLR, + lr_scheduler.MultiStepLR, + lr_scheduler.ConstantLR, + lr_scheduler.LinearLR, + lr_scheduler.ExponentialLR, + lr_scheduler.PolynomialLR, + lr_scheduler.CosineAnnealingLR, + lr_scheduler.ChainedScheduler, + lr_scheduler.SequentialLR, + lr_scheduler.ReduceLROnPlateau, + lr_scheduler.CyclicLR, + lr_scheduler.OneCycleLR, + lr_scheduler.CosineAnnealingWarmRestarts, +]: + SCHEDULERS.register_module(module=scheduler) diff --git a/luxonis_train/utils/tracker.py b/luxonis_train/utils/tracker.py new file mode 100644 index 00000000..13c77cb2 --- /dev/null +++ b/luxonis_train/utils/tracker.py @@ -0,0 +1,8 @@ +from lightning.pytorch.loggers.logger import Logger +from luxonis_ml.tracker import LuxonisTracker + + +class LuxonisTrackerPL(LuxonisTracker, Logger): + """Implementation of LuxonisTracker that is compatible with PytorchLightning.""" + + ... diff --git a/luxonis_train/utils/types.py b/luxonis_train/utils/types.py new file mode 100644 index 00000000..dbbf471e --- /dev/null +++ b/luxonis_train/utils/types.py @@ -0,0 +1,65 @@ +from typing import Annotated, Any, Literal, TypeVar + +from luxonis_ml.enums import LabelType +from pydantic import BaseModel, Field, ValidationError +from torch import Size, Tensor + +Kwargs = dict[str, Any] +OutputTypes = Literal["boxes", "class", "keypoints", "segmentation", "features"] +Labels = dict[LabelType, Tensor] + +AttachIndexType = Literal["all"] | int | tuple[int, int] | tuple[int, int, int] +"""AttachIndexType is used to specify to which output of the prevoius node does the +current node attach to. + +It can be either "all" (all outputs), an index of the output or a tuple of indices of +the output (specifying a range of outputs). +""" + +T = TypeVar("T", Tensor, Size) +Packet = dict[str, list[T]] +"""Packet is a dictionary containing a list of objects of type T. + +It is used to pass data between different nodes of the network graph. +""" + + +class IncompatibleException(Exception): + """Raised when two parts of the model are incompatible with each other.""" + + @classmethod + def from_validation_error(cls, val_error: ValidationError, class_name: str): + return cls( + f"{class_name} received an input not conforming to the protocol. " + f"Validation error: {val_error.errors(include_input=False, include_url=False)}." 
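+            # Editor's aside (hypothetical node name "MyNode", illustrative
+            # only): this is how a failed protocol check typically surfaces:
+            #
+            #     from pydantic import ValidationError
+            #     try:
+            #         BBoxProtocol(boxes=[])  # fails: min_length=1
+            #     except ValidationError as e:
+            #         raise IncompatibleException.from_validation_error(
+            #             e, "MyNode"
+            #         )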
+ ) + + @classmethod + def from_missing_label( + cls, label: LabelType, present_labels: list[LabelType], class_name: str + ): + return cls( + f"{class_name} requires {label} label, but it was not found in " + f"the label dictionary. Available labels: {present_labels}." + ) + + +class BaseProtocol(BaseModel): + class Config: + arbitrary_types_allowed = True + + +class SegmentationProtocol(BaseProtocol): + segmentation: Annotated[list[Tensor], Field(min_length=1)] + + +class KeypointProtocol(BaseProtocol): + keypoints: Annotated[list[Tensor], Field(min_length=1)] + + +class BBoxProtocol(BaseProtocol): + boxes: Annotated[list[Tensor], Field(min_length=1)] + + +class FeaturesProtocol(BaseProtocol): + features: Annotated[list[Tensor], Field(min_length=1)] diff --git a/media/coverage_badge.svg b/media/coverage_badge.svg new file mode 100644 index 00000000..12876e69 --- /dev/null +++ b/media/coverage_badge.svg @@ -0,0 +1,21 @@ + + + + + + + + + + + + + + + + coverage + coverage + 78% + 78% + + diff --git a/media/example_viz/bbox.png b/media/example_viz/bbox.png new file mode 100644 index 00000000..5fd9f26a Binary files /dev/null and b/media/example_viz/bbox.png differ diff --git a/media/example_viz/class.png b/media/example_viz/class.png new file mode 100644 index 00000000..5b07651a Binary files /dev/null and b/media/example_viz/class.png differ diff --git a/media/example_viz/kpts.png b/media/example_viz/kpts.png new file mode 100644 index 00000000..a5534e83 Binary files /dev/null and b/media/example_viz/kpts.png differ diff --git a/media/example_viz/multi.png b/media/example_viz/multi.png new file mode 100644 index 00000000..e01b9817 Binary files /dev/null and b/media/example_viz/multi.png differ diff --git a/media/example_viz/segmentation.png b/media/example_viz/segmentation.png new file mode 100644 index 00000000..67b89b4b Binary files /dev/null and b/media/example_viz/segmentation.png differ diff --git a/media/pybadge.svg b/media/pybadge.svg new file mode 100644 index 00000000..983d6f42 --- /dev/null +++ b/media/pybadge.svg @@ -0,0 +1 @@ +pythonpython3.10 | 3.113.10 | 3.11 \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..4187875b --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,59 @@ +[project] +name = "luxonis-train" +version = "0.1.0" +description = "Luxonis training framework for seamless training of various neural networks." 
+readme = "README.md" +requires-python = ">=3.10" +license = { file = "LICENSE" } +authors = [{ name = "Luxonis", email = "support@luxonis.com" }] +maintainers = [{ name = "Luxonis", email = "support@luxonis.com" }] +keywords = ["ml", "training", "luxonis", "oak"] +dynamic = ["dependencies", "optional-dependencies"] +classifiers = [ + "License :: OSI Approved :: Apache Software License", + "Development Status :: 3 - Alpha", + "Programming Language :: Python :: 3.10", + "Topic :: Scientific/Engineering :: Artificial Intelligence", + "Topic :: Scientific/Engineering :: Image Processing", + "Topic :: Scientific/Engineering :: Image Recognition", +] + +[project.scripts] +luxonis_train = "tools.main:main" + +[project.urls] +repository = "https://github.com/luxonis/models" +issues = "https://github.com/luxonis/models/issues" + +[build-system] +requires = ["setuptools", "wheel"] +build-backend = "setuptools.build_meta" + +[tool.setuptools.packages.find] +where = ["."] + +[tool.setuptools.dynamic] +dependencies = { file = ["requirements.txt"] } +optional-dependencies = { dev = { file = ["requirements-dev.txt"] } } + +[tool.ruff] +target-version = "py310" +line-length = 88 +indent-width = 4 + +[tool.ruff.lint] +ignore = ["F403", "B028", "B905", "D1", "W191"] +select = ["E4", "E7", "E9", "F", "W", "B", "I"] + +[tool.ruff.pydocstyle] +convention = "google" + +[tool.docformatter] +black = true + +[tool.mypy] +python_version = "3.10" +ignore_missing_imports = true + +[tool.pyright] +typeCheckingMode = "basic" diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 00000000..a919d265 --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,5 @@ +coverage-badge>=1.1.0 +gdown>=4.2.0 +pre-commit>=3.2.1 +opencv-stubs>=0.0.8 +pytest-cov>=4.1.0 diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 00000000..eecf828e --- /dev/null +++ b/requirements.txt @@ -0,0 +1,14 @@ +blobconverter>=1.4.2 +lightning>=2.0.0 +luxonis-ml[all]>=0.0.1 +onnx>=1.12.0 +onnxruntime>=1.13.1 +onnxsim>=0.4.10 +optuna>=3.2.0 +psycopg2-binary>=2.9.1 +pycocotools>=2.0.7 +rich>=13.0.0 +s3fs>=2023.0.0 +tensorboard>=2.10.1 +torchvision>=0.16.0 +typer>=0.9.0 diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py new file mode 100644 index 00000000..6e2196a6 --- /dev/null +++ b/tests/integration/conftest.py @@ -0,0 +1,159 @@ +import glob +import json +import os +import zipfile +from pathlib import Path + +import cv2 +import gdown +import numpy as np +import pytest +import torchvision +from luxonis_ml.data import LuxonisDataset +from luxonis_ml.utils import environ + +Path(environ.LUXONISML_BASE_PATH).mkdir(exist_ok=True) + + +def create_dataset(name: str) -> LuxonisDataset: + if LuxonisDataset.exists(name): + dataset = LuxonisDataset(name) + dataset.delete_dataset() + return LuxonisDataset(name) + + +@pytest.fixture(scope="session", autouse=True) +def create_coco_dataset(): + dataset = create_dataset("coco_test") + url = "https://drive.google.com/uc?id=1XlvFK7aRmt8op6-hHkWVKIJQeDtOwoRT" + output_zip = "../data/COCO_people_subset.zip" + output_folder = "../data/" + + if not os.path.exists(output_zip) and not os.path.exists( + os.path.join(output_folder, "COCO_people_subset") + ): + gdown.download(url, output_zip, quiet=False) + + with zipfile.ZipFile(output_zip, "r") as zip_ref: + zip_ref.extractall(output_folder) + + def COCO_people_subset_generator(): + img_dir = "../data/person_val2017_subset" + annot_file = "../data/person_keypoints_val2017.json" + im_paths = glob.glob(img_dir 
+ "/*.jpg") + nums = np.array([int(Path(path).stem) for path in im_paths]) + idxs = np.argsort(nums) + im_paths = list(np.array(im_paths)[idxs]) + with open(annot_file) as file: + data = json.load(file) + imgs = data["images"] + anns = data["annotations"] + + for path in im_paths: + gran = Path(path).name + img = [img for img in imgs if img["file_name"] == gran][0] + img_id = img["id"] + img_anns = [ann for ann in anns if ann["image_id"] == img_id] + + im = cv2.imread(path) + height, width, _ = im.shape + + if len(img_anns): + yield { + "file": path, + "class": "person", + "type": "classification", + "value": True, + } + + for ann in img_anns: + seg = ann["segmentation"] + if isinstance(seg, list): + poly = [] + for s in seg: + poly_arr = np.array(s).reshape(-1, 2) + poly += [ + (poly_arr[i, 0] / width, poly_arr[i, 1] / height) + for i in range(len(poly_arr)) + ] + yield { + "file": path, + "class": "person", + "type": "polyline", + "value": poly, + } + + x, y, w, h = ann["bbox"] + yield { + "file": path, + "class": "person", + "type": "box", + "value": (x / width, y / height, w / width, h / height), + } + + kps = np.array(ann["keypoints"]).reshape(-1, 3) + keypoint = [] + for kp in kps: + keypoint.append( + (float(kp[0] / width), float(kp[1] / height), int(kp[2])) + ) + yield { + "file": path, + "class": "person", + "type": "keypoints", + "value": keypoint, + } + + dataset.set_classes(["person"]) + + annot_file = "../data/person_keypoints_val2017.json" + with open(annot_file) as file: + data = json.load(file) + dataset.set_skeletons( + { + "person": { + "labels": data["categories"][0]["keypoints"], + "edges": (np.array(data["categories"][0]["skeleton"]) - 1).tolist(), + } + } + ) + dataset.add(COCO_people_subset_generator) # type: ignore + dataset.make_splits() + + +@pytest.fixture(scope="session", autouse=True) +def create_cifar10_dataset(): + dataset = create_dataset("cifar10_test") + cifar10_torch = torchvision.datasets.CIFAR10( + root="../data", train=False, download=True + ) + classes = [ + "airplane", + "automobile", + "bird", + "cat", + "deer", + "dog", + "frog", + "horse", + "ship", + "truck", + ] + + def CIFAR10_subset_generator(): + for i, (image, label) in enumerate(cifar10_torch): # type: ignore + if i == 1000: + break + path = f"../data/cifar_{i}.png" + image.save(path) + yield { + "file": path, + "class": classes[label], + "type": "classification", + "value": True, + } + + dataset.set_classes(classes) + + dataset.add(CIFAR10_subset_generator) # type: ignore + dataset.make_splits() diff --git a/tests/integration/test_sanity.py b/tests/integration/test_sanity.py new file mode 100644 index 00000000..8b6f872b --- /dev/null +++ b/tests/integration/test_sanity.py @@ -0,0 +1,85 @@ +import os +import shutil +import subprocess +from pathlib import Path + +import pytest + + +@pytest.fixture(scope="function", autouse=True) +def clear_output(): + shutil.rmtree("output", ignore_errors=True) + + +@pytest.mark.parametrize( + "config_file", [path for path in os.listdir("configs") if "model" in path] +) +def test_sanity(config_file): + opts = [ + "trainer.epochs", + "1", + "trainer.validation_interval", + "1", + "trainer.callbacks", + "[]", + ] + result = subprocess.run( + ["luxonis_train", "train", "--config", f"configs/{config_file}", *opts], + ) + assert result.returncode == 0 + + opts += ["model.weights", str(list(Path("output").rglob("*.ckpt"))[0])] + opts += ["exporter.onnx.opset_version", "11"] + + result = subprocess.run( + ["luxonis_train", "export", "--config", 
f"configs/{config_file}", *opts], + ) + + assert result.returncode == 0 + + result = subprocess.run( + ["luxonis_train", "eval", "--config", f"configs/{config_file}", *opts], + ) + + assert result.returncode == 0 + + save_dir = Path("sanity_infer_save_dir") + shutil.rmtree(save_dir, ignore_errors=True) + + result = subprocess.run( + [ + "luxonis_train", + "infer", + "--save-dir", + str(save_dir), + "--config", + f"configs/{config_file}", + *opts, + ], + ) + + assert result.returncode == 0 + assert save_dir.exists() + assert len(list(save_dir.rglob("*.png"))) > 0 + shutil.rmtree(save_dir, ignore_errors=True) + + +def test_tuner(): + Path("study_local.db").unlink(missing_ok=True) + result = subprocess.run( + [ + "luxonis_train", + "tune", + "--config", + "configs/example_tuning.yaml", + "trainer.epochs", + "1", + "trainer.validation_interval", + "1", + "trainer.callbacks", + "[]", + "tuner.n_trials", + "4", + ], + ) + assert result.returncode == 0 diff --git a/tests/unittests/__init__.py b/tests/unittests/__init__.py new file mode 100644 index 00000000..f9269fdf --- /dev/null +++ b/tests/unittests/__init__.py @@ -0,0 +1,2 @@ +# import warnings +# warnings.filterwarnings("module", category=DeprecationWarning) diff --git a/tests/unittests/test_losses/__init__.py b/tests/unittests/test_losses/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unittests/test_losses/test_bce_with_logits_loss.py b/tests/unittests/test_losses/test_bce_with_logits_loss.py new file mode 100644 index 00000000..27871019 --- /dev/null +++ b/tests/unittests/test_losses/test_bce_with_logits_loss.py @@ -0,0 +1,61 @@ +import pytest +import torch + +from luxonis_train.attached_modules.losses import BCEWithLogitsLoss + +torch.manual_seed(42) + + +def test_forward_pass(): + batch_sizes = [1, 2, 10, 11, 15, 64, 128, 255] + n_classes = [1, 2, 3, 4, 64] + + for bs in batch_sizes: + for n_cl in n_classes: + targets = torch.ones([bs, n_cl], dtype=torch.float32) + predictions = torch.full([bs, n_cl], 1.5) # logit + loss_fn = BCEWithLogitsLoss() + + loss = loss_fn.forward(predictions, targets) # -log(sigmoid(1.5)) = 0.2014 + + assert isinstance(loss, torch.Tensor) + assert loss.shape == torch.Size([]) + assert torch.round(loss, decimals=2) == 0.20 + + +def test_minimum(): + bs, n_classes = 10, 4 + + targets = torch.ones([bs, n_classes], dtype=torch.float32) + predictions = torch.full([bs, n_classes], 10e3) # logit + loss_fn = BCEWithLogitsLoss() + + loss = loss_fn.forward(predictions, targets) + assert torch.round(loss, decimals=2) == 0.0 + + targets = torch.zeros([bs, n_classes], dtype=torch.float32) + predictions = torch.full([bs, n_classes], -10e3) # logit + loss_fn = BCEWithLogitsLoss() + + loss = loss_fn.forward(predictions, targets) + assert torch.round(loss, decimals=2) == 0.0 + + +def test_weights(): + bs, n_classes = 10, 4 + + targets = torch.ones([bs, n_classes], dtype=torch.float32) + predictions = torch.rand([bs, n_classes]) * 10 - 5 # logit + + loss_fn_weight = BCEWithLogitsLoss( + pos_weight=torch.randint(1, 10, torch.Size((n_classes,))) + ) + loss_fn_no_weight = BCEWithLogitsLoss() + + loss_weight = loss_fn_weight.forward(predictions, targets) + loss_no_weight = loss_fn_no_weight.forward(predictions, targets) + assert loss_weight != loss_no_weight + + +if __name__ == "__main__": + pytest.main() diff --git a/tests/unittests/test_utils/__init__.py b/tests/unittests/test_utils/__init__.py new file mode 100644 index 00000000..e69de29b diff --git 
a/tests/unittests/test_utils/test_assigners/test_atts_assigner.py b/tests/unittests/test_utils/test_assigners/test_atts_assigner.py new file mode 100644 index 00000000..4512d9e5 --- /dev/null +++ b/tests/unittests/test_utils/test_assigners/test_atts_assigner.py @@ -0,0 +1,105 @@ +import torch + +from luxonis_train.utils.assigners.atts_assigner import ATSSAssigner + + +def test_init(): + assigner = ATSSAssigner(n_classes=80, topk=9) + assert assigner.n_classes == 80 + assert assigner.topk == 9 + + +def test_forward(): + bs = 10 + n_max_boxes = 5 + n_anchors = 100 + n_classes = 80 + topk = 9 + + assigner = ATSSAssigner(n_classes=n_classes, topk=topk) + anchor_bboxes = torch.rand(n_anchors, 4) + n_level_bboxes = [20, 30, 50] + gt_labels = torch.rand(bs, n_max_boxes, 1) + gt_bboxes = torch.zeros(bs, n_max_boxes, 4) + mask_gt = torch.rand(bs, n_max_boxes, 1) + pred_bboxes = torch.rand(bs, n_anchors, 4) + + labels, bboxes, scores, mask = assigner.forward( + anchor_bboxes, n_level_bboxes, gt_labels, gt_bboxes, mask_gt, pred_bboxes + ) + + assert labels.shape == (bs, n_anchors) + assert bboxes.shape == (bs, n_anchors, 4) + assert scores.shape == (bs, n_anchors, n_classes) + assert mask.shape == (bs, n_anchors) + + +def test_get_bbox_center(): + assigner = ATSSAssigner(n_classes=80, topk=9) + bbox = torch.tensor([[0, 0, 10, 10], [10, 10, 20, 20]]) + centers = assigner._get_bbox_center(bbox) + expected_centers = torch.tensor([[5, 5], [15, 15]]) + assert torch.all(torch.eq(centers, expected_centers)) + + +def test_select_topk_candidates(): + batch_size = 2 + n_max_boxes = 3 + n_anchors = 10 + topk = 2 + n_level_bboxes = [4, 6] # Mock number of boxes per level + + assigner = ATSSAssigner(n_classes=80, topk=topk) + distances = torch.rand(batch_size, n_max_boxes, n_anchors) + mask_gt = torch.ones(batch_size, n_max_boxes, 1) + + is_in_topk, topk_idxs = assigner._select_topk_candidates( + distances, n_level_bboxes, mask_gt + ) + + assert is_in_topk.shape == (batch_size, n_max_boxes, n_anchors) + assert topk_idxs.shape == (batch_size, n_max_boxes, topk * len(n_level_bboxes)) + + +def test_get_positive_samples(): + batch_size = 2 + n_max_boxes = 3 + n_anchors = 10 + topk = 2 + + assigner = ATSSAssigner(n_classes=80, topk=topk) + assigner.bs = batch_size + assigner.n_max_boxes = n_max_boxes + assigner.n_anchors = n_anchors + is_in_topk = torch.rand(batch_size, n_max_boxes, n_anchors) + topk_idxs = torch.randint(0, n_anchors, (batch_size, n_max_boxes, topk)) + overlaps = torch.rand(batch_size, n_max_boxes, n_anchors) + + is_pos = assigner._get_positive_samples(is_in_topk, topk_idxs, overlaps) + + assert is_pos.shape == (batch_size, n_max_boxes, n_anchors) + + +def test_get_final_assignments(): + batch_size = 2 + n_max_boxes = 3 + n_anchors = 10 + n_classes = 80 + + assigner = ATSSAssigner(n_classes=n_classes, topk=9) + assigner.bs = batch_size + assigner.n_anchors = n_anchors + assigner.n_max_boxes = n_max_boxes + + gt_labels = torch.randint(0, n_classes, (batch_size, n_max_boxes, 1)) + gt_bboxes = torch.rand(batch_size, n_max_boxes, 4) + assigned_gt_idx = torch.randint(0, n_max_boxes, (batch_size, n_anchors)) + mask_pos_sum = torch.randint(0, 2, (batch_size, n_anchors)) + + assigned_labels, assigned_bboxes, assigned_scores = assigner._get_final_assignments( + gt_labels, gt_bboxes, assigned_gt_idx, mask_pos_sum + ) + + assert assigned_labels.shape == (batch_size, n_anchors) + assert assigned_bboxes.shape == (batch_size, n_anchors, 4) + assert assigned_scores.shape == (batch_size, n_anchors, n_classes) diff 
--git a/tests/unittests/test_utils/test_assigners/test_tal_assigner.py b/tests/unittests/test_utils/test_assigners/test_tal_assigner.py new file mode 100644 index 00000000..bb2dd912 --- /dev/null +++ b/tests/unittests/test_utils/test_assigners/test_tal_assigner.py @@ -0,0 +1,161 @@ +import torch + +from luxonis_train.utils.assigners.tal_assigner import TaskAlignedAssigner + + +def test_init(): + assigner = TaskAlignedAssigner(n_classes=80, topk=13, alpha=1.0, beta=6.0, eps=1e-9) + assert assigner.n_classes == 80 + assert assigner.topk == 13 + assert assigner.alpha == 1.0 + assert assigner.beta == 6.0 + assert assigner.eps == 1e-9 + + +def test_forward(): + # Constants for clarity + batch_size = 10 + num_anchors = 100 + num_max_boxes = 5 + num_classes = 80 + + # Initialize the TaskAlignedAssigner + assigner = TaskAlignedAssigner(n_classes=num_classes, topk=13) + + # Create mock inputs + pred_scores = torch.rand(batch_size, num_anchors, 1) + pred_bboxes = torch.rand(batch_size, num_anchors, 4) + anchor_points = torch.rand(num_anchors, 2) + gt_labels = torch.rand(batch_size, num_max_boxes, 1) + gt_bboxes = torch.zeros(batch_size, num_max_boxes, 4) # no gt bboxes + mask_gt = torch.rand(batch_size, num_max_boxes, 1) + + # Call the forward method + labels, bboxes, scores, mask = assigner.forward( + pred_scores, pred_bboxes, anchor_points, gt_labels, gt_bboxes, mask_gt + ) + + # Assert the expected outcomes + assert labels.shape == (batch_size, num_anchors) + assert labels.unique().tolist() == [ + num_classes + ] # All labels should be num_classes as there are no GT boxes + assert bboxes.shape == (batch_size, num_anchors, 4) + assert torch.equal( + bboxes, torch.zeros_like(bboxes) + ) # All bboxes should be zero as there are no GT boxes + assert ( + scores.shape + == ( + batch_size, + num_anchors, + num_classes, + ) + ) # TODO: We have this in doc string: Returns: ... assigned scores of shape [bs, n_anchors, 1], + # it returns tensor of shape [bs, n_anchors, n_classes] instead + assert torch.equal( + scores, torch.zeros_like(scores) + ) # All scores should be zero as there are no GT boxes + assert mask.shape == (batch_size, num_anchors) + assert torch.equal( + mask, torch.zeros_like(mask) + ) # All mask values should be zero as there are no GT boxes + + +def test_get_alignment_metric(): + # Create mock inputs + bs = 2 # batch size + n_anchors = 5 + n_max_boxes = 3 + n_classes = 80 + + pred_scores = torch.rand( + bs, n_anchors, n_classes + ) # TODO: Same issue: works with n_classes instead of 1, change it in the doc string in the method itself!!! 
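+    # Editor's note (illustrative): the task-aligned metric computed by the
+    # assigner is t = s**alpha * u**beta, where s is the predicted score for the
+    # GT class and u is the predicted/GT IoU. With alpha=1.0, beta=6.0,
+    # s=0.8, u=0.9: t = 0.8 * 0.9**6 ≈ 0.425.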
+ pred_bboxes = torch.rand(bs, n_anchors, 4) + gt_labels = torch.randint(0, n_classes, (bs, n_max_boxes, 1)) + gt_bboxes = torch.rand(bs, n_max_boxes, 4) + + # Initialize the TaskAlignedAssigner + assigner = TaskAlignedAssigner( + n_classes=n_classes, topk=13, alpha=1.0, beta=6.0, eps=1e-9 + ) + assigner.bs = pred_scores.size(0) + assigner.n_max_boxes = gt_bboxes.size(1) + + # Call the method + align_metric, overlaps = assigner._get_alignment_metric( + pred_scores, pred_bboxes, gt_labels, gt_bboxes + ) + + # Assert the expected outcomes + assert align_metric.shape == (bs, n_max_boxes, n_anchors) + assert overlaps.shape == (bs, n_max_boxes, n_anchors) + assert align_metric.dtype == torch.float32 + assert overlaps.dtype == torch.float32 + assert (align_metric >= 0).all() and ( + align_metric <= 1 + ).all() # Alignment metric should be in the range [0, 1] + assert (overlaps >= 0).all() and ( + overlaps <= 1 + ).all() # IoU should be in the range [0, 1] + + +def test_select_topk_candidates(): + # Constants for the test + batch_size = 2 + num_max_boxes = 3 + num_anchors = 5 + topk = 2 + + metrics = torch.rand(batch_size, num_max_boxes, num_anchors) + mask_gt = torch.rand(batch_size, num_max_boxes, 1) + + # Initialize the TaskAlignedAssigner + assigner = TaskAlignedAssigner(n_classes=80, topk=topk) + + # Call the method + is_in_topk = assigner._select_topk_candidates( + metrics, + ) + topk_mask = mask_gt.repeat([1, 1, topk]).bool() + assert torch.equal( + assigner._select_topk_candidates(metrics), + assigner._select_topk_candidates(metrics, topk_mask=topk_mask), + ) + # Assert the expected outcomes + assert is_in_topk.shape == (batch_size, num_max_boxes, num_anchors) + assert is_in_topk.dtype == torch.float32 + + # Check that each ground truth has at most 'topk' anchors selected + assert (is_in_topk.sum(dim=-1) <= topk).all() + + +def test_get_final_assignments(): + # Constants for the test + batch_size = 2 + num_max_boxes = 3 + num_anchors = 5 + num_classes = 80 + + # Mock inputs + gt_labels = torch.randint(0, num_classes, (batch_size, num_max_boxes, 1)) + gt_bboxes = torch.rand(batch_size, num_max_boxes, 4) + assigned_gt_idx = torch.randint(0, num_max_boxes, (batch_size, num_anchors)) + mask_pos_sum = torch.randint(0, 2, (batch_size, num_anchors)) + + # Initialize the TaskAlignedAssigner + assigner = TaskAlignedAssigner(n_classes=num_classes, topk=13) + assigner.bs = batch_size # Set batch size + assigner.n_max_boxes = gt_bboxes.size(1) + + # Call the method + assigned_labels, assigned_bboxes, assigned_scores = assigner._get_final_assignments( + gt_labels, gt_bboxes, assigned_gt_idx, mask_pos_sum + ) + + # Assert the expected outcomes + assert assigned_labels.shape == (batch_size, num_anchors) + assert assigned_bboxes.shape == (batch_size, num_anchors, 4) + assert assigned_scores.shape == (batch_size, num_anchors, num_classes) + assert (assigned_labels >= 0).all() and (assigned_labels <= num_classes).all() diff --git a/tests/unittests/test_utils/test_assigners/test_utils.py b/tests/unittests/test_utils/test_assigners/test_utils.py new file mode 100644 index 00000000..bf849e25 --- /dev/null +++ b/tests/unittests/test_utils/test_assigners/test_utils.py @@ -0,0 +1,52 @@ +import torch + +from luxonis_train.utils.assigners.utils import ( + batch_iou, + candidates_in_gt, + fix_collisions, +) + + +def test_fix_collisions(): + batch_size = 2 + n_max_boxes = 3 + n_anchors = 4 + + mask_pos = torch.randint(0, 2, (batch_size, n_max_boxes, n_anchors)) + overlaps = torch.rand(batch_size, n_max_boxes, 
n_anchors) + + assigned_gt_idx, mask_pos_sum, new_mask_pos = fix_collisions( + mask_pos, overlaps, n_max_boxes + ) + + assert assigned_gt_idx.shape == (batch_size, n_anchors) + assert mask_pos_sum.shape == (batch_size, n_anchors) + assert new_mask_pos.shape == (batch_size, n_max_boxes, n_anchors) + + +def test_candidates_in_gt(): + n_anchors = 4 + batch_size = 2 + n_max_boxes = 3 + + anchor_centers = torch.rand(n_anchors, 2) + gt_bboxes = torch.rand(batch_size * n_max_boxes, 4) + + candidates = candidates_in_gt(anchor_centers, gt_bboxes) + + assert candidates.shape == (batch_size * n_max_boxes, n_anchors) + assert candidates.dtype == torch.float32 + + +def test_batch_iou(): + batch_size = 2 + n = 3 + m = 4 + + batch1 = torch.rand(batch_size, n, 4) + batch2 = torch.rand(batch_size, m, 4) + + ious = batch_iou(batch1, batch2) + + assert ious.shape == (batch_size, n, m) + assert ious.dtype == torch.float32 diff --git a/tests/unittests/test_utils/test_boxutils.py b/tests/unittests/test_utils/test_boxutils.py new file mode 100644 index 00000000..2cb3df24 --- /dev/null +++ b/tests/unittests/test_utils/test_boxutils.py @@ -0,0 +1,116 @@ +import torch + +from luxonis_train.utils.boxutils import ( + anchors_for_fpn_features, + bbox2dist, + bbox_iou, + compute_iou_loss, + dist2bbox, + process_bbox_predictions, + process_keypoints_predictions, +) + + +def generate_random_bboxes(num_bboxes, max_width, max_height, format="xyxy"): + # Generate top-left corners (x1, y1) + x1y1 = torch.rand(num_bboxes, 2) * torch.tensor([max_width - 1, max_height - 1]) + + # Generate widths and heights ensuring x2 > x1 and y2 > y1 + wh = ( + torch.rand(num_bboxes, 2) * (torch.tensor([max_width, max_height]) - 1 - x1y1) + + 1 + ) + + if format == "xyxy": + # Calculate bottom-right corners (x2, y2) for xyxy format + x2y2 = x1y1 + wh + bboxes = torch.cat((x1y1, x2y2), dim=1) + elif format == "xywh": + # Use x1y1 as top-left corner and wh as width and height for xywh format + bboxes = torch.cat((x1y1, wh), dim=1) + elif format == "cxcywh": + # Calculate center coordinates and use wh as width and height for cxcywh format + cxcy = x1y1 + wh / 2 + bboxes = torch.cat((cxcy, wh), dim=1) + else: + raise ValueError("Unsupported format. 
Choose from 'xyxy', 'xywh', 'cxcywh'.") + + return bboxes + + +def test_dist2bbox(): + distance = torch.rand(10, 4) + anchor_points = torch.rand(10, 2) + bbox = dist2bbox(distance, anchor_points) + + assert bbox.shape == distance.shape + + +def test_bbox2dist(): + bbox = torch.rand(10, 4) + anchor_points = torch.rand(10, 2) + reg_max = 10.0 + + distance = bbox2dist(bbox, anchor_points, reg_max) + + assert distance.shape == bbox.shape + + +def test_bbox_iou(): + for format in ["xyxy", "cxcywh", "xywh"]: + bbox1 = generate_random_bboxes(5, 640, 640, format) + bbox2 = generate_random_bboxes(8, 640, 640, format) + + iou = bbox_iou(bbox1, bbox2) + + assert iou.shape == (5, 8) + assert iou.min() >= 0 and iou.max() <= 1 + + +def test_compute_iou_loss(): + pred_bboxes = generate_random_bboxes(8, 640, 640, "xyxy") + target_bboxes = generate_random_bboxes(8, 640, 640, "xyxy") + + loss_iou, iou = compute_iou_loss(pred_bboxes, target_bboxes, iou_type="giou") + + assert isinstance(loss_iou, torch.Tensor) + assert isinstance(iou, torch.Tensor) + assert 0 <= iou.min() and iou.max() <= 1 + + +def test_process_bbox_predictions(): + bbox = generate_random_bboxes(10, 64, 64, "xywh") + data = torch.rand(10, 4) + prediction = torch.concat([bbox, data], dim=-1) + anchor = torch.rand(10, 2) + + out_bbox_xy, out_bbox_wh, out_bbox_tail = process_bbox_predictions( + prediction, anchor + ) + + assert out_bbox_xy.shape == (10, 2) + assert out_bbox_wh.shape == (10, 2) + assert out_bbox_tail.shape == (10, 4) + + +def test_process_keypoints_predictions(): + keypoints = torch.rand(10, 15) # 5 keypoints * 3 (x, y, visibility) + + x, y, visibility = process_keypoints_predictions(keypoints) + + assert x.shape == y.shape == visibility.shape == (10, 5) + + +def test_anchors_for_fpn_features(): + features = [torch.rand(1, 256, 14, 14), torch.rand(1, 256, 28, 28)] + strides = torch.tensor([8, 16]) + + anchors, anchor_points, n_anchors_list, stride_tensor = anchors_for_fpn_features( + features, strides + ) + + assert isinstance(anchors, torch.Tensor) + assert isinstance(anchor_points, torch.Tensor) + assert isinstance(n_anchors_list, list) + assert isinstance(stride_tensor, torch.Tensor) + assert len(n_anchors_list) == len(features) diff --git a/tests/unittests/test_utils/test_loaders/test_base_loader.py b/tests/unittests/test_utils/test_loaders/test_base_loader.py new file mode 100644 index 00000000..e48f81ad --- /dev/null +++ b/tests/unittests/test_utils/test_loaders/test_base_loader.py @@ -0,0 +1,39 @@ +import pytest +import torch + +from luxonis_train.utils.loaders import ( + collate_fn, +) +from luxonis_train.utils.types import LabelType + + +def test_collate_fn(): + # Mock batch data + batch = [ + ( + torch.rand(3, 224, 224, dtype=torch.float32), + {LabelType.CLASSIFICATION: torch.tensor([1, 0])}, + ), + ( + torch.rand(3, 224, 224, dtype=torch.float32), + {LabelType.CLASSIFICATION: torch.tensor([0, 1])}, + ), + ] + + # Call collate_fn + imgs, annotations = collate_fn(batch) + + # Check images tensor + assert imgs.shape == (2, 3, 224, 224) + assert imgs.dtype == torch.float32 + + # Check annotations + assert LabelType.CLASSIFICATION in annotations + assert annotations[LabelType.CLASSIFICATION].shape == (2, 2) + assert annotations[LabelType.CLASSIFICATION].dtype == torch.int64 + + # TODO: test also segmentation, boundingbox and keypoint + + +if __name__ == "__main__": + pytest.main() diff --git a/tools/main.py b/tools/main.py new file mode 100644 index 00000000..e86954ec --- /dev/null +++ b/tools/main.py @@ -0,0 +1,112 @@ 
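Editor's aside on test_collate_fn above: a hedged, runnable sketch (assuming the module layout introduced in this diff) of how collate_fn handles bounding-box labels. Each box row gains a leading image-index column, so boxes from the whole batch can be concatenated into a single [N, 6] tensor:

    import torch
    from luxonis_train.utils.loaders import collate_fn
    from luxonis_train.utils.types import LabelType

    batch = [
        # two images, each with [class, x, y, w, h] box rows
        (torch.rand(3, 224, 224), {LabelType.BOUNDINGBOX: torch.rand(2, 5)}),
        (torch.rand(3, 224, 224), {LabelType.BOUNDINGBOX: torch.rand(1, 5)}),
    ]
    imgs, labels = collate_fn(batch)
    print(imgs.shape)                           # torch.Size([2, 3, 224, 224])
    print(labels[LabelType.BOUNDINGBOX].shape)  # torch.Size([3, 6])
    print(labels[LabelType.BOUNDINGBOX][:, 0])  # tensor([0., 0., 1.])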
+from enum import Enum +from importlib.metadata import version +from pathlib import Path +from typing import Annotated, Optional + +import typer + +app = typer.Typer(help="Luxonis Train CLI", add_completion=False) + + +class View(str, Enum): + train = "train" + val = "val" + test = "test" + + def __str__(self): + return self.value + + +ConfigType = Annotated[ + Optional[Path], + typer.Option( + help="Path to the configuration file.", + show_default=False, + ), +] + +OptsType = Annotated[ + Optional[list[str]], + typer.Argument( + help="A list of optional CLI overrides of the config file.", + show_default=False, + ), +] + +ViewType = Annotated[View, typer.Option(help="Which dataset view to use.")] + +SaveDirType = Annotated[ + Optional[Path], + typer.Option(help="Where to save the inference results."), +] + + +@app.command() +def train(config: ConfigType = None, opts: OptsType = None): + """Start training.""" + from luxonis_train.core import Trainer + + Trainer(str(config), opts).train() + + +@app.command() +def eval(config: ConfigType = None, view: ViewType = View.val, opts: OptsType = None): + """Evaluate model.""" + from luxonis_train.core import Trainer + + Trainer(str(config), opts).test(view=view.name) + + +@app.command() +def tune(config: ConfigType = None, opts: OptsType = None): + """Start hyperparameter tuning.""" + from luxonis_train.core import Tuner + + Tuner(str(config), opts).tune() + + +@app.command() +def export(config: ConfigType = None, opts: OptsType = None): + """Export model.""" + from luxonis_train.core import Exporter + + Exporter(str(config), opts).export() + + +@app.command() +def infer( + config: ConfigType = None, + view: ViewType = View.val, + save_dir: SaveDirType = None, + opts: OptsType = None, +): + """Run inference.""" + from luxonis_train.core import Inferer + + Inferer(str(config), opts, view=view.name, save_dir=save_dir).infer() + + +def version_callback(value: bool): + if value: + typer.echo(f"LuxonisTrain Version: {version(__package__)}") + raise typer.Exit() + + +@app.callback() +def common( + _: Annotated[ + bool, + typer.Option( + "--version", callback=version_callback, help="Show version and exit." + ), + ] = False, +): + ... + + +def main(): + app() + + +if __name__ == "__main__": + main()
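Editor's aside: the eager --version option above follows a common typer pattern. A hedged, self-contained sketch (hypothetical demo app, all names invented) of the same wiring:

    import typer

    app = typer.Typer(add_completion=False)


    def version_callback(value: bool):
        # Runs during option parsing, so `--version` works without a subcommand.
        if value:
            typer.echo("demo 0.0.1")
            raise typer.Exit()


    @app.callback()
    def common(
        _: bool = typer.Option(False, "--version", callback=version_callback),
    ):
        ...


    @app.command()
    def train():
        """Placeholder command so the app has something to invoke."""
        typer.echo("training...")


    if __name__ == "__main__":
        app()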