From a6d8945e9090ede697dcf0be19aed66866d9c592 Mon Sep 17 00:00:00 2001 From: xju Date: Tue, 2 Apr 2024 14:32:38 -0700 Subject: [PATCH 1/4] add toml description for installing --- Makefile | 4 ++-- pyproject.toml | 40 ++++++++++++++++++++++++++++++++++++++++ requirements.txt | 5 +++++ setup.py | 14 -------------- 4 files changed, 47 insertions(+), 16 deletions(-) delete mode 100644 setup.py diff --git a/Makefile b/Makefile index 249a658..fc17d94 100644 --- a/Makefile +++ b/Makefile @@ -27,7 +27,7 @@ test-full: ## Run all tests pytest train: ## Train the model - python src/train.py + python hadml/train.py debug: ## Enter debugging mode with pdb # @@ -39,4 +39,4 @@ debug: ## Enter debugging mode with pdb # - use "l" to print src code around current line, "ll" for full function code # - docs: https://docs.python.org/3/library/pdb.html # - python -m pdb src/train.py debug=default + python -m pdb hadml/train.py debug=default diff --git a/pyproject.toml b/pyproject.toml index 300ebf0..6a737bc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,43 @@ +[project] +name = "hadml" +version = "0.1.0" +description = "Machine Learning for Hadronization" +readme = "README.md" +requires-python = ">=3.9" +license = {file = "LICENSE"} +keywords = ["HEP", "Hadronization", "ML"] +authors = [ + {name = "X. Ju", email = "xju@lbl.gov" }, + {name = "A. Kania", emmail = ""}, + {name = "C.H Chan", email = "JayChan@lbl.gov"} +] +dependencies = [ +] + +[project.urls] +Homepage = "https://github.com/hep-lbdl/hadml" +Issues = "https://github.com/hep-lbdl/hadml/issues" + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.ruff] +line-length = 100 +preview = true +lint.select = ["ALL"] +lint.ignore = [ + "COM", "D100", "D101", "D102", "D103", "D104", "D105", "D205", "D401", "EM", "FIX", "FBT", + "S101", "S404", "S602", "PLR2004", "PLR0912", "PLR0913", "PLR0914", "PLR0915", "PLR0917", + "PLC0415", "G004", "PD901", "N802", "C901", "DTZ005", "DTZ007", "INP", "EXE002", "TD002", + "ANN001", "ANN002", "ANN003", "ANN101", "ANN201", "ANN202", "ANN204", "CPY001", "TRY003", + "N803", "N806", "N812", "T201", "PLW1514", "PTH123", "RUF015", "RUF017", "PLR6301", "ERA", "ISC001", + "RET504", "SLF001", "S403", "PLR1702" +] + +[tool.ruff.lint.pydocstyle] +convention = "numpy" + [tool.pytest.ini_options] addopts = [ "--color=yes", diff --git a/requirements.txt b/requirements.txt index 578db37..127411f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -25,3 +25,8 @@ pandas matplotlib pot # sh # for running bash commands in some tests (linux/macos only) + +# for python distribution +build +twine +pytest-cov diff --git a/setup.py b/setup.py deleted file mode 100644 index c09baa7..0000000 --- a/setup.py +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/env python - -from setuptools import find_packages, setup - -setup( - name="hadml", - version="0.1.0", - description="Machine Learning Project for Hadronizaton", - author="", - author_email="", - url="https://github.com/hep-lbdl/hadml", - install_requires=["pytorch-lightning", "hydra-core"], - packages=find_packages(), -) From ce941ce2ba315299b02502c65169f7b5168d8c5f Mon Sep 17 00:00:00 2001 From: xju Date: Tue, 2 Apr 2024 14:33:07 -0700 Subject: [PATCH 2/4] into src folder --- {hadml => src/hadml}/__init__.py | 0 {hadml => src/hadml}/datamodules/__init__.py | 0 {hadml => src/hadml}/datamodules/components/__init__.py | 0 {hadml => src/hadml}/datamodules/components/herwig.py | 0 {hadml => src/hadml}/datamodules/components/pythia.py | 0 {hadml => 
src/hadml}/datamodules/components/utils.py | 0 {hadml => src/hadml}/datamodules/gan_datamodule.py | 0 {hadml => src/hadml}/eval.py | 0 {hadml => src/hadml}/loss/__init__.py | 0 {hadml => src/hadml}/loss/loss.py | 0 {hadml => src/hadml}/metrics/__init__.py | 0 {hadml => src/hadml}/metrics/compare_fn.py | 0 {hadml => src/hadml}/metrics/image_converter.py | 0 {hadml => src/hadml}/metrics/media_logger.py | 0 {hadml => src/hadml}/models/__init__.py | 0 {hadml => src/hadml}/models/cgan/__init__.py | 0 {hadml => src/hadml}/models/cgan/cond_event_gan.py | 0 {hadml => src/hadml}/models/cgan/cond_particle_gan.py | 0 {hadml => src/hadml}/models/components/__init__.py | 0 {hadml => src/hadml}/models/components/deep_set.py | 0 {hadml => src/hadml}/models/components/epic_gan.py | 0 {hadml => src/hadml}/models/components/mlp.py | 0 {hadml => src/hadml}/models/components/mlp_res.py | 0 {hadml => src/hadml}/models/components/simple_dense_net.py | 0 {hadml => src/hadml}/models/components/transform.py | 0 {hadml => src/hadml}/models/node_embedding_module.py | 0 {hadml => src/hadml}/train.py | 0 {hadml => src/hadml}/utils/__init__.py | 0 {hadml => src/hadml}/utils/pylogger.py | 0 {hadml => src/hadml}/utils/rich_utils.py | 0 {hadml => src/hadml}/utils/utils.py | 0 31 files changed, 0 insertions(+), 0 deletions(-) rename {hadml => src/hadml}/__init__.py (100%) rename {hadml => src/hadml}/datamodules/__init__.py (100%) rename {hadml => src/hadml}/datamodules/components/__init__.py (100%) rename {hadml => src/hadml}/datamodules/components/herwig.py (100%) rename {hadml => src/hadml}/datamodules/components/pythia.py (100%) rename {hadml => src/hadml}/datamodules/components/utils.py (100%) rename {hadml => src/hadml}/datamodules/gan_datamodule.py (100%) rename {hadml => src/hadml}/eval.py (100%) rename {hadml => src/hadml}/loss/__init__.py (100%) rename {hadml => src/hadml}/loss/loss.py (100%) rename {hadml => src/hadml}/metrics/__init__.py (100%) rename {hadml => src/hadml}/metrics/compare_fn.py (100%) rename {hadml => src/hadml}/metrics/image_converter.py (100%) rename {hadml => src/hadml}/metrics/media_logger.py (100%) rename {hadml => src/hadml}/models/__init__.py (100%) rename {hadml => src/hadml}/models/cgan/__init__.py (100%) rename {hadml => src/hadml}/models/cgan/cond_event_gan.py (100%) rename {hadml => src/hadml}/models/cgan/cond_particle_gan.py (100%) rename {hadml => src/hadml}/models/components/__init__.py (100%) rename {hadml => src/hadml}/models/components/deep_set.py (100%) rename {hadml => src/hadml}/models/components/epic_gan.py (100%) rename {hadml => src/hadml}/models/components/mlp.py (100%) rename {hadml => src/hadml}/models/components/mlp_res.py (100%) rename {hadml => src/hadml}/models/components/simple_dense_net.py (100%) rename {hadml => src/hadml}/models/components/transform.py (100%) rename {hadml => src/hadml}/models/node_embedding_module.py (100%) rename {hadml => src/hadml}/train.py (100%) rename {hadml => src/hadml}/utils/__init__.py (100%) rename {hadml => src/hadml}/utils/pylogger.py (100%) rename {hadml => src/hadml}/utils/rich_utils.py (100%) rename {hadml => src/hadml}/utils/utils.py (100%) diff --git a/hadml/__init__.py b/src/hadml/__init__.py similarity index 100% rename from hadml/__init__.py rename to src/hadml/__init__.py diff --git a/hadml/datamodules/__init__.py b/src/hadml/datamodules/__init__.py similarity index 100% rename from hadml/datamodules/__init__.py rename to src/hadml/datamodules/__init__.py diff --git a/hadml/datamodules/components/__init__.py 
b/src/hadml/datamodules/components/__init__.py similarity index 100% rename from hadml/datamodules/components/__init__.py rename to src/hadml/datamodules/components/__init__.py diff --git a/hadml/datamodules/components/herwig.py b/src/hadml/datamodules/components/herwig.py similarity index 100% rename from hadml/datamodules/components/herwig.py rename to src/hadml/datamodules/components/herwig.py diff --git a/hadml/datamodules/components/pythia.py b/src/hadml/datamodules/components/pythia.py similarity index 100% rename from hadml/datamodules/components/pythia.py rename to src/hadml/datamodules/components/pythia.py diff --git a/hadml/datamodules/components/utils.py b/src/hadml/datamodules/components/utils.py similarity index 100% rename from hadml/datamodules/components/utils.py rename to src/hadml/datamodules/components/utils.py diff --git a/hadml/datamodules/gan_datamodule.py b/src/hadml/datamodules/gan_datamodule.py similarity index 100% rename from hadml/datamodules/gan_datamodule.py rename to src/hadml/datamodules/gan_datamodule.py diff --git a/hadml/eval.py b/src/hadml/eval.py similarity index 100% rename from hadml/eval.py rename to src/hadml/eval.py diff --git a/hadml/loss/__init__.py b/src/hadml/loss/__init__.py similarity index 100% rename from hadml/loss/__init__.py rename to src/hadml/loss/__init__.py diff --git a/hadml/loss/loss.py b/src/hadml/loss/loss.py similarity index 100% rename from hadml/loss/loss.py rename to src/hadml/loss/loss.py diff --git a/hadml/metrics/__init__.py b/src/hadml/metrics/__init__.py similarity index 100% rename from hadml/metrics/__init__.py rename to src/hadml/metrics/__init__.py diff --git a/hadml/metrics/compare_fn.py b/src/hadml/metrics/compare_fn.py similarity index 100% rename from hadml/metrics/compare_fn.py rename to src/hadml/metrics/compare_fn.py diff --git a/hadml/metrics/image_converter.py b/src/hadml/metrics/image_converter.py similarity index 100% rename from hadml/metrics/image_converter.py rename to src/hadml/metrics/image_converter.py diff --git a/hadml/metrics/media_logger.py b/src/hadml/metrics/media_logger.py similarity index 100% rename from hadml/metrics/media_logger.py rename to src/hadml/metrics/media_logger.py diff --git a/hadml/models/__init__.py b/src/hadml/models/__init__.py similarity index 100% rename from hadml/models/__init__.py rename to src/hadml/models/__init__.py diff --git a/hadml/models/cgan/__init__.py b/src/hadml/models/cgan/__init__.py similarity index 100% rename from hadml/models/cgan/__init__.py rename to src/hadml/models/cgan/__init__.py diff --git a/hadml/models/cgan/cond_event_gan.py b/src/hadml/models/cgan/cond_event_gan.py similarity index 100% rename from hadml/models/cgan/cond_event_gan.py rename to src/hadml/models/cgan/cond_event_gan.py diff --git a/hadml/models/cgan/cond_particle_gan.py b/src/hadml/models/cgan/cond_particle_gan.py similarity index 100% rename from hadml/models/cgan/cond_particle_gan.py rename to src/hadml/models/cgan/cond_particle_gan.py diff --git a/hadml/models/components/__init__.py b/src/hadml/models/components/__init__.py similarity index 100% rename from hadml/models/components/__init__.py rename to src/hadml/models/components/__init__.py diff --git a/hadml/models/components/deep_set.py b/src/hadml/models/components/deep_set.py similarity index 100% rename from hadml/models/components/deep_set.py rename to src/hadml/models/components/deep_set.py diff --git a/hadml/models/components/epic_gan.py b/src/hadml/models/components/epic_gan.py similarity index 100% rename from 
hadml/models/components/epic_gan.py rename to src/hadml/models/components/epic_gan.py diff --git a/hadml/models/components/mlp.py b/src/hadml/models/components/mlp.py similarity index 100% rename from hadml/models/components/mlp.py rename to src/hadml/models/components/mlp.py diff --git a/hadml/models/components/mlp_res.py b/src/hadml/models/components/mlp_res.py similarity index 100% rename from hadml/models/components/mlp_res.py rename to src/hadml/models/components/mlp_res.py diff --git a/hadml/models/components/simple_dense_net.py b/src/hadml/models/components/simple_dense_net.py similarity index 100% rename from hadml/models/components/simple_dense_net.py rename to src/hadml/models/components/simple_dense_net.py diff --git a/hadml/models/components/transform.py b/src/hadml/models/components/transform.py similarity index 100% rename from hadml/models/components/transform.py rename to src/hadml/models/components/transform.py diff --git a/hadml/models/node_embedding_module.py b/src/hadml/models/node_embedding_module.py similarity index 100% rename from hadml/models/node_embedding_module.py rename to src/hadml/models/node_embedding_module.py diff --git a/hadml/train.py b/src/hadml/train.py similarity index 100% rename from hadml/train.py rename to src/hadml/train.py diff --git a/hadml/utils/__init__.py b/src/hadml/utils/__init__.py similarity index 100% rename from hadml/utils/__init__.py rename to src/hadml/utils/__init__.py diff --git a/hadml/utils/pylogger.py b/src/hadml/utils/pylogger.py similarity index 100% rename from hadml/utils/pylogger.py rename to src/hadml/utils/pylogger.py diff --git a/hadml/utils/rich_utils.py b/src/hadml/utils/rich_utils.py similarity index 100% rename from hadml/utils/rich_utils.py rename to src/hadml/utils/rich_utils.py diff --git a/hadml/utils/utils.py b/src/hadml/utils/utils.py similarity index 100% rename from hadml/utils/utils.py rename to src/hadml/utils/utils.py From 6e942c2b5a4180caea833c31bede075605d8a96a Mon Sep 17 00:00:00 2001 From: xju Date: Tue, 2 Apr 2024 15:21:10 -0700 Subject: [PATCH 3/4] new formatting --- .flake8 | 38 --- LICENSE | 201 +++++++++++++++ notebooks/test_data.ipynb | 232 ++++++++++++++++++ pyproject.toml | 2 +- requirements.txt | 24 +- scripts/check_env.py | 61 +++++ src/hadml/datamodules/components/herwig.py | 130 +++++----- src/hadml/datamodules/components/pythia.py | 40 ++- src/hadml/datamodules/components/utils.py | 55 ++--- src/hadml/datamodules/gan_datamodule.py | 24 +- src/hadml/eval.py | 6 +- src/hadml/loss/loss.py | 3 +- src/hadml/metrics/compare_fn.py | 48 ++-- src/hadml/metrics/image_converter.py | 1 - src/hadml/metrics/media_logger.py | 13 +- src/hadml/models/cgan/cond_event_gan.py | 122 ++++----- src/hadml/models/cgan/cond_particle_gan.py | 144 +++++------ src/hadml/models/components/deep_set.py | 4 +- src/hadml/models/components/epic_gan.py | 59 +++-- src/hadml/models/components/mlp.py | 8 +- src/hadml/models/components/mlp_res.py | 22 +- .../models/components/simple_dense_net.py | 1 + src/hadml/models/components/transform.py | 7 +- src/hadml/models/node_embedding_module.py | 10 +- src/hadml/utils/pylogger.py | 1 - src/hadml/utils/rich_utils.py | 4 +- src/hadml/utils/utils.py | 36 +-- test_data.ipynb | 165 ------------- tests/test_eval.py | 9 +- tests/test_mnist_datamodule.py | 1 - tests/test_sweeps.py | 10 +- tests/test_train.py | 10 +- src/hadml/train.py => train.py | 73 ++---- 33 files changed, 868 insertions(+), 696 deletions(-) delete mode 100644 .flake8 create mode 100644 LICENSE create mode 100644 
notebooks/test_data.ipynb create mode 100755 scripts/check_env.py delete mode 100644 test_data.ipynb rename src/hadml/train.py => train.py (62%) diff --git a/.flake8 b/.flake8 deleted file mode 100644 index 4600586..0000000 --- a/.flake8 +++ /dev/null @@ -1,38 +0,0 @@ -[flake8] -enable-extensions = G -select = B,C,E,F,G,P,T4,W,B9 -max-line-length = 120 -# C408 ignored because we like the dict keyword argument syntax -# E501 is not flexible enough, we're using B950 instead -ignore = - E203,E305,E402,E501,E721,E741,F405,F821,F841,F999,W503,W504,C408,E302,W291,E303, - # shebang has extra meaning in fbcode lints, so I think it's not worth trying - # to line this up with executable bit - EXE001, - # these ignores are from flake8-bugbear; please fix! - B007,B008, - # these ignores are from flake8-comprehensions; please fix! - C407 - # these ignores are from flake8-logging-format; please fix! - G001,G002,G003,G004,G100,G101,G200,G201,G202 -per-file-ignores = - __init__.py: F401 -optional-ascii-coding = True -exclude = - ./.git, - ./build_test_custom_build, - ./build, - ./caffe2, - ./docs/caffe2, - ./docs/cpp/src, - ./docs/src, - ./functorch/docs, - ./functorch/examples, - ./functorch/notebooks, - ./scripts, - ./test/generated_type_hints_smoketest.py, - ./third_party, - ./torch/include, - ./torch/lib, - ./venv, - *.pyi \ No newline at end of file diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..f49a4e1 --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/notebooks/test_data.ipynb b/notebooks/test_data.ipynb new file mode 100644 index 0000000..610efb2 --- /dev/null +++ b/notebooks/test_data.ipynb @@ -0,0 +1,232 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "%load_ext autoreload\n", + "%autoreload 2" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This notebook look into the raw HERWIG data from the 4th paper: \"paper title to be determined\"." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import torch\n", + "from hadml import utils, datamodules\n", + "from hadml.datamodules.components.herwig import HerwigEventMultiHadronDataset\n", + "import matplotlib.pyplot as plt" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "import hadml" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'/pscratch/sd/x/xju/Herwig/hadml/src/hadml/__init__.py'" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "hadml.__file__" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Loading existing pids map: ../data/Herwig/pids_to_ix.pkl\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Processing...\n", + "Done!\n" + ] + } + ], + "source": [ + "data = HerwigEventMultiHadronDataset(\n", + " \"../data/Herwig\",\n", + " raw_file_list=[\"AllClusters_paper4.dat\"],\n", + " processed_file_name=\"herwig_multihadron_graph_obs_data_variation.pt\",\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Data(x=[13, 8], had_kin_rest_frame=[30, 4], had_kin=[30, 4], had_type_indices=[30, 1], cluster_labels=[30])" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "data[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Data(x=[4, 8], had_kin_rest_frame=[14, 4], had_kin=[14, 4], had_type_indices=[14, 1], cluster_labels=[14])" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "data[1]" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "n_clusters = []\n", + "n_hadrons = []\n", + "# n_hadrons_per_cluster = []\n", + "had_kin = []\n", + "for d in data:\n", + " n_clusters.append(len(d.x))\n", + " n_hadrons.append(len(d.had_kin))\n", + " had_kin.append(d.had_kin)\n", + "had_kin = torch.cat(had_kin)" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAkYAAAG7CAYAAAAmOVo2AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/H5lhTAAAACXBIWXMAAA9hAAAPYQGoP6dpAABJtUlEQVR4nO3deVyVZf7/8fdBVkFwLXADF9QyzUwdExRRQ9Pc0kbtm2uu1WhpqTmay+SuqGM1ZsyIZrZqNmaGmaJiarmmKSPuhlSu4M5y7t8fHc4vYj8cOECv5+NxHtO57+u+78/NcThvrvu6r9tkGIYhAAAAyMnRBQAAABQXBCMAAAALghEAAIAFwQgAAMCCYAQAAGBBMAIAALAgGAEAAFg4O7qAksRsNuvixYsqV66cTCaTo8sBAAB5YBiGbty4oapVq8rJKec+IYJRPly8eFE1atRwdBkAAMAGFy5cUPXq1XNsQzDKh3Llykn67Qfr7e3t4GoAAEBeJCUlqUaNGtbv8ZwQjPIh/fKZt7c3wQgAgBImL8NgGHwNAABgQTACAACwIBgBAABYEIwAAAAsCEYAAAAWBCMAAAALghEAAIAFwQgAAMCCYAQAAGBRLIPR3bt3NXbsWLVp00ZVq1aVu7u7fH19FRQUpBUrViglJSXP+zKbzVq6dKkaNWokDw8PValSRf369dPp06cL8QwAAEBJZDIMw3B0EX90+fJl1ahRQy1atFC9evVUpUoVXbt2TZs2bdK5c+cUFhamTZs25fqEXEkaNmyYIiIi1LBhQ3Xp0kUXL17Uxx9/LC8vL+3Zs0eBgYF5rispKUk+Pj5KTEzkkSAAAJQQ+fn+LpbByGw2KzU1Va6urhmWp6am6vHHH1d0dLS++OILdenSJcf9bNu2Te3atVObNm309ddfW/e3adMmde7cWWFhYYqKispzXQQjAABKnvx8fxfLS2lOTk6ZQpEkOTs7q2fPnpKkkydP5rqfd999V5L0j3/8I8P+nnjiCbVt21abN2/W+fPn7VQ1AAAo6YplMMqO2WzWV199JUl66KGHcm0fHR0tT09PBQUFZVrXsWNHSdL27dvtWyQAACixnB1dQE6Sk5M1a9YsGYahK1eu6JtvvlFsbKwGDx6s9u3b57jtrVu3lJCQoIceekhlypTJtD59bFFcXFy2+wgPD1d4eLj1vdlstvFMAABASVDsg9H06dOt700mk1555RXNnj07120TExMlST4+PlmuT7/GmN4uK0lJSYqPj89PyShGAiZudHQJhe7snJzH2QEA8qdYX0rz8vKSYRhKS0vThQsX9NZbbykiIkJt27ZVUlJSoR/f29tb1apVs778/PwK/ZgAAMBxinUwSufk5KTq1atr1KhRWr58uXbt2qWZM2fmuE16T1F2PULpwSq7HiVJGjt2rH766SfrKzY21sYzAAAAJUGJCEa/FxYWJum3gdU58fT0lJ+fn86cOaO0tLRM69PHFuVnHiMAAFC6lbhgdPHiRUmSi4tLrm1DQkJ069Yt7dq1K9O69PmL2rRpY98CAQBAiVUsg9GxY8d0+/btTMtv376tsWPHSpI6d+5sXX758mXFxsbq8uXLGdoPHz5ckjRlyhQlJydbl2/atEnR0dEKCwuTv79/YZwCAAAogYrlXWkff/yxwsPDFRwcrICAAHl7eys+Pl6bNm3SlStX1Lp1a7388svW9m+++aamT5+uqVOnatq0adbloaGhGjp0qCIiItS0aVN16dJFCQkJ+uijj1SxYkUtXbrUAWcHAACKq2IZjJ588kldvHhR3377rXbv3q2bN2/Kx8dHjRs3Vt++fTVkyBA5O+et9HfeeUeNGjXS8uXLtWTJEnl5ealnz56aOXOm6tSpU8hnAgAASpJi+ay04opnpZUszGMEAJBKwbPSAAAAHIFgBAAAYEEwAgAAsCAYAQAAWBCMAAAALAhGAAAAFgQjAAAAC4IRAACABcEIAADAgmAEAABgQTACAACwIBgBAABYEIwAAAAsCEYAAAAWBCMAAAALghEAAIAFwQgAAMCCYAQAAGBBMAIAALAgGAEAAFgQjAAAACwIRgAAABYEIwAAAAuCEQAAgAXBCAAAwIJgBAAAYEEwAgAAsCAYAQAAWBCMAAAALAhGAAAAFgQjAAAAC4IRAACABcEIAADAgmAEAABgQTACAACwIBgBAABYEIwAAAAsCEYAAAAWBCMAAAALghEAAIAFwQgAAMCCYAQAAGBBMAIAALAgGAEAAFgUy2AUHx+vxYsXKywsTDVr1pSrq6t8fX3Vq1cv7d27N8/7iY6OlslkyvYVGRlZeCcBAABKHGdHF5CVpUuXau7cuapTp47CwsJUpUoVxcXFaf369Vq/fr3WrFmjPn365Hl/ISEhatu2bablTZo0sV/RAACgxCuWwahFixaKjo5WSEhIhuU7d+5U+/btNWrUKPXo0UNubm552l/btm01bdq0QqgUAACUJsXyUtpTTz2VKRRJUuvWrRUaGqpr167pyJEjDqgMAACUZsWyxygnLi4ukiRn57yXHhcXp8WLF+vOnTuqXr262rVrp2rVquW6XXh4uMLDw63vzWZz/gsGAAAlRokKRufPn9eWLVvk5+enRo0a5Xm7NWvWaM2aNdb3zs7O+tvf/qb58+erTJky2W6XlJSk+Pj4AtUMAABKjmJ5KS0rKSkp6t+/v+7du6e5c+fmGGjSValSRXPmzNHRo0d18+ZN/fLLL1q/fr3q1q2rRYsWafz48Tlu7+3trWrVqllffn5+9jodAABQDJkMwzAcXURuzGaz+vfvrzVr1mjYsGFavnx5gfb3888/q3Hjxrp27Zri4+N133335Wm7pKQk+fj4KDExUd7e3gWqAYUvYOJGR5dQ6M7O6eLoEgCg2MvP93ex7zEym80aMmSI1qxZo2effVbLli0r8D59fX3VvXt3paam5mteJAAAULoV6zFGZrNZgwcP1qpVq9SvXz9FRkbKyck+Wa5y5cqSpFu3btllfwAAoOQrtj1Gvw9Fffr00XvvvZencUV5ld5TFBAQYLd9AgCAkq1YBqP0y2erVq3S008/rdWrV+cYii5fvqzY2Fhdvnw5w/L9+/dn2X7JkiXatm2bAgMD1bx5c7vWDgAASq5ieSltxowZWrlypby8vFSvXj298cYbmdr06NHD+kiPN998U9OnT9fUqVMzzHDdq1cvubi4qFmzZqpevbpu3bqlPXv26ODBgypfvnyugQsAAPy5FMtgdPbsWUnSzZs3NXPmzCzbBAQE5Pqss1GjRikqKko7duzQlStX5OTkJH9/f7300ksaN26cqlevbufKAQBASVYibtcvLrhdv2Thdn0AgFTKbtcHAAAoKgQjAAAAC5uCUVpampKSkpSampph+Z07dzR9+nT17NlTL7/8si5evGiXIgEAAIqCTYOvZ8yYoTfeeEPR0dFq3bq1JMkwDLVt21b79u2TYRgymUxat26dDh06pAoVKti1aAAAgMJgU4/RN998I19fX2sokqQNGzbo+++/V2BgoBYvXqywsDD99NNPevfdd+1WLAAAQG
GyKRidOXNGDRo0yLDs888/l8lk0vvvv6/Ro0drw4YNqlKlij799FO7FAoAAFDYbApGV65cka+vb4Zlu3btUrVq1fToo49KkpydndWyZUudP3++4FUCAAAUAZuCkbOzc4aHr167dk1xcXEKCgrK0K5cuXJKTEwsWIUAAABFxKZgVLt2be3Zs0dms1mS9MUXX8gwDAUHB2do9+uvv6pKlSoFrxIAAKAI2BSMunXrpl9//VXdu3fXkiVLNGHCBJUpU0Zdu3a1tjEMQwcPHlStWrXsViwAAEBhsul2/fHjx+vzzz/Xxo0btXHjb49dmDhxomrWrGltExMTo8uXL2fqRQIAACiubApG3t7e+u677/Tpp5/ql19+UfPmzRUSEpKhzZUrVzRmzBj99a9/tUuhAAAAhc2mYCRJHh4e6t+/f7bre/TooR49eti6ewAAgCJn0xijdu3aad68ebm2W7Bggdq3b2/LIQAAAIqcTT1G0dHRCggIyLXd//73P0VHR9tyCAAAgCJnU49RXqWkpMjJqVAPAQAAYDeFmlqOHDmiSpUqFeYhAAAA7CbPl9KGDBmS4X1MTEymZelSU1N17NgxHTp0SN26dStYhQAAAEUkz8EoMjLS+t8mk0knT57UyZMnc9ymatWqmjlzps3FAQAAFKU8B6MVK1ZI+m1G6yFDhig4OFjPPfdclm1dXV1VvXp1tWzZUi4uLvapFAAAoJDlORgNHDjQ+t/Tpk1Ty5YtMywDAAAo6Wy6Xf/s2bN2LgMAAMDxuJceAADAwuZHgkjS3r17tWXLFsXHx+vu3btZtjGZTPr3v/9dkMMAAAAUCZuCUXJysvr166f169dL+m1AdnYIRgAAoKSwKRj94x//0GeffSZPT0/1799fDzzwgLy9ve1dGwAAQJGyKRh98MEHKlu2rPbu3asHH3zQ3jUBAAA4hE2Dr3/66ScFBQURigAAQKliUzCqUKGCKlasaO9aAAAAHMqmYNShQwft3bs3x0HXAAAAJY1Nwegf//iHrl69qmnTptm5HAAAAMexafD1jh07NHjwYL3xxhv66quv1KVLF9WsWVNOTlnnrAEDBhSoSAAAgKJgMmy4Hubk5CSTyWS9lGYymXJsn5aWZlt1xUxSUpJ8fHyUmJjI9AQlQMDEjY4uodCdndPF0SUAQLGXn+9vm3qMBgwYkGsYAgAAKGlsCkaRkZF2LgMAAMDxeIgsAACARYEeIpvu5MmTunTpkipVqqR69erZY5cAAABFzuYeo7S0NL3xxhvy9fVV/fr1FRwcrDlz5ljXv//++2rVqpV+/PFHuxQKAABQ2GwKRmlpaXryySc1depUXbt2TQ888ECmyR6DgoK0Z88erVu3zi6FAgAAFDabgtGyZcsUFRWl0NBQnTlzRkePHs3UJiAgQHXq1NHmzZsLXCQAAEBRsCkYrVy5UhUrVtQnn3yiqlWrZtvugQce0Pnz520uDgAAoCjZFIxiY2PVokULVahQIcd2Pj4++vXXX20qDAAAoKjZPMbIzc0t13YJCQl5agcAAFAc2BSM/P399cMPP+TYJiUlRUePHlVgYGC+9x8fH6/FixcrLCxMNWvWlKurq3x9fdWrVy/t3bs3X/sym81aunSpGjVqJA8PD1WpUkX9+vXT6dOn810XAAAo3WwKRp06ddLZs2e1fPnybNssXbpUly5dUpcu+X+W09KlS/Xyyy/r9OnTCgsL07hx4xQcHKzPP/9crVq10kcffZTnfY0YMUKjR4+WYRgaPXq0OnXqpHXr1ql58+aKi4vLd20AAKD0sukhsgkJCWrYsKGSkpL04osv6q9//auCg4P19NNPa8KECfr4448VHh6uChUq6Mcff1TlypXztf9169apUqVKCgkJybB8586dat++vby8vPJ0mW7btm1q166d2rRpo6+//lqurq6SpE2bNqlz584KCwtTVFRUnuviIbIlCw+RBQBI+fv+tikYSdKOHTv01FNP6erVq5keKGsYhsqXL6///ve/Cg4OtmX32erYsaM2b96s77//Xs2aNcux7TPPPKMPPvhA27dvV5s2bTKsCw0NVXR0tM6dO6eaNWvm6dgEo5KFYAQAkPL3/W3zzNdt2rTRjz/+qPHjx6thw4by8PCQm5ub6tatq9GjR+vIkSN2D0WS5OLiIklyds79aSbR0dHy9PRUUFBQpnUdO3aUJG3fvt2+BQIAgBKrQM9Ku//++zVnzpwMjwIpTOfPn9eWLVvk5+enRo0a5dj21q1bSkhI0EMPPaQyZcpkWp8+KDyncUbh4eEKDw+3vjebzTZWDgAASgKbglFaWlqWYaMwpaSkqH///rp3757mzp2b6/ETExMl/TaXUlbSu9LS22UlKSlJ8fHxNlaMP/ozXNoCAJRsNl1Kq169usaPH69jx47Zu54smc1mDRo0SDt27NCwYcPUv3//Ijmut7e3qlWrZn35+fkVyXEBAIBj2BSMfvnlFy1cuFCNGjVSy5YttXz5ciUlJdm7Nkm/haIhQ4ZozZo1evbZZ7Vs2bI8bZfeU5Rdj1B6vdn1KEnS2LFj9dNPP1lfsbGx+aweAACUJDYFoyNHjuill15SlSpV9N1332nUqFHy8/PTs88+q2+++cZuxZnNZg0ePFgrV65Uv379FBkZKSenvJXs6ekpPz8/nTlzRmlpaZnWp48tsmUCSgAAUDrZFIwaNmyohQsXKj4+Xp999pm6du2qlJQUrVmzRmFhYQoICNC0adN09uxZmwtLD0WrVq1Snz599N577+V7XFNISIhu3bqlXbt2ZVqXPn/RH2/jBwAAf142364vSWXKlFH37t21fv16xcfHa8GCBWrYsKHOnz+vGTNmqG7dumrfvn2+95t++WzVqlV6+umntXr16hxD0eXLlxUbG6vLly9nWD58+HBJ0pQpU5ScnGxdvmnTJkVHRyssLEz+/v75rg8AAJRONk/wmJMDBw7orbfe0ooVK2QymbK8lJWTadOmafr06fLy8tKYMWOynLOoR48eatKkSYb2U6dO1bRp0zK0GzZsmCIiItSwYUN16dJFCQkJ+uijj+Tl5aXdu3erXr16ea6LCR4LhrvS7I8JHgEgd/n5/i7QPEZZ2b17tyIjI7Vu3Tqb95F+Ce7mzZuaOXNmlm0CAgKswSgn77zzjho1aqTly5dryZIl8vLyUs+ePTVz5kzVqVPH5hoBAEDpY5ceo4sXL2rVqlVauXKlTpw4IcMw5OTkpLCwMA0ZMkS9e/e2R60OR49RwdBjZH/0GAFA7oqkxyg5OVnr16/XihUrtGXLFpnNZhmGobp162rw4MEaOHCgqlatauvuAQAAipxNwej555/XRx99pOvXr8swDHl6eurpp5/WkCFDCuX5aAAAAEXBpmCUPsliUFCQhgwZor/+9a/y9PS0a2EAAABFzaZgNHHiRA0ePJjJEQEAQKliUzCaNWuWvesAAABwuALfrn/s2DF9++23unTpkho2bKhu3bpJ+m2SxtTUVLm6uha4SAAAgKJg88zXFy5cUIcOHdSoUSONGDFCkydP1vr1663r3333XXl4eNj12
WkAAACFyaZgdPXqVYWEhGjr1q1q2LChRo0apT9Oh/TXv/5VTk5O+u9//2uXQgEAAAqbTcFo7ty5Onv2rF555RUdPnxYb775ZqY2FSpUUKNGjRQTE1PgIgEAAIqCTcHo888/V0BAgObMmSOTyZRtu9q1a+vixYs2FwcAAFCUbApG586dU9OmTeXklPPmrq6uunr1qk2FAQAAFDWbgpG7u7tu3LiRa7vz58/Lx8fHlkMAAAAUOZuCUYMGDXTgwAHdunUr2zaXL1/W4cOH1bhxY5uLAwAAKEo2BaPevXvrypUrGjt2rMxmc5ZtXn31Vd2+fVt9+vQpUIEAAABFxaYJHl944QWtXLlSERER2r9/v5566ilJ0qlTpxQeHq5PPvlE3333nZo0aaJBgwbZs14AAIBCY1Mwcnd3V1RUlJ5++ml9++23OnjwoCQpJiZGMTExMgxDzZs31/r16+Xi4mLXggEAAAqLzY8E8fPzU0xMjKKiorRx40adPn1aZrNZNWrU0BNPPKHu3bvneCs/AABAcVPgZ6V17NhRHTt2tEctAAAADmXzs9IAAABKG4IRAACABcEIAADAgmAEAABgQTACAACwIBgBAABY5CkY7dixQydOnCjsWgAAABwqT8Gobdu2mjNnjvV9u3btNG/evEIrCgAAwBHyPMGjYRjW/46OjlZAQEBh1AMAAOAweeoxKleunBISEgq7FgAAAIfKU49R48aNtXXrVr3++uuqW7euJOnkyZNatWpVng4yYMAA2ysEAAAoIibj99fIsrFhwwb17t1bqampkn67rJafB8SmpaXZXmExkpSUJB8fHyUmJsrb29vR5ZQ4ARM3OrqEUufsnC6OLgEAir38fH/nqceoa9eu+u6777R+/XqdO3dOkZGRqlOnjoKCguxSMAAAQHGQ58HXDz/8sB5++GFJUmRkpIKDg/Wf//yn0AoDAAAoankORr83depUPfLII/auBQAAwKFsDkYAAACljU3BKF1qaqo+/fRTbdu2TfHx8ZKkatWqKTQ0VL1795azc4F2DwAAUKRsTi6HDh1S7969debMGf3xxraIiAhNmTJFn3zyiZo0aVLQGgEAAIqETcHo4sWLCgsL0+XLl3X//ferb9++qlOnjiTp9OnT+vDDD3Xq1Cl17NhRhw4dkp+fn12LBgAAKAw2BaO5c+fq8uXLGjp0qJYsWSIPD48M62fNmqXRo0crIiJC8+bN06JFi+xSLAAAQGHK0wSPf1SvXj0lJyfr1KlTKlOmTJZtUlNTVbduXbm6uurEiRMFLrQ4YILHgmGCR/tjgkcAyF1+vr/z9Ky0P7pw4YJatWqVbSiSJGdnZz322GO6cOGCLYcAAAAocjYFIzc3NyUlJeXa7saNG3Jzc7PlEAAAAEXOpmD04IMPatu2bTn2Bp0/f17btm1Tw4YNbS4OAACgKNkUjAYMGKA7d+6oQ4cO+vLLLzOt/+KLL/T444/r7t27GjBgQIGLBAAAKAo23ZU2bNgwrV27Vt988426du2qihUrqlatWpKkM2fO6OrVqzIMQx06dNCwYcPsWjAAAEBhsanHqEyZMtq4caPGjx8vT09PXblyRfv27dO+fft05coVeXp6asKECfriiy/k5GTTIbR69WqNGDFCzZo1k5ubm0wmkyIjI/O1j+joaJlMpmxf+d0fAAAo3Wye+drV1VVz5szR9OnTtW/fvgyPBEkPMwUxefJknTt3TpUrV5afn5/OnTtn875CQkLUtm3bTMuZlRsAAPxegR9m5ubmpqCgIHvUkkFERIQCAwPl7++vOXPm6LXXXrN5X23bttW0adPsVxwAACiViu1TXjt06ODoEgAAwJ9MsQ1G9hQXF6fFixfrzp07ql69utq1a6dq1ao5uiwAAFDM/CmC0Zo1a7RmzRrre2dnZ/3tb3/T/Pnzc5y9Ozw8XOHh4db3ZrO5UOsEAACOZdstYyVElSpVNGfOHB09elQ3b97UL7/8ovXr16tu3bpatGiRxo8fn+P2SUlJio+Pt74SEhKKqHIAAOAIpbrHqGHDhhlm3vb09FT37t31l7/8RY0bN9Y///lPTZgwQffdd1+W23t7e2e45GY2mwlHAACUYqW6xyg7vr6+6t69u1JTU7V3795s240dO1Y//fST9RUbG1uEVQIAgKJmUzA6f/58js9JKwkqV64sSbp165aDKwEAAMWFTcEoICBAffv2tXctRSq9pyggIMCxhQAAgGLDpmDk7e1tfTZacXD58mXFxsbq8uXLGZbv378/y/ZLlizRtm3bFBgYqObNmxdFiQAAoASwafD1gw8+WOiX0iIiIhQTEyNJOnLkiHVZdHS0JCk4OFhDhw6VJL355puaPn26pk6dmmGG6169esnFxUXNmjVT9erVdevWLe3Zs0cHDx5U+fLltXr16hxv1wcAAH8uNgWjYcOGadiwYfr+++8LrcclJiZGK1euzLBs165d2rVrl/V9ejDKzqhRoxQVFaUdO3boypUrcnJykr+/v1566SWNGzdO1atXL5TaAQBAyWQyDMOwZcPRo0frvffe06uvvqpevXopICCgwA+OLe6SkpLk4+OjxMREeXt7O7qcEidg4kZHl1DqnJ3TxdElAECxl5/vb5t6jH5/+WnKlCmaMmVKtm1NJpNSU1NtOQwAAECRsikY5aeTycYOKQAAgCJnUzDimWEAAKA0+lPOfA0AAJAVghEAAIBFgYLRqVOnNH78eAUHB6t+/foZnla/d+9eLV++XImJiQUuEgAAoCjYNMZIklauXKmRI0fq3r17kn67++z3M0/fvn1bo0aNkqurqwYNGlTgQgEAAAqbTT1Ge/bs0dChQ+Xq6qp58+Zp7969me4+CwkJkY+PjzZs2GCXQgEAAAqbTT1G8+bNk2EY2rhxo4KDg7Ns4+TkpCZNmujYsWMFKhAAAKCo2NRjtGvXLrVo0SLbUJTO19dXCQkJNhUGAABQ1GwKRtevX1fNmjVzbXfnzh0lJyfbcggAAIAiZ1MwqlSpks6dO5dru5MnT8rX19eWQwAAABQ5m4JRy5YttW/fPv3444/Zttm1a5d+/PHHXC+3AQAAFBc2BaMXXnhBaWlp6tWrlw4dOpRp/fHjxzVkyBCZTCY9//zzBa0RAACgSNgUjNq3b6+xY8fqxIkTevTRR1WvXj2ZTCZFRUWpcePGatSokeLi4vTqq6+qZcuW9q4ZAACgUNg88/WCBQv0zjvvyNfXVydPnpRhGEpISNDRo0dVsWJFLV26VHPmzLFnrQAAAIXK5pmvJWnYsGEaOnSoDh48qNOnT8tsNqtGjRpq3ry5nJ0LtGsAAIAiV+D0YjKZ1LRpUzVt2tQe9QAAADiMXbp1DMPQlStXZBiGKlWqJCenAj2bFgAAwCEKlGC+/vprderUSeXKldP9998vX19flStXTp06dVJUVJS9agQAACgSNgejV199VZ06ddLmzZt1+/ZtGYYhwzB0584dbd68WZ07d9a4cePsWSsAAEChsikYrV69WgsXLpS7u7vGjRunH374QTdu3NCNGzd05MgRvfLKK/Lw8NDixYu1
evVqe9cMAABQKGwKRkuXLlWZMmX01Vdfaf78+XrooYfk6ekpT09PNWzYUPPmzdNXX30lk8mkN9980941AwAAFAqbgtHRo0cVHBys1q1bZ9smff3Ro0dtLg4AAKAo2RSM3N3dVbVq1VzbVa1aVa6urrYcAgAAoMjZFIweffRR/fDDD7m2++GHH9SsWTNbDgEAAFDkbApGf//733X8+HHNmzcv2zbz58/X8ePHNWnSJJuLAwAAKEp5muBxx44dGd6bTCa9+OKLeu211/TJJ5+of//+qlWrliTpzJkzWr16tfbv36/Ro0cz2SMAACgxTIZhGLk1cnJykslkyrQ8fdM/rvv9cpPJpNTUVHvU6nBJSUny8fFRYmKivL29HV1OiRMwcaOjSyh1zs7p4ugSAKDYy8/3d556jNq0aZNlMAIAAChN8hSMoqOjC7kMAAAAx2MAEAAAgAXBCAAAwCJPl9Kyc/fuXe3bt08XL17U3bt3s203YMCAghwGAACgSNgcjObPn69Zs2YpKSkp17YEIwAAUBLYFIzefPNNTZgwQZLUqFEjBQYGqly5cnYtDAAAoKjZHIycnZ21du1ade3a1d41AQAAOIRNg6/Pnj2rNm3aEIoAAECpYlMwuu+++1SlShV71wIAAOBQNgWjJ554Qrt375bZbLZ3PQAAAA5jUzCaOnWqkpOTNXr0aCUnJ9u7JgAAAIewafB11apVFRMTo27duql+/foKDQ1VzZo15eSUOWeZTCZNmTKlwIUCAAAUNpuCkWEYWrJkiWJjY2U2mxUZGZmpjclkkmEYBCMAAFBi2BSM5s+fr6VLl8rZ2VlPPvmkAgMD5eXlZe/aAAAAipRNwSgiIkJly5bVzp079cgjj9i7JknS6tWrtXPnTu3fv19HjhxRcnKyVqxYoUGDBuVrP2azWW+99ZaWL1+ukydPysvLSx06dNDMmTNVu3btQqkdAACUTDYNvr5w4YJat25daKFIkiZPnqzly5fr3Llz8vPzs3k/I0aM0OjRo2UYhkaPHq1OnTpp3bp1at68ueLi4uxYMQAAKOlsCka+vr6F/giQiIgInT17VpcuXdLIkSNt2se2bdsUERGhNm3a6MCBA5o7d67ee+89rV+/XlevXtWLL75o56oBAEBJZlMw6tmzp3bu3Km7d+/aux6rDh06yN/fv0D7ePfddyVJ//jHP+Tq6mpd/sQTT6ht27bavHmzzp8/X6BjAACA0sOmYDRt2jRVrFhR/fr10+XLl+1dk91ER0fL09NTQUFBmdZ17NhRkrR9+/Zstw8PD1f16tWtrwYNGhRarQAAwPFsGnz90ksvqX79+lq/fr22bt2qRx99NMd5jP79738XuND8unXrlhISEvTQQw+pTJkymdYHBgZKUo7jjJKSkhQfH19oNQJAwMSNRXq8s3O6FOnxSjs+v9LHpmAUGRkpk8kkSbpx44aio6OzbeuoYJSYmChJ8vHxyXK9t7d3hnbZtalWrZr1vdlsVkJCgh2rBAAAxYlNwWjFihX2rqNYGjt2rMaOHWt9n5SUlG3QAgAAJZ9NwWjgwIH2rsPu0gNMdj1CSUlJGdoBAADYNPi6JPD09JSfn5/OnDmjtLS0TOvTxxaljzUCAAAotcFIkkJCQnTr1i3t2rUr07qoqChJUps2bYq6LAAAUEzZdCltyJAheW5bFIOvL1++rMuXL6ty5cqqXLmydfnw4cP14YcfasqUKfr666+tcxlt2rRJ0dHRCgsLK/BcSQAAoPSw+a60nKTfsWYYhs3BKCIiQjExMZKkI0eOWJel3wEXHBysoUOHSpLefPNNTZ8+XVOnTtW0adOs+wgNDdXQoUMVERGhpk2bqkuXLkpISNBHH32kihUraunSpfmuCwAAlF52vSvNbDbr3Llz+vLLL7Vv3z699NJLevjhh20qLCYmRitXrsywbNeuXRkui6UHo5y88847atSokZYvX64lS5bIy8tLPXv21MyZM1WnTh2bagMAAKWTyTAMozB2PH78eL377rs6cOCAatWqVRiHKHLpt+snJiZa50FC3hX1RGh/Bkz2VvIxQWDJxudXMuTn+7vQBl/PmjVL5cqV0+uvv15YhwAAALCrQgtGzs7Oatq0qbZs2VJYhwAAALCrQr1d/86dO7p27VphHgIAAMBuCi0YHT9+XDExMapRo0ZhHQIAAMCubLorbdWqVdmuu3Hjho4fP6733ntPd+/e1TPPPGNzcQAAAEXJpmA0aNAg61xFWUm/0a179+6aPHmybZUBAAAUMZuC0YABA7INRq6urqpWrZo6dOigVq1aFag4AACAolQoM18DAACURKX6IbIAAAD5QTACAACwyNOltJzuQsuLAQMGFGh7AACAopCnYJTbXWi5IRgBAICSIE/BqF27dvkORrt379bt27cLFKgAAACKUp6CUX6ed7Zz506NHz9ed+7ckSQ1atTItsoAAACKmN0GXx89elRdu3ZV27ZttXfvXtWoUUORkZE6ePCgvQ4BAABQqGyax+j3Lly4oClTpuj9999XWlqaKlWqpEmTJumFF16Qq6urPWoEAAAoEjYHo2vXrmnmzJl6++23dffuXZUtW1ZjxozRhAkT5O3tbc8aAQAAikS+g9Hdu3e1aNEizZs3T0lJSSpTpoyGDx+uadOmydfXtzBqBAAAKBJ5DkZms1kRERGaMWOGEhISZBiGnnrqKc2aNUv16tUrzBoBAACKRJ6C0bp16/T3v/9dJ06ckGEYCgkJ0dy5c9WiRYvCrg8AAKDI5CkY9e7dWyaTyTqOqHPnzkpNTdW3336bp4O0atWqQEUCAAAUhXyNMbp9+7Zmz56t2bNn53kbk8mk1NTUfBcGAABQ1PIUjGrWrMkM1gAAoNTLUzA6e/ZsIZcBAADgeHab+RoAAKCkIxgBAABYEIwAAAAsCEYAAAAWBCMAAAALghEAAIAFwQgAAMCCYAQAAGBBMAIAALAgGAEAAFgQjAAAACwIRgAAABYEIwAAAAuCEQAAgAXBCAAAwIJgBAAAYEEwAgAAsCAYAQAAWBCMAAAALIp1MPr+++/VuXNnlS9fXp6enmrZsqU+/vjjPG8fGRkpk8mU7Ss6OrrwigcAACWOs6MLyM62bdvUsWNHubu7q2/fvipXrpzWrl2rPn366MKFCxo3blye99W9e3c1adIk0/KAgAD7FQwAAEq8YhmMUlNTNWzYMDk5OWnHjh3WUPP666+rRYsWmjRpknr37i1/f/887a9Hjx4aNGhQ4RUMAABKhWJ5KW3r1q06deqUnnnmmQw9PT4+Ppo0aZKSk5O1cuVKxxUIAABKpWLZY5Q+9icsLCzTuo4dO0qStm/fnuf9HTx4UFeuXFFqaqoCAgLUoUMHVapUyS61AgCA0qNYBqO4uDhJUmBgYKZ1vr6+8vLysrbJi3/+858Z3nt4eGjq1KmaMGFCjtuFh4crPDzc+t5sNuf5mAAAoOQplpfSEhMTJf126Swr3t7e1jY5qVWrlpYuXaoTJ07o9u3b+umnn7Rq1SpVrFhREydO1NKlS3PcPikpSfHx8dZXQkJC/k8GAAC
UGMWyx8heQkJCFBISYn1frVo19e/fX02bNlWzZs00bdo0jRo1Ss7OWf8YvL29Va1aNet7s9lMOAIAoBQrlj1G6T1F2fUKJSUlZdublBcNGzZUcHCwrl69quPHj2fbbuzYsfrpp5+sr9jYWJuPCQAAir9iGYzSxxZlNY7o559/1s2bN7Mcf5QflStXliTdunWrQPsBAAClR7EMRumXvzZv3pxpXVRUVIY2tkhLS9O+ffskKc9zIQEAgNKvWAaj9u3bq3bt2lqzZo0OHTpkXZ6YmKhZs2bJ1dVVAwYMsC5PSEhQbGxspktv+/fvz7TvtLQ0TZw4USdPnlRoaKj8/PwK7TwAAEDJUiwHXzs7OysiIkIdO3ZUmzZtMjwS5Ny5c1qwYEGGx3m89tprWrlypVasWJFhhutmzZqpcePGaty4sapVq6arV69q+/btOnHihKpXr66IiIiiPzkAAFBsFctgJEmhoaGKiYnR1KlT9dFHHyklJUWNGjXS3Llz1adPnzztY9y4cdqzZ4++/vprXb16Va6urqpbt64mT56ssWPHqkKFCoV8FgAAoCQptsFIklq0aKFNmzbl2i4yMlKRkZGZli9YsKAQqgIAAKVVsRxjBAAA4AgEIwAAAAuCEQAAgAXBCAAAwIJgBAAAYEEwAgAAsCAYAQAAWBCMAAAALAhGAAAAFgQjAAAAC4IRAACABcEIAADAgmAEAABgQTACAACwIBgBAABYEIwAAAAsCEYAAAAWBCMAAAALghEAAIAFwQgAAMCCYAQAAGBBMAIAALAgGAEAAFgQjAAAACwIRgAAABYEIwAAAAuCEQAAgAXBCAAAwIJgBAAAYEEwAgAAsCAYAQAAWBCMAAAALAhGAAAAFgQjAAAAC4IRAACABcEIAADAgmAEAABgQTACAACwIBgBAABYEIwAAAAsCEYAAAAWBCMAAAALghEAAIAFwQgAAMCiWAej77//Xp07d1b58uXl6empli1b6uOPP87XPu7du6cZM2YoMDBQ7u7uqlq1qoYPH65ff/21kKoGAAAllbOjC8jOtm3b1LFjR7m7u6tv374qV66c1q5dqz59+ujChQsaN25crvswm83q3r27oqKi1LJlS/Xq1UtxcXGKiIjQN998oz179qhKlSpFcDYAAKAkKJY9RqmpqRo2bJicnJy0Y8cOLV++XAsXLtThw4dVr149TZo0SefOnct1PytXrlRUVJT69eunb7/9VnPmzNHatWv19ttv6/Tp05o8eXIRnA0AACgpimUw2rp1q06dOqVnnnlGTZo0sS738fHRpEmTlJycrJUrV+a6n3fffVeSNHv2bJlMJuvyESNGqHbt2nr//fd1584du9cPAABKpmIZjKKjoyVJYWFhmdZ17NhRkrR9+/Yc93H37l3t3btX9evXl7+/f4Z1JpNJjz/+uG7duqV9+/bZp2gAAFDiFcsxRnFxcZKkwMDATOt8fX3l5eVlbZOdU6dOyWw2Z7mP3+87Li5OrVu3zrJNeHi4wsPDre/T0tIkSUlJSbmfBDIx37vt6BJKHf4tlnxF/f8L/s3YF59fyZD+czMMI9e2xTIYJSYmSvrt0llWvL29rW0Kso/ft8tKUlKS4uPjMy2vUaNGjscGiorPYkdXgJKGfzMlG59fwdy4cSPbXJCuWAaj4sLb21vVqlWzvjebzRo6dKjGjRuXYcySIzVo0EAJCQny8/NTbGyso8uxu9J+flLpP0fOr+Qr7edY2s9P+nOcY04Mw9CNGzdUtWrVXNsWy2CUnuay681JSkpShQoVCryP37fLytixYzV27Nhc63UkJycn6/+m94KVJqX9/KTSf46cX8lX2s+xtJ+f9Oc4x9zk1lOUrlgOvv79+J8/+vnnn3Xz5s1sxw6lq127tpycnLIdi5TTOCYAAPDnVCyDUUhIiCRp8+bNmdZFRUVlaJMdDw8PtWjRQv/73/8yzXlkGIa+/vpreXp6qlmzZnaqGgAAlHTF8lJa+/btVbt2ba1Zs0ajR4+2zmWUmJioWbNmydXVVQMGDLC2T0hIUGJiovz8/DJ0lQ0fPlx79uzRa6+9pvfff986Luidd97R6dOnNXz4cHl4eBTpudnb2LFjlZSUVGq7Rkv7+Uml/xw5v5KvtJ9jaT8/6c9xjvZiMvJy75oDZPdIkHPnzmnBggUZHgkyaNAgrVy5UitWrNCgQYOsy81mszp37mx9JEhISIhOnjypdevWKSAgQHv37uWRIAAAwKpYXkqTpNDQUMXExCgoKEgfffSR/vWvf+n+++/Xhx9+mKfnpEm/DTL7/PPPNW3aNF26dEmLFi3Srl279Nxzz2n37t2EIgAAkEGx7TECAAAoasW2xwgAAKCoEYwAAAAsCEYl1Pfff6/OnTurfPny8vT0VMuWLfXxxx87uiy7iI+P1+LFixUWFqaaNWvK1dVVvr6+6tWrl/bu3evo8grN3LlzZTKZZDKZtGfPHkeXYxefffaZHn/8cVWqVEnu7u6qVauW+vXrpwsXLji6tAIzDEPr1q1TaGio/Pz8VLZsWdWvX18jRozQ6dOnHV1enq1evVojRoxQs2bN5ObmJpPJpMjIyGzbJyUlaezYsfL395ebm5sCAgL06quv6ubNm0VXdD7k9fxSUlK0du1aDRw4UA888IC8vLxUrlw5/eUvf9G//vUv67Myi5v8fn6/d/r0aXl5eclkMmnkyJGFW2hJYqDE2bp1q+Hi4mKUK1fOGDZsmDF27FjD39/fkGQsWLDA0eUV2IQJEwxJRp06dYznnnvOmDhxotGrVy+jTJkyhpOTk/Hhhx86ukS7O3LkiOHm5mZ4enoakozdu3c7uqQCMZvNxvDhw62f4/PPP29MmDDB6N+/v1GzZk1j586dji6xwMaOHWtIMvz8/IyRI0ca48ePNzp27GiYTCajXLlyxpEjRxxdYp6k/+6oXLmy9b9XrFiRZdubN28aTZo0MSQZYWFhxoQJE4ywsDBDktG8eXPjzp07RVt8HuT1/I4fP25IMry8vIzu3bsb48ePN0aMGGFUrVrVkGQ8+eSThtlsLvoTyEV+Pr/fS0tLM1q3bm39nTNixIjCL7aEIBiVMCkpKUadOnUMNzc34+DBg9bl169fN+rVq2e4uroaZ8+edVyBdrB27VojOjo60/IdO3YYLi4uRoUKFYy7d+86oLLCkZycbDRt2tT4y1/+Yjz77LOlIhgtXrzYkGQ8//zzRmpqaqb1KSkpDqjKfhISEgwnJyfD39/fuH79eoZ14eHhhiRj8ODBDqouf77++mvr74zZs2fn+MX6+uuvG5KMCRMmZFie/sfMrFmzCrvcfMvr+f3000/GW2+9Zdy8eTPD8ps3bxrNmjUzJBkff/xxUZScL/n5/H5vwYIFhrOzs7Fo0SKC0R9wKa2E2bp1q06dOqVnnnnGOvGl9NszYCZNmqTk5GStXLnScQXawVNPPZXlzOatW7dWaGiorl27piNHjjigssIxc+ZM/fjjj/rPf/6jMmXKOLqcArtz546mT5+u2rVra8mSJVmek7NzsZxbNs/Onj0rs9msoKCgTM9fev
LJJyVJly5dckRp+dahQwf5+/vn2s4wDEVERMjLy0tTpkzJsG7KlCny8vJSREREYZVps7yeX7Vq1fT888/L09Mzw3JPT0/rMzO3b99eKDUWRF7P7/diY2M1efJkvfbaaxm+R/AbglEJEx0dLUkKCwvLtK5jx46Siuf/ee3FxcVFUsn/Yk134MABzZw5U1OnTtWDDz7o6HLsYvPmzbp27Zp69OihtLQ0rVu3TnPmzNGyZct08uRJR5dnF4GBgXJ1ddWuXbusD6RO98UXX0j6bQb/0iQuLk4XL15UUFBQluEhKChIp0+fLhXjx/6oNP3eSUtL08CBAxUYGKjJkyc7upxiqeR/yn8yOT381tfXV15eXtk+OLekO3/+vLZs2SI/Pz81atTI0eUU2L179zRgwAA1adJE48ePd3Q5drN//35JUpkyZdS4cWOdOHHCus7JyUkvv/yyFixY4Kjy7KJSpUqaM2eOxo0bpwYNGqh79+7y9vbW4cOHtXXrVj3//PN68cUXHV2mXeX24O3AwEBFRUUpLi5ONWrUKMrSCt1//vMfSVn/QVrSzJ49WwcOHNCePXvk6urq6HKKJYJRCZOYmChJmbrv03l7e1vblCYpKSnq37+/7t27p7lz55aKS06vv/664uLitH///lJxPul+/fVXSVJ4eLiaNm2q7777Tg888IAOHjyo4cOHa+HChapTp45GjRrl4EoL5uWXX1a1atU0dOhQLVu2zLo8ODhYzzzzTKnoXfi9vPzu+X270mL58uXatGmT2rVrp86dOzu6nAI5fPiwZsyYoVdffVWPPvqoo8sptriUhmLPbDZr0KBB2rFjh4YNG6b+/fs7uqQC2717txYsWKDJkyfroYcecnQ5dmU2myVJrq6uWr9+vZo3by4vLy+1bt1an3zyiZycnLRw4UIHV1lwM2bM0LPPPqtJkybpwoULunHjhnbu3Km7d++qbdu2+u9//+voElFAX3zxhV588UX5+/tr9erVji6nQJKTkzVw4EDVrVtXU6dOdXQ5xRrBqIRJ/2stu7/KkpKSsv2LriQym80aMmSI1qxZo2effTbDX+YlVWpqqgYOHKjGjRtr4sSJji7H7tL//TVr1kxVq1bNsO6hhx5S7dq1derUKV2/ft0B1dnHli1bNHXqVL344ouaOHGiqlevLi8vLwUHB2vDhg1ycXHJ8zMdS4q8/O75fbuS7ssvv1Tv3r11//33a+vWrfLz83N0SQUye/ZsHTlyRCtWrJCbm5ujyynWCEYlTPr1/azGEf3888+6efNmtmMAShqz2azBgwdr5cqV6tevnyIjI+XkVPL/yd68eVNxcXE6dOiQXF1drZM6mkwm6x2Fjz32mEwmk9avX+/YYm1Qv359SVL58uWzXJ++/M6dO0VUkf1t2rRJ0m8Pu/4jX19fNWjQQCdPniy2kx7aIqffPb9fXhp+/2zcuFFPPfWUKleurG3btql27dqOLqnADh48KLPZrJYtW2b4nZP+b/idd96RyWRSjx49HFtoMVC6LoL/CYSEhGj27NnavHmz+vbtm2FdVFSUtU1Jlx6KVq1apT59+ui9994rNeNw3Nzc9Nxzz2W5bseOHYqLi1O3bt1UpUoVBQQEFG1xdpD+i/b48eOZ1qWkpOjkyZPy9PRUlSpViro0u0lOTpaU/S35ly5dkpOTk/VuptIgMDBQVatW1a5du3Tr1q0Md6bdunVLu3btUq1atUr8wOuNGzeqV69eqlixorZt26a6des6uiS7ePzxx1W5cuVMyxMSEvTll1+qQYMGCgoK0iOPPOKA6ooZR0+khPxJSUkxateuneMEj2fOnHFYffaQlpZmDBw40JBkPP300yV+MsD8SD/vkj7BY/psyO+++26G5TNmzDAkGc8++6yDKrOPDz74wJBkNGzYMNMEj//6178MSUZQUJCDqrNdaZzg8fdyO78vv/zScHNzM3x9fY3Y2NiiLc4O8jPBY7pt27YxweMfmAzDMBwTyWCrbdu2qWPHjnJ3d1ffvn1Vrlw5rV27VufOndOCBQtK/NiGadOmafr06fLy8tKYMWOyvLunR48epXJiskGDBmnlypXavXu3WrZs6ehybHbq1Cm1atVKv/76q7p06aIGDRro4MGD2rp1q/z9/bVnzx75+vo6ukybpaWlqV27dtqxY4fuu+8+devWTeXLl9eBAwe0detWeXh4KDo6Wi1atHB0qbmKiIhQTEyMJOnIkSM6cOCAgoKCrD0lwcHBGjp0qKTfeoaCgoJ0+PBhhYWFqWnTpjpw4IA2b96s5s2ba/v27fLw8HDYuWQlr+cXGxurJk2a6N69e+rbt6/1kvDvBQQEaNCgQUVZfq7y8/llJTo6WqGhoRoxYkSpGMNpF45OZrDN3r17jU6dOhne3t6Gh4eH0aJFi1LzDLH0XpOcXvn5i6gkKS09RoZhGOfPnzcGDRpk+Pr6Gi4uLkaNGjWMF154wfjll18cXZpd3L1715g9e7bxyCOPGGXLljWcnZ2NatWqGc8++6xx7NgxR5eXZ7n9/23gwIEZ2l+/ft146aWXjBo1ahguLi5GzZo1jXHjxhlJSUmOOYFc5PX80ntOcnqFhIQ49Fyykt/P74/oMcqMHiMAAACLkn+LDwAAgJ0QjAAAACwIRgAAABYEIwAAAAuCEQAAgAXBCAAAwIJgBAAAYEEwAgAAsCAYAQAAWBCMgCIUEBAgk8kkk8mkTz/9NNt2HTp0kMlkUmRkZNEVZ4O2bdvKZDIpOjra0aUUug0bNqh169by9va2foaFcd6DBg0qEZ89UFoRjAAH+fvf/67U1FRHl4E8OHTokHr16mV9uO+AAQM0cODAEv0g3OwQzPBnl/mx5QAKXdmyZXXixAlFRERo5MiRji4HuVi/fr1SUlI0adIkzZw509HlAChE9BgBDjBmzBhJ0owZM3T79m0HV4PcnD9/XpIUGBjo4EoAFDaCEeAAnTt3VkhIiBISErRo0aI8b5fbZY7IyEiZTCYNGjQo2+WJiYkaO3asAgIC5O7ursDAQM2dO1dms1mSFB8frxEjRqhGjRpyc3NT/fr1tXTp0lxr2759u8LCwlSxYkWVLVtWLVq00HvvvZfjNt98842eeuop+fn5ydXVVffdd5969uyp3bt3Z9k+fWyPJK1YsUKPPfaYfHx8ZDKZdPbs2VxrlKTU1FQtW7ZMrVq1ko+Pj/VnMHr0aMXHx2doO23aNJlMJq1YsUKSNHjwYGsNbdu2zdPxJOn27dtavHixgoODVaFCBbm5ucnf319du3bVmjVr8rQPWz97Sfrkk0/UoUMHVapUSS4uLqpUqZIefPBBDRs2TD/88IMk6ezZszKZTFq5cmWmczWZTJo2bVqGfd65c0cLFy5Uy5YtVb58ebm7u6t+/foaP368rly5kmN9V69e1UsvvaQ6derIzc0tw89y//796tOnj6pXry5XV1d5e3urdu3a6tWrlz7//PM8/ayAguBSGuAgc+fOVcuWLTVv3jyNHDlSlSpVKvRjXr9+XY899piuXLmi1q1b68aNG9q5c6cmTpyon376SS+99
JKCg4Pl4uKiVq1a6dKlS9qxY4dGjx6t27dva8KECVnu97PPPtObb76pBg0aqGPHjrp48aJiYmI0YMAAHTp0SAsXLsy0zSuvvKKFCxfKyclJzZo1U+vWrXX+/Hl9/vnn2rBhg959910NHjw4y+P97W9/09tvv61WrVqpS5cuOn36tDUw5eTevXt68skntWXLFrm7uys0NFTe3t769ttvtXTpUn3wwQeKiopS06ZNJUlNmjTRwIEDFRMTo1OnTikoKEh169aVJDVo0CBPP/MLFy6oU6dOOnbsmMqWLaugoCBVqlRJ8fHx2rlzp44cOaJnnnkmT/uyxYwZMzR16lQ5OzurVatWqlatmhITE3X+/Hn9+9//VsOGDdW4cWN5eXlle67Sbz+LdBcvXlSnTp105MgRVaxYUc2bN1e5cuV04MABzZ8/X5988omio6Pl7++fqZ7Lly+rWbNmun79ulq3bq1HH31Urq6ukn4Lyk888YRSUlL08MMP67HHHlNaWpri4+O1ceNGpaWlqXv37oX2swIkSQaAIuPv729IMnbu3GkYhmE89dRThiTj5ZdfztCuffv2hiRjxYoVGZYPHDgwy+XpVqxYYUgyBg4cmOVySUbXrl2NW7duWdft37/fcHZ2NpycnIwHH3zQGDlypJGSkmJdv379ekOS4e3tnWE7wzCMkJAQ635nzZqVYV10dLTh4eFhSDK++uqrDOuWL19uSDLq1q1rHD58OMO67du3G+XKlTNcXV2NEydOZFiXfixvb29j9+7dWf4McjJhwgRDklGnTh3jzJkz1uXJycnGc889Z0gyatWqZdy7dy/Ddrn93LOTlpZmNGvWzJBkhIWFGb/++muG9Xfu3DE2btyYp2PZ8tnfvXvX8PDwMLy8vIzY2NhM25w9e9Y4fvx4vo5jNpuNoKAgQ5Lx3HPPGUlJSdZ1KSkpxrhx4wxJRmhoaJb1STLat29vJCYmZtp3aGioIclYvXp1pnXXr1+36TMH8otLaYADzZo1S87Oznr77bd17ty5Qj+el5eXIiIiVLZsWeuypk2bqnPnzjKbzbp586YWLVokZ+f/35ncvXt3NWrUSElJSdq3b1+W+33kkUf02muvZVgWEhKi559/XpIy9BiZzWbrZZkPP/xQjRs3zrBdmzZtNGXKFCUnJ+udd97J8nivvPKKWrZsmfcTl3T37l299dZbkqRFixYpICDAus7FxUX//Oc/df/99+vMmTM5TqWQHxs2bNC+ffvk5+entWvXqkqVKhnWu7u7q3PnznY5VlaSkpJ0584d1a5dW/Xr18+03t/fP889X+mioqK0a9cuNWnSRMuWLVO5cuWs65ydnTVv3jw99NBD2rZtm44ePZppexcXFy1fvlze3t6Z1v3yyy+SlOXPxMfHJ9+fOWALghHgQPXr19eQIUN07949TZkypdCP9+ijj+q+++7LtDx9UHFoaKjc3d2zXX/x4sUs9ztgwIAslw8cOFCSFBMTo7S0NEnSwYMHdfHiRdWpU0ePPvpoltuljzn59ttvs1zfu3fvLJfnZN++fbp586YqVqyorl27ZlpftmxZ9e3bV5K0bdu2fO8/K1999ZUk6ZlnnpGXl5dd9pkfVapUUUBAgH744QeNGzdOx44dK/A+N27cKEnq1atXhgCdzsnJSW3atJGU9ef3yCOPqHbt2lnuu0WLFpKk//u//1NMTAzTWcAhCEaAg02bNk1ly5bV+++/bx0IW1hq1qyZ5fL0L+3s1qf3Cty9ezfL9bVq1cpx+Z07d6wDck+fPi1JOnXqVIbBvb9/pX9BXrp0Kcv9/r63J6/SB1ZnV6sk1alTJ0PbgkrvBcxvr4w9rVq1Svfdd5/Cw8PVsGFDVapUSZ07d9aiRYt0+fLlfO8v/fObMmVKtp/f22+/LSnrzy+nz2727Nlq2rSpNm3aZJ1MMzg4WJMnT9bx48fzXStgCwZfAw7m5+enMWPGaPbs2Xrttdesf5HbIv3Osuw4OeX8t1Bu6wvCMAxJ/79GX19fdezYMcdtKleunOVyDw8P+xZXCmT32bdu3Vpnz57Vxo0btX37dn377beKiorSpk2bNHXqVH322Wdq3759vo8THBxsDZLZadiwYaZlOX12vr6+2rdvn7Zv364tW7Zo165d2rt3r3bt2qVZs2Zp9uzZ2d4AANgLwQgoBiZMmKDly5fryy+/1I4dO7Jtl373zo0bN7JcXxTjlLJy5syZLJen30Lv7u5uveuuRo0akqRKlSoV6ezK1apVk5R9rdL/7w1Jb1tQ6T1wsbGxBd5XQT57Dw8P9e7d23oJ8tKlS5o8ebKWL1+uIUOG5OvfTfrn1717d73yyit53i6v0qdCSL+cevfuXUVGRuqFF17QpEmT1Lt371wDGVAQXEoDigEfHx9NmjRJkjR+/Phs26V/YWd1WcEwDG3atKlwCszF6tWrs1y+atUqSb/1LqSPR2nevLkqV66sY8eO6ccffyyyGps1ayYvLy9dvXpV//3vfzOtv3Pnjj788ENJv421sodOnTpJkj744APdunWrQPuy52dfpUoVzZs3T9Jvk1deu3bNui49gGU3vueJJ56Q9NvcSOm9gIXJ3d1dI0eOVOPGjWU2mwv9cjNAMAKKiRdeeEE1a9bU3r17s53gsEOHDpKk9957L8NA2pSUFE2YMEHff/99kdT6R/v377d+0aaLiYmx3gX28ssvW5e7uLho6tSpMgxDPXv2VExMTKb9paWlaevWrdqzZ4/danR3d9cLL7wgSRo3blyGXpKUlBSNGTNGP//8s2rVqmXT4O6sdOvWTY888oguXryop59+OtPEh3fv3s1zoLHlsz937pwiIiKUlJSUad2GDRskSRUqVMhwh1j16tUlKdvQ2r17dzVv3lzfffedBg8enOU4omvXrmnZsmX5Hjy9YMEC6yzjvxcbG6u4uDhJynJuJMCeuJQGFBNubm6aMWOGBg0alO1jQoKCgtS9e3d9/vnnatasmYKDg+Xh4aEDBw4oKSlJY8aM0ZIlS4q4cmn06NF67bXXtGrVKjVu3FgXL17Uzp07ZTabNWbMmEy3X7/44os6f/685s+fr9atW6thw4aqW7euPDw89PPPP+vQoUO6fv26/vWvf9n1Fu3p06dr3759+uabb/TAAw8oNDRU5cqV0+7du3X+/HlVqlRJn3zyibXXpKCcnJz02WefqWPHjtq0aZNq1qyp4OBg6wSPhw8fVvny5fM0a7ctn/21a9c0bNgwPf/882rSpIl14HlcXJwOHjwok8mk+fPnq0yZMtZtevTooenTp+uf//ynjh49qho1asjJyUndunVTt27d5OTkpPXr16tLly5auXKlPv30Uz388MOqWbOmkpOTdfr0aR05ckRpaWkaNGhQlneuZeeNN97Qq6++qgYNGuiBBx6Qh4eHdbLQ1NRUDRgwwDr5JlBY6DECipH+/furUaNGObb56KOPNHnyZPn5+Sk6Olp79uxR69atdeDAgQyzExelnj176uuvv5avr6++/PJLfffdd2ratKkiIyO1ePHiLLeZN2+edu3apf/7v//T
zZs39dVXX2njxo26ePGi2rZtq4iICPXp08eudbq5uemrr77S22+/rYcfflg7d+7UZ599JhcXF/3tb3/T4cOHs51CwFb+/v7at2+f5s6dq4YNG2r37t1at26dzp07p5CQEM2dOzfP+8rvZ1+nTh0tXrxYTz75pK5fv64vv/xSGzdu1K1btzRgwAB9//33eu655zJs07hxY61du1aPPfaY9u7dq8jISP373//WgQMHrG2qVq2qPXv2aNmyZWrRooX+97//6dNPP7X2/o0cOVJRUVFZTv2Qk7feekuDBw+Ws7Oztm/frrVr1+rMmTN6/PHH9dlnnxXpmDT8eZmMorhIDAAAUALQYwQAAGBBMAIAALAgGAEAAFgQjAAAACwIRgAAABYEIwAAAAuCEQAAgAXBCAAAwIJgBAAAYEEwAgAAsCAYAQAAWBCMAAAALP4frAlQn1O5uC0AAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlIAAAG7CAYAAAAIfHukAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/H5lhTAAAACXBIWXMAAA9hAAAPYQGoP6dpAABS7ElEQVR4nO3deVxU9f4/8NewKzC4oIGooIhpflFcIA1kUQLXzBX1hqJJZpkaXkVNBTXXlBb1WsoNUDSXLM0U0VRAUMk9NxI3lMUKF0YRZJnz+6PfzHWaAYbDIAO8no8Hj0fzOZ9zzvsczr3z8pwPnyMRBEEAEREREVWaQU0XQERERFRbMUgRERERicQgRURERCQSgxQRERGRSAxSRERERCIxSBERERGJxCBFREREJJJRTRdQ18nlcmRnZ8PS0hISiaSmyyEiIiItCIKAJ0+eoEWLFjAwKPu+E4NUNcvOzkarVq1qugwiIiIS4d69e2jZsmWZyxmkqpmlpSWAv38RUqm0hqshIiIibchkMrRq1Ur5PV4WBqlqpnicJ5VKGaSIiIhqmYqG5XCwOREREZFIDFJEREREIjFIEREREYnEIEVEREQkEoMUERERkUgMUkREREQiMUgRERERicQgRURERCQSgxQRERGRSHoZpLKysvDFF1/Az88PrVu3homJCWxsbDB8+HCkpqZWaltyuRxr166Fs7MzGjRogGbNmmHMmDG4detWmevEx8fDy8sLlpaWkEql8PHxwZEjR6p6WERERFTHSARBEGq6iH+aM2cOVq5cCUdHR3h7e6NZs2ZIT0/Hnj17IAgCtm3bhoCAAK22FRwcjMjISHTq1AkDBw5EdnY2du7cCQsLC5w6dQpOTk4q/WNjYxEYGIhmzZop97Fjxw7k5uZi586dGDFiRKWORSaTwcrKCnl5eXxFDBERUS2h7fe3XgapH374AU2bNoWXl5dK+/Hjx9G3b19YWFggJycHpqam5W7n2LFj6NOnDzw9PXH48GGYmJgAAOLi4jBgwAD4+fkhPj5e2f/Ro0do27YtjIyMcP78eeXbnjMzM9G1a1cAwK1btyp8geGLGKSIiIhqH22/v/Xy0d6wYcPUQhQA9O7dGz4+Pnj06BEuXbpU4XY2bdoEAFiyZIkyRAFA//794e3tjUOHDuHu3bvK9l27duHx48f46KOPlCEKAFq2bImpU6ciNzcXP/74Y1UOjYiIiOoQvQxS5TE2NgYAGBkZVdg3ISEB5ubmcHd3V1vm7+8PAEhMTFTpDwB+fn5a9SciIqL6rVYFqbt37+KXX36Bra0tnJ2dy+2bn5+PnJwctGnTBoaGhmrLFWOj0tPTlW2K//7nuKmy+msSERGBli1bKn86dOhQ/kERERFRrVXxbR09UVxcjMDAQDx//hwrV67UGI5elJeXBwCwsrLSuFzxvFPRr6J1NPXXRCaTISsrq9w+RLWRw5z9OtnOnRUDdbIdIiJ9UCuClFwuR1BQEJKSkhAcHIzAwMCaLqlMUqkUdnZ2ys9yuRw5OTk1WBERERFVF71/tCeXyzFx4kRs27YN77zzDr7++mut1lPcVSrrDpJMJlPpV9E6mvprEhISgszMTOVPWlqaVvUSERFR7aPXQUoul2PChAmIiYnBmDFjEB0dDQMD7Uo2NzeHra0tbt++jdLSUrXlmsZDlTcOqrzxU0RERFQ/6W2QUoSozZs3IyAgAFu2bKlwXNQ/eXl5IT8/HykpKWrLFPNHeXp6qvQHgEOHDpXZX9O0DERERFQ/6WWQUjzO27x5M0aOHInY2NhyQ1Rubi7S0tKQm5ur0v7ee+8BABYsWICioiJle1xcHBISEuDn5wd7e3tl+6hRo2BlZYW1a9ciMzNT2Z6ZmYl169bB2toaQ4cO1dVhEhERUS2nl4PNFy9ejJiYGFhYWKB9+/b49NNP1fq8/fbbcHFxAQCsW7cOixYtQlhYGMLDw5V9fHx8MGnSJERGRqJbt24YOHAgcnJysGPHDjRp0gRr165V2Wbjxo2xbt06BAYGolu3biqviHnw4AF27NhRqVnNiYiIqG7TyyB1584dAMDTp0+xdOlSjX0cHByUQao833zzDZydnbFx40Z8+eWXsLCwwNChQ7F06VI4Ojqq9X/nnXdgbW2NZcuWISoqChKJBN27d8f8+fPh6+tblcMiIiKiOkYv37VXl/Bde1RXcB4pIqpPavW79oiIiIhqAwYpIiIiIpEYpIiIiIhEYpAiIiIiEolBioiIiEgkBikiIiIikRikiIiIiERikCIiIiISiUGKiIiISCQGKSIiIiKRGKSIiIiIRGKQIiIiIhKJQYqIiIhIJAYpIiIiIpEYpIiIiIhEYpAiIiIiEolBioiIiEgkBikiIiIikRikiIiIiERikCIiIiISiUGKiIiISCQGKSIiIiKRGKSIiIiIRGKQIiIiIhKJQYqIiIhIJAYpIiIiIpEYpIiIiIhEYpAiIiIiEolBioiIiEgkvQ1SsbGxmDx5Mnr06AFTU1NIJBJER0dXahve3t6QSCTl/mzZskVlHQcHhzL7ent76+4AiYiIqNYzqukCyjJ//nxkZGTA2toatra2yMjIqPQ2goKCNIaf4uJiLF++HAYGBujbt6/acisrK8yYMUOt3cHBodI1EBERUd2lt0EqMjISTk5OsLe3x4oVKzB37txKbyMoKEhj++7duyEIAgYMGIAWLVqoLW/UqBHCw8MrvT8iIiKqX/Q2SPn6+lbbtv/73/8CAN59991q2wcRERHVfXobpKpLZmYm4uPjYWtri4EDB2rs8/z5c0RHRyM7OxtSqRSurq54/fXXX3KlREREpO/qXZCKioqCXC7H+PHjYWSk+fDv37+PCRMmqLS5urriu+++g6OjY7nbj4iIQEREhPKzXC6vetFERESkl/T2r/aqgyAIiIqKAlD2Y70JEybgyJEj+OOPP5Cfn4/z588jMDAQp0+fRt++ffHkyZNy9yGTyZCVlaX8ycnJ0flxEBERkX6oV3ekjh49itu3b8PLywvt2rXT2CcsLEzls4uLCzZv3gwA2LJlCzZt2oSQkJAy9yGVSmFnZ6f8LJfLGaaIiIjqqHp1R0oxyHzSpEmVXnfy5MkAgJSUlHL7hYSEIDMzU/mTlpZW+UKJiIioVqg3QerRo0f48ccf0ahRI4wYMaLS61tbWwMA8vPzdV0aERER1VL1JkjFxsaisLAQ//rXv2BmZlbp9VNTUwFwUk4iIiL6nzoRpHJzc5GWlobc3Nwy+2gzd1RaWhqePXumsT00NBQAMHbs2CpWS0RERHWF3g42j4yMRHJyMgDg0qVLyraEhAQAgIeHh3Ks07p167Bo0SKEhYVpnJH87NmzuHjxIrp164auXbuWuc/t27cjIiICnp6esLe3h7m5Oa5fv44DBw6guLgYc+fOhaenp24PlIiIiGotvQ1SycnJiImJUWlLSUlRGeyt7aBxb
QeZ+/j44Nq1azh//jyOHz+OZ8+ewdraGgMGDMAHH3wAPz+/Sh4FERER1WUSQRCEmi6iLpPJZLCyskJeXh6kUmlNl0MkmsOc/TrZzp0Vmt8oQESkT7T9/q4TY6SIiIiIagKDFBEREZFIDFJEREREIjFIEREREYnEIEVEREQkEoMUERERkUgMUkREREQiMUgRERERicQgRURERCQSgxQRERGRSAxSRERERCIxSBERERGJxCBFREREJBKDFBEREZFIDFJEREREIjFIEREREYnEIEVEREQkkqggVVpaCplMhpKSEpX2goICLFq0CEOHDsXHH3+M7OxsnRRJREREpI+MxKy0ePFifPrpp0hISEDv3r0BAIIgwNvbG2fOnIEgCJBIJPjhhx9w4cIFNG7cWKdFExEREekDUXekjhw5AhsbG2WIAoB9+/bh9OnTcHJywhdffAE/Pz9kZmZi06ZNOiuWiIiISJ+IClK3b99Ghw4dVNr27t0LiUSCrVu3Ytq0adi3bx+aNWuG77//XieFEhEREekbUUHqwYMHsLGxUWlLSUmBnZ0dunfvDgAwMjJCz549cffu3apXSURERKSHRAUpIyMj5OfnKz8/evQI6enpcHd3V+lnaWmJvLy8qlVIREREpKdEBam2bdvi1KlTkMvlAICff/4ZgiDAw8NDpd+ff/6JZs2aVb1KIiIiIj0kKki99dZb+PPPPzFkyBB8+eWXCA0NhaGhIQYPHqzsIwgCzp8/jzZt2uisWCIiIiJ9Imr6g9mzZ2Pv3r3Yv38/9u/fDwCYM2cOWrdureyTnJyM3NxctbtURERERHWFqCAllUrx66+/4vvvv8cff/wBV1dXeHl5qfR58OABpk+fjlGjRumkUCIiIiJ9IypIAUCDBg0QGBhY5vK3334bb7/9ttjNExEREek9UWOk+vTpg1WrVlXYb/Xq1ejbt6+YXRARERHpPVFBKiEhAWlpaRX2+/3335GQkCBmF4iNjcXkyZPRo0cPmJqaQiKRIDo6utJ1SiSSMn/K2t7169cxatQoWFtbo0GDBujSpQs2bNgAQRBEHQsRERHVTaIf7WmjuLgYBgaishrmz5+PjIwMWFtbw9bWFhkZGaLr8PLygre3t1q7i4uLWtvVq1fxxhtvoKCgAKNGjUKLFi2wf/9+fPDBB7h69SrWrl0rug4iIiKqW6o1SF26dAlNmzYVtW5kZCScnJxgb2+PFStWYO7cuaLr8Pb2Rnh4uFZ9p0yZgry8PBw4cAD9+/cHACxZsgS+vr5Yt24dxo4di169eomuhYiIiOoOrYPUxIkTVT4nJyertSmUlJTg6tWruHDhAt566y1Rhfn6+oparyquX7+OpKQk+Pj4KEMUAJiYmGDJkiXw9vbGpk2bGKSIiIgIQCWC1IvjiSQSCW7cuIEbN26Uu06LFi2wdOlS0cXpSnp6Or744gsUFBSgZcuW6NOnD+zs7NT6KcZz+fn5qS3z8PCAubk5EhMTq7tcIiIiqiW0DlJRUVEA/p6xfOLEifDw8MC7776rsa+JiQlatmyJnj17wtjYWDeVVsG2bduwbds25WcjIyN89NFH+Oyzz2BoaKhsT09PBwA4OTmpbcPQ0BBt2rTB1atXUVJSAiMjzacuIiICERERys+K1+gQERFR3aN1kBo/frzyv8PDw9GzZ0+VNn3UrFkzrFixAoMGDYKDgwPy8/Nx8uRJzJkzB59//jkkEgnWrFmj7K94wbKVlZXG7UmlUsjlcjx58gSNGzfW2EcmkyErK0v3B0NERER6R9Rg8zt37ui4jOrRqVMndOrUSfnZ3NwcQ4YMweuvv47OnTvjq6++QmhoKJo3b66zfUqlUpXHhnK5HDk5OTrbPhEREekPcXMT1HI2NjYYMmQISkpKkJqaqmxX3IlS3Jn6J5lMBolEAktLyzK3HRISgszMTOWPNvNtERERUe1UpekPUlNT8csvvyArKwuFhYUa+0gkEvz3v/+tym6qhbW1NQAgPz9f2aYYG6UYK/Wi0tJS3L59G23atClzfBQRERHVL6ISQVFREcaMGYM9e/YAQLkzfutrkFLciXJwcFC2KV68fOjQIcyZM0elf3JyMvLz89VezkxERET1l6ggtWTJEvz4448wNzdHYGAgOnbsCKlUquvatJabm4vc3FxYW1sr7zQBwNmzZ9G9e3e1/l9++SWOHTsGJycnuLq6KttfffVVeHp64tixY4iLi1POJVVUVIQFCxYAACZNmlTNR0NERES1hagg9d1336Fhw4ZITU3Fa6+9puuaAPw9s3lycjKAv2dIV7Qp5nry8PBQhpp169Zh0aJFCAsLU5nBfPjw4TA2NkaPHj3QsmVL5Ofn49SpUzh//jwaNWqE2NhYlekPAOA///kP3N3d8fbbbyMgIAC2trbYv38/rly5gqlTp+KNN96oluMlIiKi2kdUkMrMzISXl1e1hSjg70dpMTExKm0pKSlISUlRfq7o7tCUKVMQHx+PpKQkPHjwAAYGBrC3t8eMGTMwc+ZMtGzZUm2dTp06ITU1FfPnz8f+/fuRn5+P9u3bY/369ZgyZYpuDo6IiIjqBIlQ3gCnMtja2sLb2xvfffddddRUp8hkMlhZWSEvL69GH38SVZXDnP062c6dFQN1sh0iouqk7fe3qOkPfH19kZqaWu4gcyIiIqK6TlSQWrJkCR4+fKgyHomIiIiovhE1RiopKQkTJkzAp59+ioMHD2LgwIFo3bo1DAw057Jx48ZVqUgiIiIifSRqjJSBgQEkEony0Z5EIim3f2lpqbjq6gCOkaK6gmOkiKg+0fb7W9QdqXHjxlUYnoiIiIjqOlFBKjo6WsdlEBEREdU+9fKlxURERES6oJO37964cQN//fUXmjZtivbt2+tik0RERER6T/QdqdLSUnz66aewsbHBq6++Cg8PD6xYsUK5fOvWrXjjjTdw5coVnRRKREREpG9EBanS0lIMGjQIYWFhePToETp27Kg2Oae7uztOnTqFH374QSeFEhEREekbUUHq66+/Rnx8PHx8fHD79m1cvnxZrY+DgwMcHR1x6NChKhdJREREpI9EBamYmBg0adIEu3btQosWLcrs17FjR9y9e1d0cURERET6TFSQSktLg5ubGxo3blxuPysrK/z555+iCiMiIiLSd6LHSJmamlbYLycnR6t+RERERLWRqCBlb2+P3377rdw+xcXFuHz5MpycnEQVRkRERKTvRAWpfv364c6dO9i4cWOZfdauXYu//voLAwfyvVpERERUN4makHPWrFmIjo7GBx98gKtXr2LUqFEAgPz8fJw7dw47d+5EREQErK2tMXXqVJ0WTERERKQvJMI/J4DSUlJSEoYNG4aHDx+qvcBYEAQ0atQIP/30Ezw8PHRSaG2l7dujifSdw5z9OtnOnRW8S01E+k/b72/RM5t7enriypUrmD17Njp16oQGDRrA1NQU7dq1w7Rp03Dp0qV6H6KIiIiobqvSu/ZeeeUVrFixQuXVMERERET1hejpD4iIiIjqO1FBqmXLlpg9ezauXr2q63qIiIiIag1RQeqPP/7A
mjVr4OzsjJ49e2Ljxo2QyWS6ro2IiIhIr4kKUpcuXcKMGTPQrFkz/Prrr5gyZQpsbW3xzjvv4MiRI7qukYiIiEgviQpSnTp1wpo1a5CVlYUff/wRgwcPRnFxMbZt2wY/Pz84ODggPDwcd+7c0XG5RERERPpD9PQHAGBoaIghQ4Zgz549yMrKwurVq9GpUyfcvXsXixcvRrt27dC3b19d1UpERESkV6oUpF7UrFkzhISE4LfffsOZM2cwYcIEyOVyJCQk6GoXRERERHqlSvNIaXLy5ElER0fjhx9+0PWmiYiIiPSKToJUdnY2Nm/ejJiYGFy/fh2CIMDAwAD9+vXDxIkTdbELIiIiIr0j+tFeUVERdu7cif79+8Pe3h6ffPIJfv/9dzg6OmLp0qW4e/cuDhw4gBEjRojafmxsLCZPnowePXrA1NQUEokE0dHRldpGcnIyZs6cie7du6Np06YwMzNDhw4dEBoaisePH2tcx8HBARKJROOPt7e3qGMhIiKiuknUHakPPvgAO3bswOPHjyEIAszNzTFy5EhMnDhRZ+/Xmz9/PjIyMmBtbQ1bW1tkZGRUehsjRoxAbm4uPDw8MG7cOEgkEiQkJGDVqlX4/vvvceLECbzyyitq61lZWWHGjBlq7Q4ODiKOhIiIiOoqUUHq66+/BgC4u7tj4sSJGDVqFMzNzXVaWGRkJJycnGBvb48VK1Zg7ty5ld7Gxx9/jMDAQLRo0ULZJggCPvzwQ2zYsAGLFy/G+vXr1dZr1KgRwsPDq1I+ERER1QOigtScOXMwYcIEODk56boeJV9f3ypvIzQ0VK1NIpFgwYIF2LBhAxITE6u8DyIiIqq/RAWpZcuW6bqOl8rY2BgAYGSk+fCfP3+O6OhoZGdnQyqVwtXVFa+//vrLLJGIiIhqgSr/1d7Vq1dx4sQJ/PXXX+jUqRPeeustAIBcLkdJSQlMTEyqXKSuffvttwAAPz8/jcvv37+PCRMmqLS5urriu+++g6OjY7nbjoiIQEREhPKzXC6vYrVERESkr0T/1d69e/fg6+sLZ2dnTJ48GfPnz8eePXuUyzdt2oQGDRro3bv3Lly4gEWLFqF58+aYPXu22vIJEybgyJEj+OOPP5Cfn4/z588jMDAQp0+fRt++ffHkyZNyty+TyZCVlaX8ycnJqa5DISIiohomKkg9fPgQXl5eOHr0KDp16oQpU6ZAEASVPqNGjYKBgQF++uknnRSqC7du3cLAgQNRWlqK7du3w9raWq1PWFgY+vTpg+bNm6Nhw4ZwcXHB5s2bERgYiIyMDGzatKncfUilUtjZ2Sl/bG1tq+twiIiIqIaJClIrV67EnTt38O9//xsXL17EunXr1Po0btwYzs7OSE5OrnKRunD79m34+PggNzcX33//PXx8fCq1/uTJkwEAKSkp5fYLCQlBZmam8ictLU10zURERKTfRAWpvXv3wsHBAStWrIBEIimzX9u2bZGdnS26OF25desWvL29kZOTg507d2LQoEGV3obi7lV+fr6uyyMiIqJaSlSQysjIQLdu3WBgUP7qJiYmePjwoajCdOXWrVvw8fFBTk4OduzYgSFDhojaTmpqKgBOyklERET/IypImZmZVTjoGgDu3r0LKysrMbuolNzcXKSlpSE3N1elXfE4Lzs7G9u3b8fQoUPL3U5aWhqePXumsV0xJ9XYsWN1VzgRERHVaqKmP+jQoQPOnTuH/Pz8Mmc0z83NxcWLF0XPvxQZGakcX3Xp0iVlW0JCAgDAw8MDkyZNAgCsW7cOixYtQlhYmMqM5D4+Prh79y569uyJ3377Db/99pvafl7sv337dkRERMDT0xP29vYwNzfH9evXceDAARQXF2Pu3Lnw9PQUdTxERERU94gKUiNGjMCsWbMQEhKCDRs2aHzEN2vWLDx79gwBAQGiCktOTkZMTIxKW0pKispgb0WQKovi/XynTp3CqVOnNPb5Z/C6du0azp8/j+PHj+PZs2ewtrbGgAED8MEHH5Q57xQRERHVTxLhn/MWaKGwsBBubm64cuUKunbtimHDhmH+/Pno3bs3hgwZgl27duHXX3+Fi4sLTp06pZxJvD6SyWSwsrJCXl4epFJpTZdDJJrDnP062c6dFQN1sh0iouqk7fe3qDtSZmZmiI+Px8iRI3HixAmcP38ewN93kZKTkyEIAlxdXbFnz556HaKIiIiobhP9ihhbW1skJycjPj4e+/fvx61btyCXy9GqVSv0798fQ4YMKXdqBCIiIqLarsrv2vP394e/v78uaiEiIiKqVUS/a4+IiIiovmOQIiIiIhKJQYqIiIhIJAYpIiIiIpEYpIiIiIhEYpAiIiIiEkmrIJWUlITr169Xdy1EREREtYpWQcrb2xsrVqxQfu7Tpw9WrVpVbUURERER1QZaT8j54iv5EhIS4ODgUB31EBEREdUaWt2RsrS0RE5OTnXXQkRERFSraHVHqnPnzjh69CgWLlyIdu3aAQBu3LiBzZs3a7WTcePGia+QiIiISE9JhBef2ZVh3759GDFiBEpKSgD8/ZivMi8kLi0tFV9hLSeTyWBlZYW8vDxIpdKaLodINIc5+3WynTsrBupkO0RE1Unb72+t7kgNHjwYv/76K/bs2YOMjAxER0fD0dER7u7uOiuYiIiIqLbRerB5ly5d0KVLFwBAdHQ0PDw88O2331ZbYURERET6Tusg9aKwsDB07dpV17UQERER1SqigxQRERFRfScqSCmUlJTg+++/x7Fjx5CVlQUAsLOzg4+PD0aMGAEjoyptnoiIiEiviU46Fy5cwIgRI3D79m388w//IiMjsWDBAuzatQsuLi5VrZGIiIhIL4kKUtnZ2fDz80Nubi5eeeUVjB49Go6OjgCAW7duYfv27bh58yb8/f1x4cIF2Nra6rRoIiIiIn0gKkitXLkSubm5mDRpEr788ks0aNBAZfmyZcswbdo0REZGYtWqVfj88891UiwRERGRPtFqQs5/at++PYqKinDz5k0YGhpq7FNSUoJ27drBxMQE169fr3KhtRUn5KS6ghNyElF9ou33t1bv2vune/fu4Y033igzRAGAkZERevXqhXv37onZBREREZHeExWkTE1NIZPJKuz35MkTmJqaitkFERERkd4TFaRee+01HDt2rNy7TXfv3sWxY8fQqVMn0cURERER6TNRQWrcuHEoKCiAr68vDhw4oLb8559/xptvvonCwkKMGzeuykUSERER6SNRf7UXHByM3bt348iRIxg8eDCaNGmCNm3aAABu376Nhw8fQhAE+Pr6Ijg4WKcFExEREekLUXekDA0NsX//fsyePRvm5uZ48OABzpw5gzNnzuDBgwcwNzdHaGgofv75ZxgYiNoFYmNjMXnyZPTo0QOmpqaQSCSIjo6u9HbkcjnWrl0LZ2dnNGjQAM2aNcOYMWNw69atMteJj4+Hl5cXLC0tIZVK4ePjgyNHjog6DiIiIqq7RE1/8KLnz5/jzJkzKq+IUYSfqnBwcEBGRgasra1hbm6OjIwMREVFISgoqFLbCQ4ORmRkJDp16oSBAwciOzs
bO3fuhIWFBU6dOgUnJyeV/rGxsQgMDESzZs0QEBAAANixYwdyc3Oxc+dOjBgxolL75/QHVFdw+gMiqk+0/f6ucpCqLr/88gucnJxgb2+PFStWYO7cuZUOUseOHUOfPn3g6emJw4cPw8TEBAAQFxeHAQMGwM/PD/Hx8cr+jx49Qtu2bWFkZITz58+jZcuWAIDMzEx07doVwN8zt1taWmpdA4MU1RUMUkRUn1TrPFIvg6+vL+zt7au0jU2bNgEAlixZogxRANC/f394e3vj0KFDuHv3rrJ9165dePz4MT766CNliAKAli1bYurUqcjNzcWPP/5YpZqIiIio7tDbIKULCQkJMDc3h7u7u9oyf39/AEBiYqJKfwDw8/PTqj8RERHVb3U2SOXn5yMnJwdt2rTROAO7YmxUenq6sk3x3/8cN1VWf00iIiLQsmVL5U+HDh1EHwMRERHpN1HTH9QGeXl5AAArKyuNyxXPOxX9KlpHU39NZDKZcuB9baGLsS91bdyLPp0TfapFX+jTOdGnWkgdfz/6rS78fupskKopUqkUdnZ2ys9yuRw5OTk1WBERERFVlzr7aE9xV6msO0iKdwW+ePepvHU09dckJCQEmZmZyp+0tLTKF09ERES1gqggdffu3XLfs6cPzM3NYWtri9u3b6O0tFRtuabxUOWNgypv/BQRERHVT6KClIODA0aPHq3rWnTOy8sL+fn5SElJUVummD/K09NTpT8AHDp0qMz+ij5EREREooKUVCpVvltPH+Tm5iItLQ25ubkq7e+99x4AYMGCBSgqKlK2x8XFISEhAX5+fipzVY0aNQpWVlZYu3YtMjMzle2ZmZlYt24drK2tMXTo0Go+GiIiIqotRA02f+2116r90V5kZCSSk5MBAJcuXVK2KeZ68vDwwKRJkwAA69atw6JFixAWFobw8HDlNnx8fDBp0iRERkaiW7duGDhwIHJycrBjxw40adIEa9euVdln48aNsW7dOgQGBqJbt24qr4h58OABduzYUalZzYmIiKhuExWkgoODERwcjNOnT8PV1VXXNQEAkpOTERMTo9KWkpKi8phOEaTK880338DZ2RkbN27El19+CQsLCwwdOhRLly6Fo6OjWv933nkH1tbWWLZsGaKioiCRSNC9e3fMnz8fvr6+VT8wIiIiqjNEBakJEybg/Pnz8PPzw6xZszB8+HA4ODhU+UXFL4qOjkZ0dLRWfcPDw1XuRL3IwMAA06ZNw7Rp07Ted79+/dCvXz+t+xMREVH9JCpIvThT+IIFC7BgwYIy+0okEpSUlIjZDREREZFeExWkBEGolr5EREREtYmoICWXy3VdBxEREVGtU2dnNiciIiKqbgxSRERERCJVKUjdvHkTs2fPhoeHB1599VXMnj1buSw1NRUbN24s8113RERERLWdqDFSABATE4P3338fz58/B/D3X+e9OLP4s2fPMGXKFJiYmCAoKKjKhRIRERHpG1F3pE6dOoVJkybBxMQEq1atQmpqqtpf53l5ecHKygr79u3TSaFERERE+kbUHalVq1ZBEATs378fHh4eGvsYGBjAxcUFV69erVKBRERERPpK1B2plJQUuLm5lRmiFGxsbJCTkyOqMCIiIiJ9JypIPX78GK1bt66wX0FBAYqKisTsgoiIiEjviQpSTZs2RUZGRoX9bty4ARsbGzG7ICIiItJ7ooJUz549cebMGVy5cqXMPikpKbhy5UqFj/+IiIiIaitRQerDDz9EaWkphg8fjgsXLqgtv3btGiZOnAiJRIIPPvigqjUSERER6SVRQapv374ICQnB9evX0b17d7Rv3x4SiQTx8fHo3LkznJ2dkZ6ejlmzZqFnz566rpmIiIhIL4ie2Xz16tX45ptvYGNjgxs3bkAQBOTk5ODy5cto0qQJ1q5dixUrVuiyViIiIiK9InpmcwAIDg7GpEmTcP78edy6dQtyuRytWrWCq6srjIyqtGkiIiIivVfltCORSNCtWzd069ZNF/UQERER1Ro6uW0kCAIePHgAQRDQtGlTGBhU6V3IRERERLVClRLP4cOH0a9fP1haWuKVV16BjY0NLC0t0a9fP8THx+uqRiIiIiK9JDpIzZo1C/369cOhQ4fw7NkzCIIAQRBQUFCAQ4cOYcCAAZg5c6YuayUiIiLSK6KCVGxsLNasWQMzMzPMnDkTv/32G548eYInT57g0qVL+Pe//40GDRrgiy++QGxsrK5rJiIiItILooLU2rVrYWhoiIMHD+Kzzz7D//3f/8Hc3Bzm5ubo1KkTVq1ahYMHD0IikWDdunW6rpmIiIhIL4gKUpcvX4aHhwd69+5dZh/F8suXL4sujoiIiEifiQpSZmZmaNGiRYX9WrRoARMTEzG7ICIiItJ7ooJU9+7d8dtvv1XY77fffkOPHj3E7IKIiIhI74kKUp988gmuXbuGVatWldnns88+w7Vr1zBv3jzRxRERERHpM60m5ExKSlL5LJFIMHXqVMydOxe7du1CYGAg2rRpAwC4ffs2YmNjcfbsWUybNo2TcxIREVGdpVWQ8vb2hkQiUWsXBAFnz57FuXPn1NoB4KuvvsLatWtRUlKig1KJiIiI9ItWQcrT01NjkKpup0+fRlhYGE6cOIHi4mI4OzsjJCQEo0aN0mp9BwcHZGRklNsnKSlJ5a8PyzvO8ePHIzo6Wqt9ExERUd2nVZBKSEio5jLUHTt2DP7+/jAzM8Po0aNhaWmJ3bt3IyAgAPfu3dNq1vQZM2bg8ePHau25ublYv349GjduDFdXV7Xl9vb2CAoKUmt3cXERcSRERERUV+nkpcW6VlJSguDgYBgYGCApKUkZYBYuXAg3NzfMmzcPI0aMgL29fbnbmTFjhsb2NWvWAADeeecdmJmZqS13cHBAeHh4VQ6BiIiI6gG9HAl+9OhR3Lx5E2PHjlW5C2RlZYV58+ahqKgIMTExorf/3//+FwDw7rvvVrVUIiIiqseqdEeqsLAQZ86cQXZ2NgoLC8vsN27cuEptV/Eo0c/PT22Zv78/ACAxMbFS21Q4ceIErl27hh49eqBLly4a+zx+/BgbN25Ebm4umjRpAnd3dzg7O4vaHxEREdVdooPUZ599hmXLlkEmk1XYt7JBKj09HQDg5OSktszGxgYWFhbKPpWluBs1adKkMvtcvHgRkydPVmnr168fYmJi0Lx583K3HxERgYiICOVnuVwuqk4iIiLSf6KC1Lp16xAaGgoAcHZ2hpOTEywtLXVWVF5eHoC/H+VpIpVKlX0q4+nTp9i5cycaNmyIMWPGaOwzc+ZMDB8+HO3bt4eJiQkuX76MJUuWIC4uDoMGDcLJkydhaGhY5j5kMhmysrIqXRsRERHVPqKDlJGREXbv3o3BgwfruqZqs2PHDjx9+hTjx4+HVCrV2Gf16tUqn3v16oWff/4Zffr0QWJiIvbu3Ythw4aVuQ+pVAo7OzvlZ7lcjpycHN0cABEREekVUYPN79y5A09Pz2oLUYo7UWXddZLJZGXerSqPNo/1NDEwMEBwcDAAICUlpdy+ISEhyMzMVP6kpaVVuk4iIi
KqHUQFqebNm6NZs2a6rkVJMTZK0zio+/fv4+nTpxrHT5Xn6tWrOHnyJDp06AAPD49K12RtbQ0AyM/Pr/S6REREVDeJClL9+/fHyZMnq20gtZeXFwDg0KFDasvi4+NV+mirqlMepKamAvh7jikiIiIiQGSQCgsLQ1FREaZNm4aioiJd14S+ffuibdu22LZtGy5cuKBsz8vLw7Jly2BiYqLyl4A5OTlIS0sr81FgcXExtmzZAmNj43L/gvDSpUsoLi5Waz9x4gRWrlwJY2NjjBw5UvyBERERUZ0iarB5ixYtkJycjLfeeguvvvoqfHx80Lp1axgYqOcyiUSCBQsWVK4oIyNERkbC398fnp6eKq+IycjIwOrVq1XuDM2dOxcxMTGIiorS+GqXn376CX/99ReGDRtW7vQFa9aswf79++Hh4YFWrVrB2NgYV65cwaFDhyCRSLB+/Xo4OjpW6liIiIio7hIVpARBwJdffom0tDTI5XKNL/KVSCQQBEFUkAIAHx8fJCcnIywsDDt27FC+tHjlypUICAio1La0HWQ+ZMgQPH78GBcvXsThw4dRVFQEGxsbjB49GjNmzICbm1ulj4OIiIjqLlFB6rPPPsPatWthZGSEQYMGwcnJCRYWFrquDW5uboiLi6uwX3R0tMYwp3DgwAGt9jd06FAMHTpU2/KIiIionhMVpCIjI9GwYUMcP34cXbt21XVNRERERLWCqMHm9+7dQ+/evRmiiIiIqF4TFaRsbGx0+koYIiIiotpIVJAaOnQojh8/jsLCQl3XQ0RERFRriApS4eHhaNKkCcaMGYPc3Fxd10RERERUK4gabD5jxgy8+uqr2LNnD44ePYru3buXO4+UYvoBIiIiorpEVJCKjo6GRCIBADx58gQJCQll9mWQIiIiorpKVJCKiorSdR1EREREtY6oIDV+/Hhd10FERERU64gabE5EREREDFJEREREool6tDdx4kSt+3KwOREREdVVov9qrzyKv+gTBIFBioiIiOosnf7VnlwuR0ZGBg4cOIAzZ85gxowZ6NKlS5UKJCIiItJX1fJXe+Hh4Zg9ezY2bdqEc+fOiSqMiIiISN9V22DzZcuWwdLSEgsXLqyuXRARERHVqGoLUkZGRujWrRt++eWX6toFERERUY2q1ukPCgoK8OjRo+rcBREREVGNqbYgde3aNSQnJ6NVq1bVtQsiIiKiGiVqsPnmzZvLXPbkyRNcu3YNW7ZsQWFhIcaOHSu6OCIiIiJ9JipIBQUFKeeK0kQQBADAkCFDMH/+fHGVEREREek5UUFq3LhxZQYpExMT2NnZwdfXF2+88UaViiMiIiLSZ9UyszkRERFRfcCXFhMRERGJxCBFREREJJJWj/bK+ys9bYwbN65K6xMRERHpI62CVEV/pVcRBikiIiKqi7QKUn369Kl0kDp58iSePXtWpQBGREREpM+0ClKVeV/e8ePHMXv2bBQUFAAAnJ2dxVVGREREpOd0Ntj88uXLGDx4MLy9vZGamopWrVohOjoa58+fF73N06dPY8CAAWjUqBHMzc3Rs2dP7Ny5U+v1o6OjIZFIyvxJSEiolv0SERFR/SBqHqkX3bt3DwsWLMDWrVtRWlqKpk2bYt68efjwww9hYmIiervHjh2Dv78/zMzMMHr0aFhaWmL37t0ICAjAvXv3MHPmTK23NWTIELi4uKi1Ozg4VOt+iYiIqG4THaQePXqEpUuX4j//+Q8KCwvRsGFDTJ8+HaGhoZBKpVUqqqSkBMHBwTAwMEBSUpIyBC1cuBBubm6YN28eRowYAXt7e6229/bbbyMoKOil75eIiIjqtko/2issLMTy5cvRtm1bfP755ygpKcF7772HGzduYOnSpVUOUQBw9OhR3Lx5E2PHjlW5k2RlZYV58+ahqKgIMTExVd6PvuyXiIiIaiet70jJ5XJERkZi8eLFyMnJgSAIGDZsGJYtW4b27dvrtCjF2CU/Pz+1Zf7+/gCAxMRErbd3/vx5PHjwACUlJXBwcICvry+aNm1a7fslIiKiuk2rIPXDDz/gk08+wfXr1yEIAry8vLBy5Uq4ublVS1Hp6ekAACcnJ7VlNjY2sLCwUPbRxldffaXyuUGDBggLC0NoaKjO9xsREYGIiAjlZ7lcrnWdREREVLtoFaRGjBgBiUSiHAc1YMAAlJSU4MSJE1rt5I033qhUUXl5eQD+fqSmiVQqVfYpT5s2bbB27Vr4+/ujZcuWePjwIY4ePYq5c+dizpw5aNiwIT766COd7lcmkyErK6vC2oiIiKj2q9Rg82fPnmH58uVYvny51utIJBKUlJRUujBd8PLygpeXl/KznZ0dAgMD0a1bN/To0QPh4eGYMmUKjIyq/MeLSlKpFHZ2dsrPcrkcOTk5Ots+ERER6Q+tEkTr1q1f6gzlijtCZd39kclkaNy4sejtd+rUCR4eHvjll19w7do15aShuthvSEgIQkJCVNYp6w4XERER1W5aBak7d+5UcxmqFGOU0tPT0b17d5Vl9+/fx9OnT6s8Psva2hoAkJ+f/1L3S0RERHWHzmY21yXF47hDhw6pLYuPj1fpI0ZpaSnOnDkDACpzQlX3fomIiKhu0csg1bdvX7Rt2xbbtm3DhQsXlO15eXlYtmwZTExMMG7cOGV7Tk4O0tLS1B7JnT17Vm3bpaWlmDNnDm7cuAEfHx/Y2tqK3i8RERHVb7obZa1DRkZGiIyMhL+/Pzw9PVVe1ZKRkYHVq1ervN5l7ty5iImJQVRUlMoM5j169EDnzp3RuXNn2NnZ4eHDh0hMTMT169fRsmVLREZGVmm/REREVL/pZZACAB8fHyQnJyMsLAw7duxAcXExnJ2dsXLlSgQEBGi1jZkzZ+LUqVM4fPgwHj58CBMTE7Rr1w7z589HSEiIxoHjutgvERER1Q96G6QAwM3NDXFxcRX2i46ORnR0tFr76tWrq3W/REREVL/p5RgpIiIiotqAQYqIiIhIJAYpIiIiIpEYpIiIiIhEYpAiIiIiEolBioiIiEgkBikiIiIikRikiIiIiERikCIiIiISiUGKiIiISCQGKSIiIiKRGKSIiIiIRGKQIiIiIhKJQYqIiIhIJAYpIiIiIpEYpIiIiIhEYpAiIiIiEolBioiIiEgkBikiIiIikRikiIiIiERikCIiIiISiUGKiIiISCQGKSIiIiKRGKSIiIiIRGKQIiIiIhKJQYqIiIhIJAYpIiIiIpEYpIiIiIhEYpAiIiIiEkmvg9Tp06cxYMAANGrUCObm5ujZsyd27typ1bqCICAuLg5TpkxB586dYWVlhYYNG6JLly5YtmwZCgsLNa4nkUjK/AkKCtLh0REREVFtZ1TTBZTl2LFj8Pf3h5mZGUaPHg1LS0vs3r0bAQEBuHfvHmbOnFnu+s+fP8eAAQNgamoKb29v+Pv7o7CwEPHx8fjkk0+wZ88eJCQkoGHDhmrr2tvbawxNLi4uOjo6IiIiqgv0MkiVlJQgODgYBgYGSEpKUgaYhQsXws3NDfPmzcOIESNgb29f5jYMDQ3x6aef4oMPPkDjxo2V7cXFxRg+f
Dj27duH9evXY9asWWrrOjg4IDw8XNeHRURERHWMXj7aO3r0KG7evImxY8eq3AWysrLCvHnzUFRUhJiYmHK3YWxsjE8++UQlRCna586dCwBITEzUee1ERERUf+jlHamEhAQAgJ+fn9oyf39/AFULQcbGxgAAIyPNh//48WNs3LgRubm5aNKkCdzd3eHs7Cx6f0RERFQ36WWQSk9PBwA4OTmpLbOxsYGFhYWyjxjffvstAM1BDQAuXryIyZMnq7T169cPMTExaN68ebnbjoiIQEREhPKzXC4XXScRERHpN718tJeXlwfg70d5mkilUmWfyoqLi8M333yDjh074t1331VbPnPmTJw4cQK5ubmQyWQ4ceIE+vfvj4MHD2LQoEEoLS0td/symQxZWVnKn5ycHFF1EhERkf7TyztS1eX06dMICAiAlZUVdu3aBVNTU7U+q1evVvncq1cv/Pzzz+jTpw8SExOxd+9eDBs2rMx9SKVS2NnZKT/L5XKGKSIiojpKL+9IKe5ElXXXSSaTlXm3qixnzpyBn58fDAwMEB8fj06dOmm9roGBAYKDgwEAKSkp5fYNCQlBZmam8ictLa1SdRIREVHtoZdBSjE2StM4qPv37+Pp06cax0+V5cyZM3jzzTchl8sRHx8PV1fXStdkbW0NAMjPz6/0ukRERFQ36WWQ8vLyAgAcOnRIbVl8fLxKn4ooQlRpaSkOHjyI119/XVRNqampAP6eY4qIiIgI0NMg1bdvX7Rt2xbbtm3DhQsXlO15eXlYtmwZTExMMG7cOGV7Tk4O0tLS1B4Fnj17Fm+++SZKSkoQFxeHXr16lbvfS5cuobi4WK39xIkTWLlyJYyNjTFy5MiqHRwRERHVGXo52NzIyAiRkZHw9/eHp6enyitiMjIysHr1apU7Q3PnzkVMTAyioqKUr3Z5+PAh3nzzTTx+/Bj9+vXD4cOHcfjwYZX9NGrUCDNmzFB+XrNmDfbv3w8PDw+0atUKxsbGuHLlCg4dOgSJRIL169fD0dHxJZwBIiIiqg30MkgBgI+PD5KTkxEWFoYdO3aguLgYzs7OWLlyJQICAipcXyaT4dGjRwCAgwcP4uDBg2p97O3tVYLUkCFD8PjxY1y8eBGHDx9GUVERbGxsMHr0aMyYMQNubm46Oz4iIiKq/fQ2SAGAm5sb4uLiKuwXHR2N6OholTYHBwcIglCp/Q0dOhRDhw6t1DpERERUf+nlGCkiIiKi2oBBioiIiEgkBikiIiIikRikiIiIiERikCIiIiISiUGKiIiISCQGKSIiIiKRGKSIiIiIRGKQIiIiIhKJQYqIiIhIJAYpIiIiIpEYpIiIiIhEYpAiIiIiEolBioiIiEgkBikiIiIikRikiIiIiERikCIiIiISiUGKiIiISCQGKSIiIiKRGKSIiIiIRGKQIiIiIhKJQYqIiIhIJAYpIiIiIpEYpIiIiIhEYpAiIiIiEolBioiIiEgkBikiIiIikRikiIiIiERikCIiIiISSa+D1OnTpzFgwAA0atQI5ubm6NmzJ3bu3FmpbTx//hyLFy+Gk5MTzMzM0KJFC7z33nv4888/y1xn69atcHNzg7m5ORo3boxBgwbh3LlzVT0cIiIiqmP0NkgdO3YM7u7uSE5OxqhRo/D+++/j/v37CAgIwJo1a7Tahlwux5AhQxAWFgZra2vMmDEDvXr1QmRkJHr16oW//vpLbZ2lS5finXfewZ9//on3338fI0eORFJSEt544w2kpKTo+jCJiIioFjOq6QI0KSkpQXBwMAwMDJCUlAQXFxcAwMKFC+Hm5oZ58+ZhxIgRsLe3L3c7MTExiI+Px5gxY7B161ZIJBIAwNdff40pU6Zg/vz5+Oabb5T909PTER4ejvbt2+PXX3+FlZUVAOCDDz5Az549ERwcjMuXL8PAQG/zJxEREb1EepkIjh49ips3b2Ls2LHKEAUAVlZWmDdvHoqKihATE1PhdjZt2gQAWL58uTJEAcDkyZPRtm1bbN26FQUFBcr2qKgolJSU4JNPPlGGKABwcXHBmDFjcO3aNSQnJ+vgCImIiKgu0MsglZCQAADw8/NTW+bv7w8ASExMLHcbhYWFSE1Nxauvvqp250oikeDNN99Efn4+zpw5o9P9EhERUf2hl4/20tPTAQBOTk5qy2xsbGBhYaHsU5abN29CLpdr3MaL205PT0fv3r2V/21hYQEbG5ty+5cnIiICERERys+lpaUAAJlMVu56NUn+/FmVt6HPxyeGPp0TfalFF3UA+lNLXfv9kGb8/eg3ff79KLYrCEK5/fQySOXl5QGAyuO1F0mlUmWfqmzjxX6K/27evLnW/TWRyWTIyspSa2/VqlW569V2Vl/UdAX6R5/OCWtRpy91APpVC6nj70e/Vffv58mTJ2VmCUBPg1RtJpVKYWdnp/wsl8sxadIkzJw5U2Wc1svQoUMH5OTkwNbWFmlpaS9133Udz2314HmtPjy31YfntvrU5LkVBAFPnjxBixYtyu2nl0FKkfzKuvsjk8nQuHHjKm/jxX6K/65Mf01CQkIQEhJSbp+XRfHXhQYGBso7aqQbPLfVg+e1+vDcVh+e2+pT0+e2ou98QE8Hm5c3Hun+/ft4+vRpmWOfFNq2bQsDA4MyxzRpGofl5OSEp0+f4v79+1r1JyIiovpNL4OUl5cXAODQoUNqy+Lj41X6lKVBgwZwc3PD77//joyMDJVlgiDg8OHDMDc3R48ePXS6XyIiIqo/9PLRXt++fdG2bVts27YN06ZNU84llZeXh2XLlsHExATjxo1T9s/JyUFeXh5sbW1VbsO99957OHXqFObOnasyIec333yDW7du4b333kODBg2U/SdMmIDVq1dj6dKlGDJkiHJbFy5cwHfffYeOHTvCw8PjJZwB3QgJCYFMJuOt5mrAc1s9eF6rD89t9eG5rT614dxKhIr+rq+GHDt2DP7+/jAzM8Po0aNhaWmJ3bt3IyMjA6tXr8bMmTOVfYOCghATE4OoqCgEBQUp2+VyOQYMGID4+Hj07NkTXl5euHHjBn744Qc4ODggNTUVzZo1U9nv0qVLMX/+fNjb22P48OF48uQJtm/fjqKiIhw5cgTu7u4v6xQQERGRntPLR3sA4OPjg+TkZLi7u2PHjh3YsGEDXnnlFWzfvl0lRJXHwMAAe/fuRXh4OP766y98/vnnSElJwbvvvouTJ0+qhSgA+OSTTxAbG4tmzZphw4YN2LlzJ3r37o0TJ04wRBEREZEKvb0jRURERKTv9PaOFBEREZG+Y5AiIiIiEolBqo5xcHCARCLR+OPt7V3T5dUKsbGxmDx5Mnr06AFTU1NIJBJER0eX2V8mkyEkJAT29vYwNTWFg4MDZs2ahadPn768omuBypzX8PDwMq9jiUSCO3fuvNTa9VlWVha++OIL+Pn5oXXr1jAxMYGNjQ2GDx+O1NRUjevwmtVOZc8tr1vtFRYWIiQkBJ6enmjRogXMzMxgY2MDd3d3REVFobi4WG0dfb1u9XL6A6oaKysrzJgxQ63dwcHhpddSG82fPx8ZGRmwtraGra2t
2jxkL8rPz4eXlxcuXLgAPz8/jBkzBufPn8fq1auRmJiIpKQkmJmZvcTq9VdlzqvC+PHjNV63jRo10n2BtdTatWuxcuVKODo6ws/PD82aNUN6ejr27NmDPXv2YNu2bQgICFD25zWrvcqeWwVetxV7+vQpNmzYADc3NwwcOBDNmjXDo0ePEBcXh4kTJ2L79u2Ii4tTzmyu19etQHWKvb29YG9vX9Nl1GqHDx8W7ty5IwiCICxfvlwAIERFRWnsu3DhQgGAEBoaqtIeGhoqABCWLVtW3eXWGpU5r2FhYQIA4dixYy+vwFpq9+7dQkJCglp7UlKSYGxsLDRu3FgoLCxUtvOa1V5lzy2vW+2VlpYKz58/V2svLi4WvL29BQDCzz//rGzX5+uWj/aI/sHX1xf29vYV9hMEAZGRkbCwsMCCBQtUli1YsAAWFhaIjIysrjJrHW3PK1XOsGHDNL5xoXfv3vDx8cGjR49w6dIlALxmK6sy55Yqx8DAACYmJmrtRkZGGDp0KADgxo0bAPT/uuWjvTro+fPniI6ORnZ2NqRSKVxdXfH666/XdFl1Tnp6OrKzs+Hv7w9zc3OVZebm5nB3d0d8fDzu3buHVq1a1VCVtVtSUhJSU1NhYGAAJycn+Pr6wsLCoqbLqjWMjY0B/P3lBPCa1aV/ntsX8boVTy6X4+DBgwCA//u//wOg/9ctg1QddP/+fUyYMEGlzdXVFd999x0cHR1rqKq6p6IXWTs5OSE+Ph7p6en8UhIpLCxM5XOjRo3w5ZdfqrwiijS7e/cufvnlF9ja2sLZ2RkAr1ld0XRuX8TrVntFRUVYtmwZBEHAgwcPcOTIEaSlpWHChAno27cvAP2/bvlor46ZMGECjhw5gj/++AP5+fk4f/48AgMDcfr0afTt2xdPnjyp6RLrjLy8PABQeb/jixTvhlL0I+116dIF3377LW7duoWCggLcvn0ba9euhUQiQVBQEH766aeaLlGvFRcXIzAwEM+fP8fKlSthaGgIgNesLpR1bgFet2IUFRVh0aJFWLx4MdavX4/ff/8d//73v7Fx40ZlH32/bnlHqo7557+EXFxcsHnzZgDAli1bsGnTJoSEhNREaURaU4yRUHBwcMDUqVPRsWNHvPnmm5g/fz7eeuutGqpOv8nlcgQFBSEpKQnBwcEIDAys6ZLqjIrOLa/byrOwsIAgCJDL5cjOzsa+ffswb948nDx5EgcOHNDrlxUr8I5UPTF58mQAQEpKSg1XUnco/nVU1r+CZDKZSj+qur59+8LR0RGXLl1Snl/6H7lcjokTJ2Lbtm1455138PXXX6ss5zUrXkXntjy8bitmYGCAli1bYsqUKdi4cSNSUlKwdOlSAPp/3TJI1RPW1tYA/p6Lg3RD8bxe8fz+nyp6rk/iKK7lZ8+e1XAl+kUul2PChAmIiYnBmDFjEB0drZyDR4HXrDjanNuK8LrVnp+fHwAgISEBgP5ftwxS9YRiFl5Oyqk7Tk5OaNGiBVJSUtQCan5+PlJSUtCmTRsO2tWh/Px8XLlyBebm5sovJvrfF/3mzZsREBCALVu2qIzdUeA1W3nantvy8LqtnOzsbAD/+8tIfb9uGaTqkLS0NI3/2klLS0NoaCgAYOzYsS+7rDpLIpFg0qRJePr0KZYsWaKybMmSJXj69CmCg4NrqLra68mTJ7h+/bpae0FBAYKDg/HkyROMGjVK45+d10eKR06bN2/GyJEjERsbW+YXPa/ZyqnMueV1WzlXr17V+H317Nkz5TjeAQMGAND/61YiCIJQY3snnQoPD0dERAQ8PT1hb28Pc3NzXL9+HQcOHEBxcTHmzp2LZcuW1XSZei8yMhLJyckAgEuXLuHcuXNwd3dHu3btAAAeHh6YNGkSgL//NeTu7o6LFy/Cz88P3bp1w7lz53Do0CG4uroiMTERDRo0qLFj0Sfantc7d+6gbdu2cHV1RceOHWFjY4M//vgDv/zyCzIzM+Hs7Ixjx46hadOmNXk4eiM8PByLFi2ChYUFpk+frvGL+u2334aLiwsAXrOVUZlzy+u2chTfVx4eHnBwcIBUKkVWVhbi4uLw4MED9O7dG/Hx8cprUa+v2xqbU510LiEhQRg1apTg5OQkSKVSwcjISLCxsRGGDBkixMfH13R5tcb48eMFAGX+jB8/XqX/48ePhRkzZgitWrUSjI2NhdatWwszZ84UZDJZzRyAntL2vObl5Qkffvih4OrqKjRr1kwwMjISLC0tBTc3N2HVqlXCs2fPavZA9ExF5xUaXsXDa1Y7lTm3vG4r5/Tp00JwcLDQqVMnoVGjRoKRkZHQtGlTwcfHR/jmm2+E4uJitXX09brlHSkiIiIikThGioiIiEgkBikiIiIikRikiIiIiERikCIiIiISiUGKiIiISCQGKSIiIiKRGKSIiIiIRGKQIiIiIhKJQYqIiIhIJAYpIj3l4OAAiUQCiUSC77//vsx+vr6+kEgkiI6OfnnFieDt7Q2JRIKEhISaLqXa7du3D71794ZUKlX+DrU5bkVffXDnzh1IJBI4ODjUdClEeo2voSaqBT755BO8/fbbfHN8LXDhwgUMHz4ccrkcffr0ga2tLSQSCWxsbGq6NCKqBvx/ZSI917BhQ1y/fh2RkZF4//33a7ocqsCePXtQXFyMefPmYenSpTVdDhFVMz7aI9Jz06dPBwAsXrwYz549q+FqqCJ3794FADg5OdVwJUT0MjBIEem5AQMGwMvLCzk5Ofj888+1Xi8oKKjcsVPR0dGQSCQICgoqsz0vLw8hISFwcHCAmZkZnJycsHLlSsjlcgBAVlYWJk+ejFatWsHU1BSvvvoq1q5dW2FtiYmJ8PPzQ5MmTdCwYUO4ublhy5Yt5a5z5MgRDBs2DLa2tjAxMUHz5s0xdOhQnDx5UmP/F8cbRUVFoVevXrCysoJEIsGdO3cqrBEASkpK8PXXX+ONN96AlZWV8hxMmzYNWVlZKn3Dw8MhkUgQFRUFAJgwYYKyBm9vb63296Ldu3fDw8MDUqkU5ubmcHd3x4EDBzT2vXr1KsLCwuDu7g47OzuYmJigadOm8PX1xc6dO8vdz88//wwvLy9YWlrCysoKvXv3xt69e8vs/+LYqdLSUkRERKBr166wsLBQG98VHx+PQYMGoXnz5jAxMUGLFi0QEBCAM2fOaNz2i+PoLly4gGHDhsHa2hqmpqZ47bXXsGbNGgiCoLbe8+fP8dlnn6F79+6wtLSEiYkJbGxs4OrqitmzZ+Phw4flngOiKhGISC/Z29sLAITjx48Lp06dEgAIUqlUyM3NVenXt29fAYAQFRWl0j5+/HiN7QpRUVECAGH8+PEa24cMGSJ07NhRaN68uTB8+HDBz89PaNCggQBAmDp1qnDjxg3BxsZGaNWqlTBq1CjBx8dHMDQ0FAAIK1asUNufl5eXAECYNm2aYGBgILz22mvC6NGjBU9PT8HAwEAAIISEhGisdebMmQIAwcDAQHBzcxNGjhwpvP7664JEIhEMDQ2Fb7/9Vm0dAMpaDQwMBA8PD2HMmDHC66+/Lty5c6fsE///FRY
WCr6+vgIAwczMTOjfv78QEBAgtGrVSgAgWFtbC2fPnlX2//HHH4Xx48cLjo6OAgDB3d1dGD9+vDB+/Hhh+fLlFe7vxZoXLlwoSCQSwd3dXQgICBC6dOkiABAkEonwww8/qK337rvvCgCEDh06CP7+/kJAQIDQq1cv5Xn9+OOPNe4vIiJCuU83NzdhzJgxQo8ePZS/CwCCvb29yjq3b98WAAitW7cW3nrrLcHExETo27evMGbMGKFz587KfvPnz1fW7O7uLowZM0ZwcXERAAiGhobCf//7X7V6FNfInDlzBBMTE6Fjx47C6NGjBS8vL+W1NX36dJV1SktLlf8bkEqlQv/+/YUxY8YIvr6+yv8NnT9/XqvzTyQGgxSRnnoxSAmCIAwbNkzjl2J1BSkAwuDBg4X8/HzlsrNnzwpGRkbKIPT+++8LxcXFyuV79uxRfqG9uJ4g/O9LEoCwbNkylWUJCQnKkHbw4EGVZRs3bhQACO3atRMuXryosiwxMVGwtLQUTExMhOvXr6ssU+xLKpUKJ0+e1HgOyhMaGioAEBwdHYXbt28r24uKipTBpU2bNsLz589V1qvovJdHUXOjRo2EU6dOqSwLCwsTAAjt27dXWy8hIUG4efOmWntaWprQsmVLAYCQmpqqsuzixYuCoaGhYGBgIOzatUtlWWxsrCCRSMoNUgCEli1bCr///rvafuPi4pQB9NChQyrLIiMjBQCCsbGxcPnyZZVlL14jX3/9tcqyI0eOKIPzvXv3lO2JiYkCAKFr166CTCZTq+X06dNq//gg0iUGKSI99c8glZaWJhgZGQmmpqYqd1SqK0hZWFgIf/zxh9p6b731lvKOREFBgdpyZ2dnAYCQmJio0q74kuzatavGehR3nd58801lW2lpqdCiRQsBgHDmzBmN661atUoAIMycOVOlXfGFvHjxYo3rlaegoECwsLAQAAg//fST2vL8/HzhlVdeEQAIW7duVVmmiyD11VdfqS0rLCwUrKysBADC3bt3td7mN998IwAQZs2apdI+adIkAYAQEBCgcb0hQ4ZUGKQ2b96scV3FNVnWHcZBgwYJAITg4GCVdsU1MmzYMI3r9evXT22/O3fuVN7pJKoJHCNFVEu8+uqrmDhxIp4/f44FCxZU+/66d++O5s2bq7UrBlH7+PjAzMyszOXZ2dkatztu3DiN7ePHjwcAJCcno7S0FABw/vx5ZGdnw9HREd27d9e4nmL80YkTJzQuHzFihMb28pw5cwZPnz5FkyZNMHjwYLXlDRs2xOjRowEAx44dq/T2K6Jpn6ampmjbti0AqI3PAoCnT59i165dmDdvHt577z0EBQUhKCgIu3fvBgD8/vvvKv0V81q98847GmtQ/D7KM3z4cLW2kpISpKSkAIDa+DuFd999F0DZ507T8QNAx44dAagef7du3WBoaIhvv/0W69evR05OToV1E+kSpz8gqkXCw8MRGxuLrVu34t///jc6d+5cbftq3bq1xnYLC4tyl1taWgIACgsLNS5v06ZNue0FBQV48OABmjdvjlu3bgEAbt68WeFElX/99ZfGdjETSiq+qMuqFQAcHR1V+upSWedWKpUCUD+3+/btw4QJE/DgwYMytymTyVQ+Z2ZmAqj491GW5s2bo2HDhmrtDx48UNZX1jYqOneVOX5HR0d8/vnnmDVrFqZOnYqpU6fC3t4evXr1wqBBgzBy5EiYmJiUeyxEVcEgRVSL2NraYvr06Vi+fDnmzp2L/fv3i96W4i/vymJgUP4N64qWV4Xw//8yS1GjjY0N/P39y13H2tpaY3uDBg10W9xLUJlzm5WVhYCAABQUFGD27Nn417/+BQcHB1hYWMDAwACHDh2Cv7+/xr92q4rqPK+VvbY++ugjjBo1Cj/99BOSk5ORnJyM7du3Y/v27QgLC8Px48dha2tbTdVSfccgRVTLhIaGYuPGjThw4ACSkpLK7Kf4V/iTJ080Ls/IyKiW+ipy+/Ztje2KKQnMzMzQtGlTAECrVq0AAE2bNn2pr8Cxs7MDUHatAJR3yxR9a8q+fftQUFCAoUOHYuXKlWrL09PTNa5nZ2eHmzdv4s6dO+jUqZPacm2niPinpk2bwtTUFM+fP8etW7c03jWtjnP3yiuvIDg4GMHBwQCAtLQ0TJw4ESdPnsScOXMQExOjs30RvYhjpIhqGSsrK8ybNw8AMHv27DL7Kb6krl27prZMEATExcVVT4EViI2N1di+efNmAICHh4fyVTiurq6wtrbG1atXceXKlZdWY48ePWBhYYGHDx/ip59+UlteUFCA7du3A/h7rFhNUsyRZG9vr7ZMEARs27ZN43peXl4AgK1bt2pcrvh9VJaRkRE8PDwAoMzw++233wKo3nPXoUMHhIaGAvj7tT1E1YVBiqgW+vDDD9G6dWukpqaWOSGlr68vAGDLli24evWqsr24uBihoaE4ffr0S6n1n86ePYtVq1aptCUnJ2P9+vUAgI8//ljZbmxsjLCwMAiCgKFDhyI5OVlte6WlpTh69ChOnTqlsxrNzMzw4YcfAgBmzpypcveuuLgY06dPx/3799GmTRtRg9l1STEA+/vvv1cZaF1aWoqFCxeWOQj/o48+gqGhIXbu3Ikff/xRZdn27duxZ88e0TXNnDkTALBhwwYcOXJEZVl0dDR++uknGBsbK2ftr4qjR4/iwIEDKC4uVmkXBAE///wzAM0hk0hX+GiPqBYyNTXF4sWLERQUVOZrY9zd3TFkyBDs3bsXPXr0gIeHBxo0aIBz585BJpNh+vTp+PLLL19y5cC0adMwd+5cbN68GZ07d0Z2djaOHz8OuVyO6dOnY8CAASr9p06dirt37+Kzzz5D79690alTJ7Rr1w4NGjTA/fv3ceHCBTx+/BgbNmxAz549dVbnokWLcObMGRw5cgQdO3aEj48PLC0tcfLkSdy9exdNmzbFrl27anwg8+DBg9G9e3ecPXsW7du3h5eXF8zNzZGamors7GyEhoZqfOTn4uKC5cuXY/bs2Rg2bBhef/11ODo6Ij09HadPn8bHH39cqZn0X9S/f3/Mnz8fn376Kd588024u7ujdevWSEtLw7lz52BoaIivv/5a4yPFyvrtt9/w8ccfQyqVolu3bmjRogUKCgpw7tw5ZGRkwMrKCosXL67yfojKwjtSRLVUYGAgnJ2dy+2zY8cOzJ8/H7a2tkhISMCpU6fQu3dvnDt3Di4uLi+n0H8YOnQoDh8+DBsbGxw4cAC//vorunXrhujoaHzxxRca11m1ahVSUlLwr3/9C0+fPsXBgwexf/9+ZGdnw9vbG5GRkQgICNBpnaampjh48CD+85//oEuXLjh+/Dh+/PFHGBsb46OPPsLFixfLnJLhZTIyMkJCQgLmzZsHOzs7HDlyBAkJCejatStOnjyJfv36lbnurFmzsHfvXnh4eODy5cvKO0Xff/89pk2bVqW6lixZgri4OPTv3x/Xrl3Dzp07kZ2djZEjR+LEiROYOHFilbavMHjwYISHh8PV1RW3bt3CDz/8gISEBFhZWWHOnDm4fPlyjV3rVD9IBF
3/KQcRERFRPcE7UkREREQiMUgRERERicQgRURERCQSgxQRERGRSAxSRERERCIxSBERERGJxCBFREREJBKDFBEREZFIDFJEREREIjFIEREREYnEIEVEREQkEoMUERERkUj/D5uGAjw6YBjgAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkAAAAG7CAYAAAArJypxAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/H5lhTAAAACXBIWXMAAA9hAAAPYQGoP6dpAAA/y0lEQVR4nO3deVyU5f7/8feAATYIHksP4gJqqJUVkpqFS2pBmVp+bdG+7ktl/SpDRa1wyVwwhbbTt4WTWGbbsfScyqBcsMhUFDI7WrghgXXSlBHc0Ll/f/hgTgQo3MzAwLyej8c8cu77mvv+DPfDeHvd13VfFsMwDAEAAHgQr9ouAAAAoKYRgAAAgMchAAEAAI9DAAIAAB6HAAQAADwOAQgAAHgcAhAAAPA4DWq7AHdkt9uVn5+vRo0ayWKx1HY5AACgEgzD0PHjxxUcHCwvrwv38RCAypGfn69WrVrVdhkAAMCE3NxctWzZ8oJtCEDlaNSokaTzP8CAgIBargYAAFSGzWZTq1atHL/HL4QAVI6S214BAQEEIAAA6pjKDF9hEDQAAPA4BCAAAOBxCEAAAMDjEIAAAIDHIQABAACPQwACAAAehwAEAAA8DgEIAAB4HAIQAADwOAQgAADgcQhAAADA4xCAAACAxyEAAQAAj0MAAgAAHocABAAAPE6D2i7AE4VO/9Qlxz2w8A6XHBcAgPqGHiAAAOBxCEAAAMDjEIAAAIDHIQABAACPQwACAAAehwAEAAA8DgEIAAB4HAIQAADwOAQgAADgcQhAAADA4xCAAACAxyEAAQAAj0MAAgAAHocABAAAPA4BCAAAeBwCEAAA8DgEIAAA4HHcMgAlJyfLYrFc8NWvX79Sn7HZbIqJiVFISIh8fX0VGhqqqVOnqrCwsJa+BQAAcFcNaruA8oSHh2vWrFnl7vvHP/6hH374QdHR0Y5tRUVF6t27t7KyshQVFaVhw4YpMzNTixcvVlpamjZu3Cg/P7+aKh8AALg5tw1A4eHhZbafOXNGL7/8sho0aKBRo0Y5ti9atEhZWVmaNm2aFi5c6Ng+ffp0xcfHKzExUTNmzKiJ0gEAQB3glrfAKrJq1SodOXJEAwYM0F//+ldJkmEYSkpKkr+/v+Li4kq1j4uLk7+/v5KSkmqjXAAA4KbqVAAqCTLjx493bMvOzlZ+fr4iIyNltVpLtbdarYqMjNS+ffuUm5tb4XETEhLUsmVLx6tjx46u+QIAAMAt1JkAlJOTo7Vr16ply5a67bbbHNuzs7MlSWFhYeV+rmR7Sbvy2Gw25eXlOV6HDh1yYuUAAMDduOUYoPIsXbpUdrtdo0ePlre3t2N7QUGBJCkwMLDczwUEBJRqV1GbFi1aON7b7XZCEAAA9Vid6AGy2+1aunSpLBaLxo4d6/Tjx8TE6Oeff3a8du/e7fRzAAAA91EnAtCXX36pgwcPqm/fvmrTpk2pfSU9PxX18NhstlLtAAAA6kQAKm/wc4mLjfG52BghAADgedw+AB05ckSrV69WkyZNNHjw4DL7w8LCFBwcrPT0dBUVFZXaV1RUpPT0dLVp00atWrWqqZIBAICbc/sA9Pbbb+vMmTMaPny4fH19y+y3WCwaP368CgsLNXfu3FL75s6dq8LCQk2YMKGmygUAAHWA288C+/vf/y6p/NtfJWJjY7V69WrFx8crMzNTERER2r59u1JTU9W1a1dNmjSphqoFAAB1gVv3AG3ZskU7d+5Ut27ddM0111TYzmq1Ki0tTZMmTdKuXbu0ZMkS7d69W5MnT9batWvVsGHDGqwaAAC4O4thGEZtF+FubDabAgMDVVBQ4HiOkDOFTv/U6ceUpAML73DJcQEAqAuq8vvbrXuAAAAAXIEABAAAPA4BCAAAeBwCEAAA8DgEIAAA4HEIQAAAwOMQgAAAgMchAAEAAI9DAAIAAB6HAAQAADwOAQgAAHgcAhAAAPA4BCAAAOBxCEAAAMDjEIAAAIDHIQABAACPQwACAAAehwAEAAA8DgEIAAB4HAIQAADwOAQgAADgcQhAAADA4xCAAACAxyEAAQAAj0MAAgAAHocABAAAPA4BCAAAeBwCEAAA8DgEIAAA4HFMBaBjx45px44dOnr0aKntv/76q8aMGaPOnTtr8ODB2rFjh1OKBAAAcCZTAWjBggXq3Lmz9u/f79hWXFysHj166K233tJ3332n1atXq0+fPsrPz3dasQAAAM5gKgCtX79eISEhioiIcGz78MMPtXfvXt14441atWqVxo0bp6NHj+qVV15xWrEAAADOYCoA5ebmKiwsrNS2Tz75RBaLRW+++aYGDRqkN954QyEhIfr000+rVeDHH3+sW2+9VZdddpn8/PzUpk0bDRs2TLm5uaXa2Ww2xcTEKCQkRL6+vgoNDdXUqVNVWFhYrfMDAID6p4GZD/3+++9q2rRpqW2bNm1S27Zt1b59e8e2iIgIpaWlmSrMMAw99NBDev3119WuXTsNHTpUjRo1Un5+vtLS0pSTk6NWrVpJkoqKitS7d29lZWUpKipKw4YNU2ZmphYvXqy0tDRt3LhRfn5+puoAAAD1j6kA5Ovrq2PHjjne//LLL8rJydGoUaNKtWvYsKFOnjxpqrAXX3xRr7/+uh5++GG9+OKL8vb2LrX/7Nmzjj8vWrRIWVlZmjZtmhYuXOjYPn36dMXHxysxMVEzZswwVQcAAKh/TN0Ca9++vdLT03XixAlJ0kcffSSLxaIePXqUapefn69mzZpV+fgnT57UnDlz1LZtW73wwgtlwo8kNWhwPrsZhqGkpCT5+/srLi6uVJu4uDj5+/srKSmpyjUAAID6y1QAuu+++1RQUKDevXvriSee0PTp0+Xr66tBgwY52pw9e1bbt28vM1aoMlJTU3X06FHdddddOnfunD766CMtXLhQr776qvbs2VOqbXZ2tvLz8xUZGSmr1Vpqn9VqVWRkpPbt21dmzNAfJSQkqGXLlo5Xx44dq1wzAACoO0zdAnv88ceVkpKidevWadu2bfL29tbzzz9falzQF198IZvNpp49e1b5+Nu2bZMkeXt769prr9VPP/3k2Ofl5aUnnnhCixcvlnQ+AEmqMGiFhYUpJSVF2dnZjjFDf2az2ZSXl1flOgEAQN1kKgD5+Pjoiy++0Ndff61ff/1VERERatu2bak2fn5+SkxMLNUrVFn/+c9/JJ3vmYmIiNCWLVt05ZVXKjMzUw888ICWLFmidu3aaeLEiSooKJAkBQYGlnusgIAASXK0q6hNixYtHO/tdrsOHTpU5boBAEDdYCoASZLFYrlg706fPn3Up08fU8e22+2SzgetVatWKTg4WJLUs2dPffjhh7ruuuu0ZMkSTZw40dTx/ywmJkYxMTGO9zabrcJABQAA6j63XAusJHx06dLFEX5KdOrUSW3bttXevXt17NgxR9uKenhsNlupYwIAAJjuAZLOz/Jav3698vLyd
OrUqXLbWCyWMrOzLqZDhw6SpMaNG5e7v2T7yZMnHWN/SsYC/dnFxggBAADPYzoAxcTE6OWXX9a5c+cknZ+O/kcWi0WGYZgKQCW3znbt2lVmX3Fxsfbs2SOr1aqmTZsqKChIwcHBSk9PV1FRUamZYEVFRUpPT1ebNm0qHAANAAA8j6kAlJCQoOeff14Wi0XR0dG68sorHYONnaFdu3aKiopSamqqkpKSNH78eMe+hQsX6tixYxo+fLjjWUDjx4/XM888o7lz55Z6EOLcuXNVWFioJ5980mm1AQCAus9i/LnrphKuvvpqZWdnKzU1VTfffLMLypL27t2rm266Sf/5z390xx13qGPHjsrMzNS6desUEhKib7/9VkFBQZLO9/RERkbqu+++U1RUlCIiIrR9+3alpqaqa9euSktLU8OGDSt97pJB0AUFBU4NdiVCp1dvfbSKHFh4h0uOCwBAXVCV39+mBkHv3btXPXr0cFn4kc73AmVkZGj06NHatm2bXnzxRWVnZ+uRRx7Rli1bHOFHOv/Aw7S0NE2aNEm7du3SkiVLtHv3bk2ePFlr166tUvgBAAD1n6lbYI0aNVLz5s2dXUsZrVq10tKlSyvVNjAwUImJiUpMTHRxVQAAoK4z1QPUs2dPfffdd86uBQAAoEaYCkAzZ87Unj17WGQUAADUSaZugdlsNsXExOjBBx9UamqqBgwYoNatW8vLq/w81atXr2oVCQAA4EymAtDNN9/seM7PypUrtXLlygrbWiwWnT171nSBAAAAzmYqAPXq1UsWi8XZtQAAANQIUwFow4YNTi4DAACg5rjlYqgAAACuRAACAAAep1oBaNeuXXrooYfUoUMH+fv7y9/fXx06dNDEiRPLXcgUAADAHZheDT45OVkPPfSQiouLS60En52drezsbC1dulSvvfaaRo0a5ZRCAQAAnMVUD9C2bds0YcIEnTlzRnfccYc+/vhj7dixQzt27NCqVas0cOBAnTlzRhMmTFBGRoazawYAAKgWUz1Azz33nOx2u/7+979rzJgxpfZ16tRJgwYNUnJyssaOHaslS5bo3XffdUqxAAAAzmCqB+irr75SeHh4mfDzR6NHj1ZERIQ2btxoujgAAABXMBWADh8+rCuvvPKi7Tp27KjDhw+bOQUAAIDLmApAjRs31sGDBy/a7uDBgwoMDDRzCgAAAJcxFYC6du2qb775RuvWrauwzbp165Senq4bbrjBdHEAAACuYCoAPfroo7Lb7Ro4cKBiY2P1ww8/6MSJEzpx4oR27typKVOmaODAgY62AAAA7sTULLDo6Gg99dRTmjdvnpYsWaIlS5aUaWMYhuLi4hQVFVXtIgEAAJzJ9JOg586dq88++0x9+vSRr6+vDMOQYRjy8fFR37599dlnn2nOnDnOrBUAAMApTD8JWpJuu+023XbbbTp37pyOHDkiSbrsssvk7e3tlOIAAABcwVQPkJeXlyIiIhzvvb291axZMzVr1ozwAwAA3J6pAGS1WnXVVVc5uxYAAIAaYSoAhYWF6T//+Y+zawEAAKgRpgLQ8OHD9dVXX2nv3r3OrgcAAMDlTAWgSZMmKTo6Wn379tWKFSt06tQpZ9cFAADgMqZmgV1xxRUyDEO5ubkaMWKERowYoWbNmqlhw4Zl2losFnqKAACAWzEVgA4cOOD4s2EYkqRff/213LYWi8XMKQAAAFzGVADav3+/s+sAAACoMaYCUEhIiLPrAAAAqDGml8IAAACoqwhAAADA41TqFljbtm1Nn8DsLLDQ0FDl5OSUu693797asGFDqW2nT59WfHy83n77beXm5qpJkyYaMGCAnn32WTVr1sxM6QAAoJ6qVAD646yvP7JYLI5ZYBXtq84ssMDAQE2aNKnM9tDQ0FLv7Xa77rzzTqWkpKh79+4aMmSIsrOzlZSUpLVr1+rbb79V06ZNTdcBAADql0oFoPJmfb300ktKTEzUnXfeqVGjRqlNmzaSzoelZcuWafXq1YqJidGjjz5qurjGjRtr9uzZF223bNkypaSkaNiwYXrnnXccoevVV1/VxIkT9fTTT+u1114zXQcAAKhfLEZFXTgX8M9//lODBw/W8uXLNWzYsHLbvPfee/rf//1frVy5UnfddVeVCyvp5amo9+mPbrrpJm3atEkHDhwoNUPNMAxdccUV+vXXX/Xbb7+V+6DG8thsNgUGBqqgoEABAQFVrv1iQqd/6vRjStKBhXe45LgAANQFVfn9bWoQ9OLFi9WlS5cKw48kDR06VF26dNGSJUvMnELS+XE9ycnJmj9/vl5++WVt3ry5TJtTp05p8+bN6tChQ5np+RaLRbfeequKioqUkZFhug4AAFC/mHoO0I4dOzRgwICLtgsLC9O//vUvM6eQJP3yyy8aM2ZMqW1du3bVu+++q3bt2kmS9u7dK7vdrrCwsAprkKTs7Gz17Nmz3DYJCQlKSEhwvLfb7aZrBgAA7s9UD5Ddbq/UzK69e/dWOEj6YsaMGaO1a9fq119/VVFRkTIzMzVixAht3bpV/fr10/HjxyVJBQUFks4PmC5PSRdYSbvy2Gw25eXlOV6HDh0yVTMAAKgbTAWgzp07a8uWLfr4448rbLNq1Spt3rxZnTt3NlXYrFmz1LdvXzVr1kyXXnqpwsPD9dZbb2nEiBHKycnRG2+8Yeq45QkICFCLFi0cr+bNmzvt2AAAwP2YCkBTp06VYRi699579b//+7/67LPPtGvXLu3atUtr1qzR8OHDde+998pisWjq1KlOLfjBBx+UJKWnp0v6b89PRT08NputVLvyxMTE6Oeff3a8du/e7cySAQCAmzE1BmjAgAFasmSJYmNj9d577+m9994rtd8wDHl7eys+Pr5SY4Wq4vLLL5ckFRUVSTr/kEYvLy9lZ2eX275ke0VjhAAAgOcxvRTGE088oe3bt2vs2LFq166dfH195evrq7Zt22rs2LHKyMjQlClTnFmrJDlmgpVMk2/YsKG6deumH3/8scyTow3D0BdffCGr1aouXbo4vRYAAFA3meoBKnHNNdc4dSxOid27d6t169a69NJLy2yfNm2aJOn+++93bH/ggQf07bffasaMGaUehPjaa69p3759euCBByr9DCAAAFD/VSsAucp7772nhIQE9erVSyEhIbJarfrpp5/02Wefqbi4WDNmzFCvXr0c7UeNGqX3339f7777rvbv36/evXtrz549+uijj9SmTRs9++yztfhtAACAu3HLANSnTx/t2rVLmZmZ+uqrr3TixAldfvnl6t+/vx5++GFFRUWVau/l5aXVq1dr4cKFevvtt5WYmKgmTZpo3LhxevbZZ1kHDAAAlGJqKYwSGRkZ+sc//qEff/xRNput3Gf+WCwWrV27tlpF1jSWwgAAoO6pyu9v0z1AU6ZMUWJioiP0/HlleGesBg8AAOAKpmaBffjhh0pISFCLFi302muvOW5JpaSk6OWXX9aNN94owzA0ffp0rVu3zqkFAwAAVJepAPT666/L29tba9eu1YQJExxPTr711lv18MMPKz09XU899ZQSEhIu+ABCAACA
2mAqAGVmZuqGG2644MMF58yZo+bNmzMDCwAAuB1TAej48eNq3bq1472Pj48kqbCw8L8H9vLSDTfc4FiyAgAAwF2YCkBNmzbVsWPHHO9Llqc4cOBAqXZFRUWOtbgAAADchakAFBoaWmrZic6dO8swDK1YscKx7ZdfflFaWppCQkKqXyUAAIATmQpA/fr10+7dux09PrfffruaNGmi+Ph43XPPPZo8ebJuuOEGFRUVaciQIc6sFwAAoNpMPQdo6NChys/PV25urkJDQ2W1WrV06VINHTpUK1eudLS7/vrrNWPGDKcVCwAA4AymAtCVV15ZZhHUgQMHKjs7W//617/0+++/68orr9TAgQPl7e3tlEIBAACcxalrgQUHB+vBBx905iEBAACcztQYIAAAgLqs2j1AeXl5ysvL06lTpyps06tXr+qeBgAAwGlMB6DVq1dr+vTp+umnny7YzmKx6OzZs2ZPAwAA4HSmAtCaNWs0ZMgQ2e12BQYGqm3bthdddh4AAMBdmApA8+bNk91u1+zZszV9+nTHUhgAAAB1gakAlJWVpfDwcM2cOdPZ9QAAALicqVlg3t7e6tixo7NrAQAAqBGmAtC1116rn3/+2dm1AAAA1AhTAWjSpElKT09XRkaGs+sBAABwOVMBaMiQIYqLi1N0dLReeeUVHTx40Nl1AQAAuEylBkFfaD2vRx99VI8++miF+3kOEAAAcDeVCkCGYZg+QXU+CwAA4AqVCkB2u93VdQAAANQYFkMFAAAehwAEAAA8DgEIAAB4HAIQAADwOAQgAADgcQhAAADA4xCAAACAx6lUANq4caN++uknV9cCAABQIyoVgG6++WYtXLjQ8b5v375atGiRy4qqSHx8vCwWiywWi7799tsy+202m2JiYhQSEiJfX1+FhoZq6tSpKiwsrPFaAQCA+6r0LbA/LmmxYcMG7d692yUFVWTnzp2aNWuWrFZrufuLiorUu3dvJSYmqmPHjnriiSfUoUMHLV68WH379tWpU6dqtF4AAOC+KhWAGjVqpEOHDrm6lgoVFxdr1KhRCg8P1+DBg8tts2jRImVlZWnatGlKSUnRwoULlZKSomnTpmnr1q1KTEys4aoBAIC7qtRaYNdee63WrVunmTNn6oorrpAk7dmzR2+99ValTjJy5EjzFUqaN2+efvjhB23fvr3cW2+GYSgpKUn+/v6Ki4srtS8uLk5/+9vflJSUpBkzZlSrDgAAUD9UKgDFxsbq7rvv1rx58xzb0tPTlZ6eXqmTVCcAbd++XfPmzdMzzzyjq666qtw22dnZys/PV3R0dJlbZFarVZGRkUpJSVFubq5atWpluhYAAFA/VCoADRw4UFu2bNGqVauUk5Oj5ORktWvXTpGRkS4t7vTp0xo5cqTCw8MVGxtbYbvs7GxJUlhYWLn7w8LClJKSouzs7HIDUEJCghISEhzv7XZ7NSsHAADurFIBSJKuu+46XXfddZKk5ORk9ejRQ2+++abLCpOkmTNnKjs7W9u2bZO3t3eF7QoKCiRJgYGB5e4PCAgo1e7PbDab8vLyqlktAACoKyodgP5o1qxZ6ty5s7NrKWXTpk1avHixZs+erU6dOrn0XAEBAWrRooXjvd1ur9VB3wAAwLVMByBXOnv2rEaNGqVrr71W06dPv2j7kp6fC/Xw/LHdn8XExCgmJqZU+4raAgCAus9UACpx9uxZ/eMf/9D69esdt5BatGihPn366O6771aDBuYOX1hY6BjX4+PjU26bG2+8UZL08ccfOwZHl3zmzy42RggAAHgW0wEoKytLd999t/bv31/qIYmSlJSUpLi4OH344YcKDw+v8rF9fX01bty4cvdt3LhR2dnZGjRokJo2barQ0FCFhYUpODhY6enpKioqKjUTrKioSOnp6WrTpg0zwAAAgCSTASg/P19RUVE6fPiw/vrXv2ro0KFq166dJGnfvn167733tHfvXkVHRysrK0vNmzev0vEbNmyopKSkcveNHj1a2dnZmjFjhrp37+7YPn78eD3zzDOaO3duqWU75s6dq8LCQj355JMmvikAAKiPTAWg+Ph4HT58WOPHj9cLL7yghg0blto/f/58PfbYY0pKStKiRYtq5CnMsbGxWr16teLj45WZmamIiAht375dqamp6tq1qyZNmuTyGgAAQN1Q6bXA/mjNmjVq3bq1/u///q9M+JEkPz8/vfLKK2rdurU+/fTTahdZGVarVWlpaZo0aZJ27dqlJUuWaPfu3Zo8ebLWrl1bbp0AAMAzWYw/D+CphIYNG2rw4MFasWLFBdsNGzZMq1at0smTJ00XWBtKZoEVFBQ4niHkTKHTXRMKDyy8wyXHBQCgLqjK729TPUC+vr6OqeUXcvz4cfn6+po5BQAAgMuYCkBXXXWV1q9fr9zc3ArbHDx4UOvXr9fVV19tujgAAABXMBWARo4cqZMnT+qWW27RZ599Vmb/J598oltvvVWnTp2q9krwAAAAzmZqFtiECRO0cuVKrV27VgMHDlSTJk3Upk0bSdL+/fv1+++/yzAM3XLLLZowYYJTCwYAAKguUz1A3t7e+vTTTxUbGyur1aojR44oIyNDGRkZOnLkiKxWq6ZNm6ZPPvlEXl6mTgEAAOAypp8E7ePjo4ULF2rOnDnKyMgotRRGly5dGPwMAADcVrXWApPOzwiLjIx0Ri0AAAA1gvtTAADA4xCAAACAxyEAAQAAj0MAAgAAHocABAAAPA4BCAAAeBxTAejgwYMXXAcMAADAnZkKQKGhoRo6dKizawEAAKgRpgJQQECAY+0vAACAusZUALrqqqu4BQYAAOosUwFowoQJSk9P19atW51dDwAAgMuZCkBjxozRww8/rKioKM2fP18//vijTp8+7ezaAAAAXMLUYqje3t6OP8fFxSkuLq7CthaLRWfPnjVzGgAAAJcwFYAMw3BJWwAAgJpgKgDZ7XZn1wEAAFBjeBI0AADwOAQgAADgcaoVgPbu3avY2Fj16NFDHTp0UGxsrGPf5s2b9frrr6ugoKDaRQIAADiTqTFAkrRs2TI99NBDjunvFotFhw8fduw/ceKEJk6cKB8fH40ePbrahQIAADiLqR6gb7/9VuPHj5ePj48WLVqkzZs3l5nt1bt3bwUGBupf//qXUwoFAABwFlM9QIsWLZJhGPr000/Vo0ePctt4eXkpPDxc//73v6tVIAAAgLOZ6gFKT09Xt27dKgw/JYKCgnTo0CFThQEAALiKqQB07NgxtW7d+qLtTp48qTNnzpg5BQAAgMuYCkCXXXaZcnJyLtpuz549CgoKMnMKAAAAlzEVgLp3766MjAz98MMPFbZJT0/XDz/8cNHbZAAAADXNVAB65JFHdO7cOQ0ZMkRZWVll9u/atUtjx46VxWLRww8/XOXjnzp1SjExMerVq5eCg4Pl5+enoKAgRUZGaunSpSouLi7zGZvNppiYGIWEhMjX11ehoaGaOnWqCgsLzXxFAABQj1kMk6uVTpkyRQkJCbJYLGrXrp327t2roKAgXXbZZfr3v/8tu92u2NhYLVy4sMrHPnz4sFq1aqV
u3bqpffv2atq0qY4ePao1a9YoJydHUVFRWrNmjby8zue3oqIi9ejRQ1lZWYqKilLnzp2VmZmp1NRUde3aVRs3bpSfn1+lz2+z2RQYGKiCggIFBARUuf6LCZ3+qdOPKUkHFt7hkuMCAFAXVOX3t+kHIS5evFgdOnTQ7NmztWfPHknSoUOHdOjQIV1++eWaNWuWHnnkEVPHbtKkiQoKCuTj41Nq+9mzZ3XrrbcqNTVVa9as0R13nP+Fv2jRImVlZWnatGmlAtf06dMVHx+vxMREzZgxw+Q3BQAA9Y3pHqAShmEoMzNT+/btk91uV6tWrdS1a1c1aGA6W13Qiy++qMcff1zPP/+8Hn/8cRmGoZYtW8pms+mXX36R1Wp1tC0qKlJQUJCaNWumvXv3Vvoc9AABAFD31EgPUAmLxaKIiAhFRERU91AXZbfb9fnnn0uSOnXqJEnKzs5Wfn6+oqOjS4UfSbJarYqMjFRKSopyc3PVqlUrl9cIAADcn1O6aQzD0JEjR2QYhi677DLH2JzqOnPmjObPn+84/tq1a7V7926NGTNG/fr1k3Q+AElSWFhYuccICwtTSkqKsrOzKwxACQkJSkhIcLy32+1OqR8AALinagWgL774QkuWLNHXX3+tkydPSpL8/PzUs2dPPfHEE4qOjq5WcWfOnNGcOXMc7y0Wi6ZMmaIFCxY4tpWsNh8YGFjuMUq6wC60Kr3NZlNeXl61agUAAHWH6a6aqVOn6rbbblNqaqpOnDghwzBkGIZOnjyp1NRU9e/fX5MnT65Wcf7+/jIMQ+fOnVNubq7+9re/KSkpSTfffLNsNlu1jv1HAQEBatGihePVvHlzpx0bAAC4H1MBaPny5VqyZIn8/Pw0efJk7dixQ8ePH9fx48f1/fffa8qUKWrYsKGef/55LV++vPpFenmpZcuWmjhxol5//XWlp6dr3rx5kv7b81NRD09JUKqoh0iSYmJi9PPPPzteu3fvrnbNAADAfZkKQC+99JK8vb31+eef67nnnlOnTp1ktVpltVp19dVXa9GiRfr8889lsVj08ssvO7XgqKgoSdKGDRsk/XfsT8lYoD+72BghAADgeUwFoJ07d6pHjx7q2bNnhW1K9u/cudN0ceXJz8+XJF1yySWSzgeb4OBgpaenq6ioqFTboqIipaenq02bNswAAwAADqYCkJ+fn4KDgy/aLjg4uMzDDCvj3//+t06cOFFm+4kTJxQTEyNJ6t+/v6TzA6PHjx+vwsJCzZ07t1T7uXPnqrCwUBMmTKhyDQAAoP4yNQvs+uuv144dOy7abseOHerSpUuVj//BBx8oISFBPXr0UGhoqAICApSXl6c1a9boyJEjjllmJWJjY7V69WrFx8crMzNTERER2r59u2MpjEmTJlW5BgAAUH+ZCkBPPfWU+vXrp0WLFik2NrbcNs8995x27dqll156qcrHHzBggPLz8/XNN99o06ZNKiwsVGBgoK699loNHTpUY8eOLfWkaavVqrS0NM2ePVsrV67U+vXr1bx5c02ePFmzZs1Sw4YNzXxNAABQT1VqKYyNGzeW2fbRRx/ppZdeUkREhEaMGKE2bdpIkvbv36/ly5dr27ZteuyxxzR48GD16tXL+ZW7EEthAABQ91Tl93elApCXl5csFkuZ7SUf/fO+P263WCw6e/ZspYt3BwQgAADqHqevBdarV69yAxAAAEBdVKkAVPLMHQAAgPrAOauWAgAA1CEEIAAA4HGqtRr8qVOnlJGRofz8fJ06darCdiNHjqzOaQAAAJzKdAB67rnnNH/+/Eqtyk4AAgAA7sRUAHr55Zc1bdo0SdI111yjsLAwNWrUyKmFAQAAuIrpANSgQQOtXLlSAwcOdHZNAAAALmVqEPSBAwfUq1cvwg8AAKiTTPUANWvWTE2bNnV2LagmVz1hWuIp0wCA+sVUD9Dtt9+uTZs2yW63O7seAAAAlzMVgGbNmqUzZ87oscce05kzZ5xdEwAAgEuZugUWHBysr7/+WoMGDVKHDh3Up08ftW7dWl5eZfOUxWJRXFxctQsFAABwFlMByDAMvfDCC9q9e7fsdruSk5PLtLFYLDIMgwAEAADcjqkA9Nxzz+mll15SgwYNNGDAAIWFhcnf39/ZtQEAALiEqQCUlJSkSy+9VF999ZU6d+7s7JoAAABcytQg6NzcXPXs2ZPwAwAA6iRTASgoKIilLwAAQJ1lKgANHjxYX3311QVXgAcAAHBXpgLQ7Nmz1aRJEw0bNkyHDx92dk0AAAAuZWoQ9KRJk9ShQwetWrVK69at0/XXX3/B5wD9/e9/r3ahAAAAzmIqACUnJ8tisUiSjh8/rg0bNlTYlgAEAADcjakAtHTpUmfXAQAAUGNMBaBRo0Y5uw4AAIAaY2oQNAAAQF1GAAIAAB7H1C2wsWPHVrotg6ABAIC7MT0L7EJKZoiVrAZPAAIAAO7EqbPA7Ha7cnJy9NlnnykjI0OTJk3SddddV60CAQAAnM0ls8Bmz56t2NhYvfHGG9q+fbupwgAAAFzFZYOg58+fr0aNGmnmzJmuOgUAAIApLgtADRo0UEREhL788ssqfzYvL0/PP/+8oqKi1Lp1a/n4+CgoKEhDhgzR5s2by/2MzWZTTEyMQkJC5Ovrq9DQUE2dOlWFhYXV/SoAAKCecek0+JMnT+ro0aNV/txLL72kJ554Qvv27VNUVJQmT56sHj16aPXq1brpppv0/vvvl2pfVFSk3r17KzExUR07dtQTTzyhDh06aPHixerbty+r1gMAgFJMjQGqjF27dunrr79Wq1atqvzZbt26acOGDerdu3ep7V999ZX69euniRMn6q677pKvr68kadGiRcrKytK0adO0cOFCR/vp06crPj5eiYmJmjFjRvW+EAAAqDcshmEYVf3QW2+9VeG+48ePa9euXXr77bdVWFiop59+WnPmzKlWkX8UHR2t1NRUbd26VV26dJFhGGrZsqVsNpt++eUXWa1WR9uioiIFBQWpWbNm2rt3b6XPYbPZFBgYqIKCAgUEBDit9hKh0z91+jFd7cDCO2q7BAAALqgqv79N9QCNHj3a8ayf8pRkqjvvvFNPP/20mVNU6JJLLpF0foyRJGVnZys/P1/R0dGlwo8kWa1WRUZGKiUlRbm5uaZ6owAAQP1jKgCNHDmywgDk4+OjFi1a6JZbbtFNN91UreL+7ODBg/ryyy/VvHlzXXPNNZLOByBJCgsLK/czYWFhSklJUXZ2doUBKCEhQQkJCY73drvdqXUDAAD34pInQbtCcXGxRowYodOnTys+Pl7e3t6SpIKCAklSYGBguZ8r6QIraVcem82mvLw8J1cMAADclcsGQTuT3W7X6NGjtXHjRk2YMEEjRoxw6vEDAgLUokWLUuc7dOiQU88BAADch9uvBm+32zV27FitWLFCw4cP16uvvlpqf0nPT0U9PDabrVS78sTExOjnn392vHbv3u2k6gEAgDuqVA/QhWZ9VcbIkSNNfc5ut2vMmDF66623NGzYMCUnJ8vLq3RmKxn7UzIW6M8uNkYIAAB4nkpNg/fy8rrgrK
+LOXfuXJU/88fwc9999+mdd95xjPv5o8pMg2/atKn27dtX6XMzDb4spsEDANyd06fB9+3bt8oBaNOmTTpx4oSp4FRy2+utt97SPffco+XLl5cbfiTJYrFo/PjxeuaZZzR37txSD0KcO3euCgsL9eSTT1a5BgAAUH9VKgBVZT2vr776SrGxsTp58qQkOaarV8UzzzyjZcuWyd/fX+3bt9ezzz5bps1dd92l8PBwSVJsbKxWr16t+Ph4ZWZmKiIiQtu3b1dqaqq6du2qSZMmVbkGAABQfzltFtjOnTs1Y8YMffbZZzIMQ61bt9YzzzxjasbWgQMHJEmFhYWaN29euW1CQ0MdAchqtSotLU2zZ8/WypUrtX79ejVv3lyTJ0/WrFmz1LBhQ7NfCwAA1EOmlsL4o9zcXMXFxemdd97RuXPndNlll+nJJ5/UI488Ih8fH2fVWaMYA1QWY4AAAO7O5UthSNLRo0c1b948vfLKKzp16pQuvfRSPf7445o2bZpLQgMAAICzVDkAnTp1SomJiVq0aJFsNpu8vb31wAMPaPbs2QoKCnJFjQAAAE5V6QBkt9uVlJSkZ555RocOHZJhGPqf//kfzZ8/X+3bt3dljQAAAE5VqQD00Ucf6amnntJPP/0kwzDUu3dvxcfHq1u3bq6uDwAAwOkqFYDuvvtuWSwWxzif/v376+zZs/rmm28qdRJnrwoPAABQHVUaA3TixAktWLBACxYsqPRnLBaLzp49W+XCAAAAXKVSAah169bVWgoDAADAnVQqAJU8mBAAAKA+8Lp4EwAAgPqFAAQAADwOAQgAAHgcAhAAAPA4BCAAAOBxCEAAAMDjEIAAAIDHIQABAACPQwACAAAehwAEAAA8DgEIAAB4HAIQAADwOAQgAADgcQhAAADA4xCAAACAxyEAAQAAj0MAAgAAHocABAAAPA4BCAAAeBwCEAAA8DgEIAAA4HEIQAAAwOMQgAAAgMchAAEAAI/jtgFo+fLlevDBB9WlSxf5+vrKYrEoOTm5wvY2m00xMTEKCQmRr6+vQkNDNXXqVBUWFtZc0QAAoE5oUNsFVOTpp59WTk6OLr/8cjVv3lw5OTkVti0qKlLv3r2VlZWlqKgoDRs2TJmZmVq8eLHS0tK0ceNG+fn51WD1AADAnbltD1BSUpIOHDig3377TQ899NAF2y5atEhZWVmaNm2aUlJStHDhQqWkpGjatGnaunWrEhMTa6hqAABQF7htALrlllsUEhJy0XaGYSgpKUn+/v6Ki4srtS8uLk7+/v5KSkpyVZkAAKAOctsAVFnZ2dnKz89XZGSkrFZrqX1Wq1WRkZHat2+fcnNza6lCAADgbupFAJKksLCwcveXbC9pV56EhAS1bNnS8erYsaPzCwUAAG7DbQdBV1ZBQYEkKTAwsNz9AQEBpdqVx2azKS8vz/nF1SOh0z91yXEPLLzDJccFAOBC6nwAcoaAgAC1aNHC8d5ut+vQoUO1WBEAAHClOn8LrKTnp6IeHpvNVqpdeWJiYvTzzz87Xrt373Z+oQAAwG3U+QB0sTE+FxsjBAAAPE+9CEDBwcFKT09XUVFRqX1FRUVKT09XmzZt1KpVq1qqEAAAuJs6H4AsFovGjx+vwsJCzZ07t9S+uXPnqrCwUBMmTKil6gAAgDty20HQSUlJ+vrrryVJ33//vWPbhg0bJEk9evTQ+PHjJUmxsbFavXq14uPjlZmZqYiICG3fvl2pqanq2rWrJk2aVBtfAQAAuCm3DUBff/21li1bVmpbenq60tPTHe9LApDValVaWppmz56tlStXav369WrevLkmT56sWbNmqWHDhjVaOwAAcG8WwzCM2i7C3dhsNgUGBqqgoMDxHCFnctUzdeoingMEAHCWqvz+dtseIKC6eHgjAKAidX4QNAAAQFURgAAAgMchAAEAAI9DAAIAAB6HAAQAADwOAQgAAHgcAhAAAPA4BCAAAOBxCEAAAMDjEIAAAIDHIQABAACPQwACAAAehwAEAAA8DqvBo1a5asV2lObKn/OBhXe47NgA4Cr0AAEAAI9DAAIAAB6HAAQAADwOAQgAAHgcBkEDVcSA4tJc9fOoiz8LAHUHPUAAAMDjEIAAAIDHIQABAACPQwACAAAeh0HQAACgXPV5kgM9QAAAwOMQgAAAgMchAAEAAI/DGCDAjbjyIYv4Lx5mCYAeIAAA4HEIQAAAwOMQgAAAgMepVwFo69at6t+/vxo3biyr1aru3bvrgw8+qO2yAACAm6k3g6DXr1+v6Oho+fn5aejQoWrUqJFWrlyp++67T7m5uZo8eXJtlwigChgQXnPq4sPu6mLNrsLfFXPqRQ/Q2bNnNWHCBHl5eWnjxo16/fXXtWTJEn333Xdq3769nnzySeXk5NR2mQAAwE3UiwC0bt067d27V/fff7/Cw8Md2wMDA/Xkk0/qzJkzWrZsWe0VCAAA3Eq9CEAbNmyQJEVFRZXZFx0dLUlKS0uryZIAAIAbqxdjgLKzsyVJYWFhZfYFBQXJ39/f0aY8CQkJSkhIcLw/d+6cJMlmszm50vPsp0+45LgAap+r/r/hSq76f5IrfxZ1sWZXqYu/U1z1cy45rmEYF21bLwJQQUGBpPO3vMoTEBDgaFMem82mvLy8MttbtWrlnAIBeIzA52u7AvdRF38WdbHmusjVP+fjx49XmAlK1IsAVF0BAQFq0aKF473dbtf48eM1efJkWSwWp5yjY8eOOnTokJo3b67du3c75ZioPq6Le+K6uC+ujXviupxnGIaOHz+u4ODgi7atFwGoJOVV1Mtjs9n0l7/8pcLPx8TEKCYmxiW1lfDy8nL8NyAgwKXnQuVxXdwT18V9cW3cE9flvy7W81OiXgyCLhn7U944n19++UWFhYXljg8CAACeqV4EoN69e0uSUlNTy+xLSUkp1QYAAKBe3ALr16+f2rZtqxUrVuixxx5zPAuooKBA8+fPl4+Pj0aOHFmrNcbExMhms3l816S74bq4J66L++LauCeuS9VZjMrMFasDKloKIycnR4sXL2YpDAAA4FBvApAkbdmyRbNmzdI333yj4uJiXXPNNYqJidF9991X26UBAAA3Uq8CEAAAQGXUi0HQAAAAVUEAAgAAHocA5GJbt25V//791bhxY1mtVnXv3l0ffPBBbZflEZYvX64HH3xQXbp0ka+vrywWi5KTkytsb7PZFBMTo5CQEPn6+io0NFRTp05VYWFhzRVdz+Xl5en5559XVFSUWrduLR8fHwUFBWnIkCHavHlzuZ/hutSMU6dOKSYmRr169VJwcLD8/PwUFBSkyMhILV26VMXFxWU+w7WpPfHx8bJYLLJYLPr222/L7OfaVIIBl1m3bp1xySWXGI0aNTImTJhgxMTEGCEhIYYkY/HixbVdXr1X8rO+/PLLHX9eunRpuW0LCwuN8PBwQ5IRFRVlTJs2zYiKijIkGV27djVOnjxZs8XXU9OmTTMkGe3atTPGjRtnTJ8+3RgyZIjh7e1teHl5Ge+9916p9lyXmvPbb78Zfn5+Rq9evYzx48cbM2bMMB566CHH352oq
Cjj3LlzjvZcm9rz/fffG76+vobVajUkGZs2bSq1n2tTOQQgFykuLjbatWtn+Pr6GpmZmY7tx44dM9q3b2/4+PgYBw4cqL0CPcAXX3zh+BkvWLDgggFo5syZhiRj2rRppbaX/MKeP3++q8v1CCtXrjQ2bNhQZvvGjRuNSy65xPjLX/5inDp1yrGd61Jzzp07Z5w+fbrM9uLiYuPmm282JBmffPKJYzvXpnacOXPGiIiIMG644QZj+PDh5QYgrk3lEIBcJCUlxZBkjBkzpsy+5ORkQ5IxZ86cWqjMM10oANntdiM4ONjw9/c3CgsLS+0rLCw0/P39jbZt29ZQpZ6r5F+oW7duNQyD6+JOXnjhBUOS8fzzzxuGwbWpTbNmzTJ8fX2NH374wRg1alSZAMS1qTzGALnIhg0bJElRUVFl9kVHR0uS0tLSarIkVCA7O1v5+fmKjIyU1Wottc9qtSoyMlL79u1Tbm5uLVXoGS655BJJUoMG5x9Qz3VxD3a7XZ9//rkkqVOnTpK4NrVl+/btmjdvnmbNmqWrrrqq3DZcm8ojALlIycKs5S3CGhQUJH9//3IXb0XNu9C1+uN2rpfrHDx4UF9++aWaN2+ua665RhLXpbacOXNGs2fP1qxZs/T//t//09VXX601a9ZozJgx6tevnySuTW04ffq0Ro4cqfDwcMXGxlbYjmtTefViLTB3VFBQIEkKDAwsd39AQICjDWpXZa7VH9vBuYqLizVixAidPn1a8fHx8vb2lsR1qS1nzpzRnDlzHO8tFoumTJmiBQsWOLZxbWrezJkzlZ2drW3btjn+jpSHa1N59AABqDV2u12jR4/Wxo0bNWHCBI0YMaK2S/J4/v7+MgxD586dU25urv72t78pKSlJN998s2w2W22X55E2bdqkxYsX6+mnn3bchkT1EYBcpCR9V5SybTZbhQkdNasy1+qP7eAcdrtdY8eO1YoVKzR8+HC9+uqrpfZzXWqXl5eXWrZsqYkTJ+r1119Xenq65s2bJ4lrU5POnj2rUaNG6dprr9X06dMv2p5rU3ncAnORP95nvf7660vt++WXX1RYWKhu3brVRmn4k4vdE7/YPXVUnd1u15gxY/TWW29p2LBhSk5OlpdX6X+PcV3cR8lkjpLJHVybmlNYWOj4efr4+JTb5sYbb5Qkffzxx47B0VybiyMAuUjv3r21YMECpaamaujQoaX2paSkONqg9oWFhSk4OFjp6ekqKioqNXOiqKhI6enpatOmjVq1alWLVdYffww/9913n95+++1yxzRwXdxHfn6+pP/O1OPa1BxfX1+NGzeu3H0bN25Udna2Bg0apKZNmyo0NJRrUxW1PQ+/viouLjbatm17wQch7t+/v9bq8zQ8CNE9nDt3zvHsknvuuccoLi6+YHuuS8354YcfjKKiojLbi4qKjNtuu82QZMybN8+xnWtT+8p7DpBhcG0qy2IYhlE70av+W79+vaKjo+Xn56ehQ4eqUaNGWrlypXJycrR48WJNnjy5tkus15KSkvT1119Lkr7//ntt375dkZGRuuKKKyRJPXr00Pjx4yWd/5dRZGSkvvvuO0VFRSkiIkLbt29XamqqunbtqrS0NDVs2LDWvkt9MXv2bM2ZM0f+/v56/PHHHc/8+aO77rpL4eHhkrguNWn27NlKSEhQjx49FBoaqoCAAOXl5WnNmjU6cuSIevbsqZSUFMfPm2tT+0aPHq1ly5Zp06ZN6t69u2M716aSajuB1XebN282brvtNiMgIMBo2LCh0a1btzLrHcE1Sv51VNFr1KhRpdofO3bMmDRpktGqVSvjkksuMVq3bm1MnjzZsNlstfMF6qGLXROV00vHdakZW7duNSZMmGBcffXVRuPGjY0GDRoYl112mdGnTx/jtddeK7e3jmtTuyrqATIMrk1l0AMEAAA8DtPgAQCAxyEAAQAAj0MAAgAAHocABAAAPA4BCAAAeBwCEAAA8DgEIAAA4HEIQAAAwOMQgAAAgMchAAFwutDQUFkslou+kpOTK33MP3+2ZB238pw9e1bLly/XPffco9DQUPn7+8vX11dBQUHq27evZs6cqZ07d1b7ex4/flz+/v6yWCz6/PPPK/WZ8PBwWSwWLVq0yLHNz8+v1HcbPXp0tWsDcGFlVyIEACf54+Kz5bnQvoqMGjXKcezyZGZm6p577tHevXtlsVh01VVXqXPnzmrYsKEOHz6sjIwMrV+/XnPnztWkSZOUmJhY5RpKNGrUSPfcc4+Sk5P15ptv6rbbbrtg+23btum7775TgwYNNHLkSMf2ESNGqLi4WHv27FF6errpegBUHgEIgMuMHz/e6b0ZF+o12rZtm3r16qUTJ05owIABSkhIUFhYWKk2drtda9eu1YIFC7Rr165q1zNu3DglJyfrn//8p37//Xc1adKkwrZvvvmmJOmOO+5QUFCQY/sbb7wh6fx3IwABNYNbYADqheLiYt1zzz06ceKE7r77bq1evbpM+JEkLy8v3XrrrVq3bp3mzp1b7fP26NFDHTp00OnTp/XOO+9U2O706dN69913JUljx46t9nkBVA8BCEC98M4772j//v3y9fXVK6+8Ii+vi//vrWvXruVuP3nypJYsWaLu3burcePG8vPzU4cOHRQbG6sjR46UaT9u3DhJ/+3hKc/HH3+so0ePKigoSP3796/ktwLgKgQgAPXC6tWrJUnR0dFq2rSp6ePk5+frhhtu0JQpU5Sdna2uXbuqf//+On36tJ577jl16dJFOTk5pT4zcuRINWjQQFlZWcrMzCz3uCXhaNSoUWrQgNEHQG0jAAGoF7Zt2yap4l6dyjAMQ/fee6++//57jRs3TgcOHNAXX3yhjz76SHv27NHkyZN14MABjRkzptTn/vrXv2rAgAGSpKVLl5Y5bm5urtauXSuJ21+AuyAAAXCZMWPGXHAa/LFjx5x2rsOHD0tShb0/77//vkaPHl3mVfI5SUpJSVF6errCw8P16quvqlGjRo59DRo00KJFi9SpUyetX7++zDT6kttgK1as0JkzZ0rtS05Olt1uV48ePdS+fXunfF8A1UM/LACXudg0eB8fnxqrZevWrVq2bFmZ7bNnz9bll18uSfr0008lSUOGDCn3NpWXl5d69eqlnTt36ptvvlGnTp0c+26//XYFBwcrPz9fq1at0r333ivpfK9Sycy1kpAEoPYRgAC4jCumwVfk8ssvV25urn777bdy9y9evFiLFy92vG/QoIHOnTtXqs2+ffskSXFxcYqLi7vg+f58Hm9vb40ePVrz58/Xm2++6QhAGzZs0L59+xzPDALgHghAAOqFiIgI5ebmKiMjw/Qx7Ha7pPNT29u1a3fBtldffXWZbWPHjtWCBQv0xRdf6Oeff1bLli0dY4KGDh0qq9VqujYAzkUAAlAvDBo0SKtXr1ZKSooOHz7suK1VFa1atZIk3XnnnZoyZUqVP9+uXTv17t1bGzZs0LJly/Too49q5cqVkhj8DLgbBkEDqBeGDx+ukJAQnTp1So888ogMw6jyMW6//XZJ0ocffmjq85Ica5QlJyfr
vffe04kTJ3TVVVepe/fupo4HwDUIQADqBR8fH3344Yfy8/PTBx98oMGDB2vPnj3ltv3mm2/KDTh33nmnunbtqi1btmjMmDHljic6evSoXn31VZ09e7bcYw8ZMkSNGzfWnj179PTTT0ti8DPgjiyG2X/mAEAFQkNDlZOTc9FZYFFRUbr//vsrdUyLxSJJF+2ZycjI0L333qv9+/fLYrHo6quvVlhYmKxWqwoKCrRjxw7HgwwHDhyoFStWyN/f3/H5/Px83XHHHcrKypLVatV1112n1q1b68yZM9q3b5++//57nTt3TidPnpSfn1+5NTzyyCN65ZVXJEmXXHKJ8vLyKvVwxuTkZI0ZM0ajRo264JpnAKqPMUAAXCY9Pf2Ci3s2bty40gGosrp06aIff/xR7777rlavXq2MjAx9/vnnOnfunBo3bqyOHTvq/vvv1/33319qGnuJ4OBgffvtt0pOTtb777+vHTt2aMuWLWrSpImCg4P10EMPadCgQRWGH+l8j09JABo4cGC1nkwNwDXoAQJQJ1S2B6guowcIqDn0AAGoU0qeK9S7d+8yS1LUVRMmTFBxcXGFY5YAOB8BCECdUvI05wYNGtSbAPT222/r9OnTtV0G4FG4BQYAADwO0+ABAIDHIQABAACPQwACAAAehwAEAAA8DgEIAAB4HAIQAADwOAQgAADgcQhAAADA4xCAAACAx/n/r9vzadQMpacAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "plt.hist(n_clusters, bins=16, range=(-0.5, 15.5))\n", + "plt.xticks(range(0, 16, 2))\n", + "plt.xlabel(\"Number of clusters\")\n", + "plt.ylabel(\"Number of events\")\n", + "plt.show()\n", + "plt.clf()\n", + "\n", + "plt.hist(n_hadrons, bins=26, range=(4.5, 30.5))\n", + "plt.xlabel(\"Number of hadrons\")\n", + "plt.ylabel(\"Number of events\")\n", + "plt.show()\n", + "plt.clf()\n", + "\n", + "plt.hist(had_kin[:, 0], bins=20)\n", + "plt.xlabel(\"E [GeV]\")\n", + "plt.ylabel(\"Number of hadrons\")\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.7" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/pyproject.toml b/pyproject.toml index 6a737bc..8016895 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,7 +32,7 @@ lint.ignore = [ "PLC0415", "G004", "PD901", "N802", "C901", "DTZ005", "DTZ007", "INP", "EXE002", "TD002", "ANN001", "ANN002", "ANN003", "ANN101", "ANN201", "ANN202", "ANN204", "CPY001", "TRY003", "N803", "N806", "N812", "T201", "PLW1514", "PTH123", "RUF015", "RUF017", "PLR6301", "ERA", "ISC001", - "RET504", "SLF001", "S403", "PLR1702" + "RET504", "SLF001", "S403", "PLR1702", "UP035" ] [tool.ruff.lint.pydocstyle] diff --git a/requirements.txt b/requirements.txt index 127411f..647768d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,16 +1,17 @@ # --------- pytorch --------- # -torch>=1.10.0 -torchvision>=0.11.0 -pytorch-lightning==1.8.1 -torchmetrics==0.10.0 +torch>=2.0 +torchvision +lightning +torchmetrics +torch_geometric # --------- hydra --------- # -hydra-core==1.2.0 -hydra-colorlog==1.2.0 -hydra-optuna-sweeper==1.2.0 +hydra-core>=1.2.0 +hydra-colorlog>=1.2.0 +hydra-optuna-sweeper>=1.2.0 # --------- loggers --------- # -# wandb +wandb # neptune-client # mlflow # comet-ml @@ -24,9 +25,14 @@ more_itertools pandas matplotlib pot -# sh # for running bash commands in some tests (linux/macos only) +ruff # for python distribution build twine pytest-cov + +-f https://data.pyg.org/whl/torch-2.2.0+cu121.html +pyg_lib +torch_scatter +torch-cluster diff --git a/scripts/check_env.py b/scripts/check_env.py new file mode 100755 index 0000000..76507db --- /dev/null +++ b/scripts/check_env.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python +"""Check if the python environment is set up correctly for the project.""" + + +def check(): + # python interpreter + import sys + + print(f"python interpreter: {sys.executable}") + print(f"python version: {sys.version}") + + try: + import torch + + print("torch: ", torch.__version__) + print("torch cuda: ", torch.cuda.is_available()) + print("torch cuda device count: ", torch.cuda.device_count()) + print("torch cuda device name: ", torch.cuda.get_device_name()) + print("torch cuda device capability: ", torch.cuda.get_device_capability()) + print("torch distributed :", torch.distributed.is_available()) + except ImportError: + print("torch not found") + + try: + import lightning + + print("lightning: ", lightning.__version__) + except ImportError: + print("lightning not found") + + try: + import torch_geometric 
+ + print("pyg: ", torch_geometric.__version__) + except (ImportError, OSError) as error: + print(error) + print("pyg not found") + + try: + import torch_scatter + + print("torch_scatter: ", torch_scatter.__version__) + import torch + from torch_scatter import scatter_max + + device = "cuda" if torch.cuda.is_available() else "cpu" + + print(f"Test scatter_max in {device}.") + src = torch.tensor([[2, 0, 1, 4, 3], [0, 2, 1, 3, 4]]).to(device) + index = torch.tensor([[4, 5, 4, 2, 3], [0, 0, 2, 2, 1]]).to(device) + + out, argmax = scatter_max(src, index, dim=-1) + print("out:", out) + print("argmax:", argmax) + + except ImportError: + print("torch_scatter not found") + + +if __name__ == "__main__": + check() diff --git a/src/hadml/datamodules/components/herwig.py b/src/hadml/datamodules/components/herwig.py index f6c4706..fb91774 100644 --- a/src/hadml/datamodules/components/herwig.py +++ b/src/hadml/datamodules/components/herwig.py @@ -1,33 +1,29 @@ -import pathlib -from collections import Counter +import glob +import math import os +import pathlib import pickle +from collections import Counter from typing import Dict, Optional, Tuple -import glob -import math import numpy as np import pandas as pd - import torch from pytorch_lightning import LightningDataModule from pytorch_lightning.core.mixins import HyperparametersMixin +from torch.utils.data import Dataset as TorchDataset +from torch_geometric.data import Data, InMemoryDataset from hadml.datamodules.components.utils import ( - read_dataframe, - split_to_float, InputScaler, boost, - process_data_split, - get_num_asked_events, get_angles, + get_num_asked_events, + process_data_split, + read_dataframe, + split_to_float, ) -from torch_geometric.data import Data -from torch_geometric.data import InMemoryDataset - -from torch.utils.data import Dataset as TorchDataset - pid_map_fname = "pids_to_ix.pkl" @@ -78,9 +74,7 @@ def prepare_data(self): fname = os.path.join(self.hparams.data_dir, self.hparams.origin_fname) if not os.path.exists(fname): raise FileNotFoundError(f"File {fname} not found.") - df = pd.read_csv( - fname, usecols=[3, 4], sep=";", header=None, names=None, engine="c" - ) + df = pd.read_csv(fname, usecols=[3, 4], sep=";", header=None, names=None, engine="c") def extract_type(df, sep=","): out = df @@ -88,7 +82,7 @@ def extract_type(df, sep=","): out = df.str.extract(f"^([^{sep}]+)").astype(np.int16) return out - h1_type, h2_type = [extract_type(df[idx]) for idx in [3, 4]] + h1_type, h2_type = (extract_type(df[idx]) for idx in [3, 4]) del df all_types = np.concatenate([h1_type, h2_type]).squeeze() count = Counter(all_types) @@ -104,7 +98,9 @@ def extract_type(df, sep=","): def create_dataset(self) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: """It creates the dataset for training a conditional GAN. 
- Returns: + + Returns + ------- cond_info: conditional information x_truth: target truth information with conditonal information """ @@ -126,8 +122,10 @@ def create_dataset(self) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: org_inputs = np.concatenate([cluster, q1, q2, h1, h2], axis=1) del cluster, q1, q2, h1, h2 - event_labels, q1_types, q2_types, h1_types, h2_types, org_inputs = self._filter_unnamed_types( - event_labels, q1_types, q2_types, h1_types, h2_types, org_inputs + event_labels, q1_types, q2_types, h1_types, h2_types, org_inputs = ( + self._filter_unnamed_types( + event_labels, q1_types, q2_types, h1_types, h2_types, org_inputs + ) ) num_tot_evts = len(org_inputs) @@ -189,7 +187,9 @@ def create_dataset(self) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: self.summarize() return dataset - def _filter_unnamed_types(self, event_labels, q1_types, q2_types, h1_types, h2_types, org_inputs): + def _filter_unnamed_types( + self, event_labels, q1_types, q2_types, h1_types, h2_types, org_inputs + ): mask = (np.isin(h1_types.reshape(-1), list(self.pids_to_ix.keys()))) & ( np.isin(h2_types.reshape(-1), list(self.pids_to_ix.keys())) ) @@ -211,19 +211,15 @@ def _load_dataset(self, fname): ) for name in ["q1", "q2", "c", "h1", "h2", "event_labels"] } - load_from_cache = all( - [os.path.exists(f) for f in processed_fnames.values()] - ) + load_from_cache = all([os.path.exists(f) for f in processed_fnames.values()]) if not load_from_cache: cluster, event_labels, h1, h2, q1, q2 = self._load_raw_dataset(fname) if processed_fnames is not None: self._save_data_arrays(event_labels, h1, h2, processed_fnames, q1, q2, cluster) else: - print(f"Loading cached data arrays...") - cluster, event_labels, h1, h2, q1, q2 = self._load_data_arrays( - processed_fnames - ) + print("Loading cached data arrays...") + cluster, event_labels, h1, h2, q1, q2 = self._load_data_arrays(processed_fnames) return cluster, event_labels, h1, h2, q1, q2 def _load_data_arrays(self, processed_fnames): @@ -291,9 +287,7 @@ def summarize(self): print(f"\tNumber of conditional variables: {self.cond_dim}") print(f"\tNumber of output variables: {self.output_dim}") print(f"\tNumber of output hadrons: {self.hparams.num_output_hadrons}") - print( - f"\tNumber of particle kinematics: {self.hparams.num_particle_kinematics}" - ) + print(f"\tNumber of particle kinematics: {self.hparams.num_particle_kinematics}") class HerwigClusterDataset(TorchDataset, HyperparametersMixin): @@ -327,9 +321,7 @@ def __init__( super().__init__() self.save_hyperparameters(logger=False) - pids_map_path = ( - os.path.join(root, pid_map_fname) if root is not None else pid_map_fname - ) + pids_map_path = os.path.join(root, pid_map_fname) if root is not None else pid_map_fname if os.path.exists(pids_map_path): print("Loading existing pids map: ", pids_map_path) self.pids_to_ix = pickle.load(open(pids_map_path, "rb")) @@ -348,17 +340,13 @@ def __call__(self, *args, **kwargs): else: self.convert_cluster_decay(*args, **kwargs) - def convert_cluster_decay( - self, filename: str, outname: str, do_check_only: bool = False - ): + def convert_cluster_decay(self, filename: str, outname: str, do_check_only: bool = False): mode = self.hparams.mode with_pert = self.hparams.with_pert with_quark = self.hparams.with_quark outname = ( - outname + f"_mode{mode}" + "_with_quark" - if with_quark - else outname + f"_mode{mode}" + outname + f"_mode{mode}" + "_with_quark" if with_quark else outname + f"_mode{mode}" ) outname = outname + "_with_pert" if with_pert else outname if 
do_check_only: @@ -368,7 +356,7 @@ def convert_cluster_decay( print(f"reading from {filename}") df = read_dataframe(filename, ";", "python") - q1, q2, c, h1, h2 = [split_to_float(df[idx]) for idx in range(5)] + q1, q2, c, h1, h2 = (split_to_float(df[idx]) for idx in range(5)) if mode == 0: selections = (q1[5] == 1) & (q2[5] == 1) @@ -409,7 +397,7 @@ def convert_cluster_decay( new_inputs = boost(org_inputs) out_4vec = new_inputs[:, -4:] - _, px, py, pz = [out_4vec[:, idx] for idx in range(4)] + _, px, py, pz = (out_4vec[:, idx] for idx in range(4)) pT = np.sqrt(px**2 + py**2) phi = np.arctan(px / py) theta = np.arctan(pT / pz) @@ -489,7 +477,7 @@ def convert_events(self, filename, outname, *args, **kwargs): clusters = [c.split(";")[:-1] for c in items] df = pd.DataFrame(clusters) - q1, q2, c, h1, h2 = [split_to_float(df[idx]) for idx in range(5)] + q1, q2, c, h1, h2 = (split_to_float(df[idx]) for idx in range(5)) cluster = c[[1, 2, 3, 4]].values @@ -509,7 +497,7 @@ def convert_events(self, filename, outname, *args, **kwargs): new_inputs = np.array([boost(row) for row in org_inputs]) out_4vec = new_inputs[:, -4:] - _, px, py, pz = [out_4vec[:, idx] for idx in range(4)] + _, px, py, pz = (out_4vec[:, idx] for idx in range(4)) pT = np.sqrt(px**2 + py**2) phi = np.arctan(px / py) theta = np.arctan(pT / pz) @@ -520,12 +508,8 @@ def convert_events(self, filename, outname, *args, **kwargs): # convert particle IDs to indices # then these indices can be embedded in N dim. space - h1_type_indices = torch.from_numpy( - np.vectorize(self.pids_to_ix.get)(h1_types) - ) - h2_type_indices = torch.from_numpy( - np.vectorize(self.pids_to_ix.get)(h2_types) - ) + h1_type_indices = torch.from_numpy(np.vectorize(self.pids_to_ix.get)(h1_types)) + h2_type_indices = torch.from_numpy(np.vectorize(self.pids_to_ix.get)(h2_types)) data = Data( x=torch.from_numpy(out_truth), @@ -537,9 +521,7 @@ def convert_events(self, filename, outname, *args, **kwargs): ) torch.save( data, - os.path.join( - self.hparams.data_dir, f"{outname}_{len(datasets)}.pt" - ), + os.path.join(self.hparams.data_dir, f"{outname}_{len(datasets)}.pt"), ) @@ -553,13 +535,10 @@ def __init__( raw_file_list=None, processed_file_name="herwig_graph_data.pt", ): - self.raw_file_list = [] for pattern in raw_file_list: self.raw_file_list += glob.glob(os.path.join(root, "raw", pattern)) - self.raw_file_list = [ - os.path.basename(raw_file) for raw_file in self.raw_file_list - ] + self.raw_file_list = [os.path.basename(raw_file) for raw_file in self.raw_file_list] self.processed_file_name = processed_file_name if root: @@ -611,7 +590,7 @@ def _create_data(self, line): clusters = [c.split(";")[:-1] for c in items] df = pd.DataFrame(clusters) - q1, q2, c, h1, h2 = [split_to_float(df[idx]) for idx in range(5)] + q1, q2, c, h1, h2 = (split_to_float(df[idx]) for idx in range(5)) cluster = c[[1, 2, 3, 4]].values @@ -631,7 +610,7 @@ def _create_data(self, line): new_inputs = np.array([boost(row) for row in org_inputs]) def get_angles(four_vector): - _, px, py, pz = [four_vector[:, idx] for idx in range(4)] + _, px, py, pz = (four_vector[:, idx] for idx in range(4)) pT = np.sqrt(px**2 + py**2) phi = np.arctan(px / py) theta = np.arctan(pT / pz) @@ -670,13 +649,10 @@ def __init__( raw_file_list=None, processed_file_name="herwig_graph_data.pt", ): - self.raw_file_list = [] for pattern in raw_file_list: self.raw_file_list += glob.glob(os.path.join(root, "raw", pattern)) - self.raw_file_list = [ - os.path.basename(raw_file) for raw_file in self.raw_file_list - ] + 
self.raw_file_list = [os.path.basename(raw_file) for raw_file in self.raw_file_list] self.processed_file_name = processed_file_name if root: @@ -685,7 +661,7 @@ def __init__( print("Loading existing pids map: ", pids_map_path) self.pids_to_ix = pickle.load(open(pids_map_path, "rb")) else: - raise RuntimeError("No pids map found at", pids_map_path) + raise RuntimeError(f"No pids map found at: {pids_map_path}") super().__init__(root, transform, pre_transform, pre_filter) @@ -723,19 +699,19 @@ def process(self): def _create_data(self, line): items = line.split("|")[:-1] - clusters = [pd.Series(c.split(";")[:-1]).str.split(',', expand=True) for c in items] + clusters = [pd.Series(c.split(";")[:-1]).str.split(",", expand=True) for c in items] q1s, q2s, cs, hadrons, cluster_labels, cs_for_hadrons = [], [], [], [], [], [] for i, cluster in enumerate(clusters): # select the two quarks and the cluster /heavy cluster from the cluster - q1 = cluster.iloc[0].to_numpy()[[1,3,4,5,6]].astype(float).reshape(1, -1) - q2 = cluster.iloc[1].to_numpy()[[1,3,4,5,6]].astype(float).reshape(1, -1) - c = cluster.iloc[2].to_numpy()[[1,3,4,5,6]].astype(float).reshape(1, -1) + q1 = cluster.iloc[0].to_numpy()[[1, 3, 4, 5, 6]].astype(float).reshape(1, -1) + q2 = cluster.iloc[1].to_numpy()[[1, 3, 4, 5, 6]].astype(float).reshape(1, -1) + c = cluster.iloc[2].to_numpy()[[1, 3, 4, 5, 6]].astype(float).reshape(1, -1) q1s.append(q1) q2s.append(q2) cs.append(c) # select the final states from the cluster - hadron = cluster[cluster[2] == '[ ]'].to_numpy()[:, [1,3,4,5,6]].astype(float) + hadron = cluster[cluster[2] == "[ ]"].to_numpy()[:, [1, 3, 4, 5, 6]].astype(float) hadrons.append(hadron) # assign cluster label to all hadrons cluster_labels += [i] * len(hadron) @@ -747,20 +723,26 @@ def _create_data(self, line): cs = np.concatenate(cs) hadrons = np.concatenate(hadrons) cs_for_hadrons = np.concatenate(cs_for_hadrons) - cond_kin = np.concatenate([cs[:, [1,2,3,4]], q1s[:, [1,2,3,4]], q2s[:, [1,2,3,4]]], axis=1) - had_kin = np.concatenate([cs_for_hadrons[:, [1,2,3,4]], hadrons[:, [1,2,3,4]]], axis=1) + cond_kin = np.concatenate( + [cs[:, [1, 2, 3, 4]], q1s[:, [1, 2, 3, 4]], q2s[:, [1, 2, 3, 4]]], axis=1 + ) + had_kin = np.concatenate( + [cs_for_hadrons[:, [1, 2, 3, 4]], hadrons[:, [1, 2, 3, 4]]], axis=1 + ) cond_kin_rest_frame = boost(cond_kin) had_kin_rest_frame = torch.from_numpy(boost(had_kin)[:, 4:]) had_kin = torch.from_numpy(had_kin[:, 4:]) q_phi, q_theta = get_angles(cond_kin_rest_frame[:, 4:8]) q_momenta = np.stack([q_phi, q_theta], axis=1) - cond_info = np.concatenate([cs[:, [1,2,3,4]], q1s[:, :1], q2s[:, :1], q_momenta], axis=1) + cond_info = np.concatenate([cs[:, [1, 2, 3, 4]], q1s[:, :1], q2s[:, :1], q_momenta], axis=1) cond_info = torch.from_numpy(cond_info.astype(np.float32)) # convert particle IDs to indices # then these indices can be embedded in N dim. 
space - had_type_indices = torch.from_numpy(np.vectorize(self.pids_to_ix.get)(hadrons[:, [0]].astype(np.int16))) + had_type_indices = torch.from_numpy( + np.vectorize(self.pids_to_ix.get)(hadrons[:, [0]].astype(np.int16)) + ) data = Data( x=cond_info.float(), diff --git a/src/hadml/datamodules/components/pythia.py b/src/hadml/datamodules/components/pythia.py index 03471dd..449739e 100644 --- a/src/hadml/datamodules/components/pythia.py +++ b/src/hadml/datamodules/components/pythia.py @@ -1,34 +1,35 @@ +import glob import os import pickle -import glob import numpy as np - import torch - -from torch_geometric.data import Data -from torch_geometric.data import InMemoryDataset +from torch_geometric.data import Data, InMemoryDataset pid_map_fname = "pids_to_ix.pkl" class PythiaEventDataset(InMemoryDataset): - def __init__(self, root, transform=None, pre_transform=None, - pre_filter=None, raw_file_list=None, - processed_file_name='pythia_graph_data.pt'): - + def __init__( + self, + root, + transform=None, + pre_transform=None, + pre_filter=None, + raw_file_list=None, + processed_file_name="pythia_graph_data.pt", + ): self.raw_file_list = [] for pattern in raw_file_list: self.raw_file_list += glob.glob(os.path.join(root, "raw", pattern)) - self.raw_file_list = [ - os.path.basename(raw_file) for raw_file in self.raw_file_list] + self.raw_file_list = [os.path.basename(raw_file) for raw_file in self.raw_file_list] self.processed_file_name = processed_file_name if root: pids_map_path = os.path.join(root, pid_map_fname) if os.path.exists(pids_map_path): print("Loading existing pids map: ", pids_map_path) - self.pids_to_ix = pickle.load(open(pids_map_path, 'rb')) + self.pids_to_ix = pickle.load(open(pids_map_path, "rb")) else: raise RuntimeError("No pids map found at", pids_map_path) @@ -40,7 +41,7 @@ def __init__(self, root, transform=None, pre_transform=None, def raw_file_names(self): if self.raw_file_list is not None: return self.raw_file_list - return ['pythia_data.dat'] + return ["pythia_data.dat"] @property def processed_file_names(self): @@ -56,8 +57,7 @@ def process(self): data_list = [self._create_data(line) for line in f][:-1] if self.pre_filter is not None: - data_list = [ - data for data in data_list if self.pre_filter(data)] + data_list = [data for data in data_list if self.pre_filter(data)] if self.pre_transform is not None: data_list = [self.pre_transform(data) for data in data_list] @@ -68,22 +68,20 @@ def process(self): torch.save((data, slices), self.processed_paths[0]) def _create_data(self, line): - - particles = line.split(';')[:-1] - particles = [particle.split(',') for particle in particles] + particles = line.split(";")[:-1] + particles = [particle.split(",") for particle in particles] particles = np.array(particles).astype(float) if particles.shape == (0,): - return + return None h_types = particles[:, 0:1] h = particles[:, 1:] # convert particle IDs to indices # then these indices can be embedded in N dim. 
space - h_type_indices = torch.from_numpy( - np.vectorize(self.pids_to_ix.get)(h_types)) + h_type_indices = torch.from_numpy(np.vectorize(self.pids_to_ix.get)(h_types)) data = Data( x=torch.from_numpy(h).float(), diff --git a/src/hadml/datamodules/components/utils.py b/src/hadml/datamodules/components/utils.py index e9e4873..f962e3a 100644 --- a/src/hadml/datamodules/components/utils.py +++ b/src/hadml/datamodules/components/utils.py @@ -1,19 +1,16 @@ -import itertools import pickle from functools import reduce -from typing import Tuple, List, Optional +from typing import List, Optional, Tuple import numpy as np import pandas as pd - from sklearn.preprocessing import MinMaxScaler GAN_INPUT_DATA_TYPE = Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, List[str]] def shuffle(array: np.ndarray): - from numpy.random import MT19937 - from numpy.random import RandomState, SeedSequence + from numpy.random import MT19937, RandomState, SeedSequence np_rs = RandomState(MT19937(SeedSequence(123456789))) np_rs.shuffle(array) @@ -23,8 +20,7 @@ def read_dataframe(filename, sep=",", engine=None): if type(filename) == list: print(filename) df_list = [ - pd.read_csv(f, sep=sep, header=None, names=None, engine=engine) - for f in filename + pd.read_csv(f, sep=sep, header=None, names=None, engine=engine) for f in filename ] df = pd.concat(df_list, ignore_index=True) filename = filename[0] @@ -55,7 +51,8 @@ def calculate_mass(lorentz_vector: np.ndarray) -> float: Args: lorentz_vector: 4 vector [E, px, py, pz] - Returns: + Returns + ------- invariant mass """ return (lorentz_vector**2 * np.array([1, -1, -1, -1])).sum(axis=1) @@ -81,9 +78,7 @@ def boost_fn(lab_4vec: np.ndarray): n_dot_p = np.sum((n * p), axis=1) E_prime = gamma * (E - v_mag * n_dot_p) P_prime = ( - p - + ((gamma - 1) * n_dot_p).reshape(-1, 1) * n - - (gamma * E * v_mag).reshape(-1, 1) * n + p + ((gamma - 1) * n_dot_p).reshape(-1, 1) * n - (gamma * E * v_mag).reshape(-1, 1) * n ) return np.concatenate([E_prime.reshape(-1, 1), P_prime], axis=1) @@ -104,8 +99,7 @@ def inv_boost_fn(boost_4vec: np.ndarray): def boost(a_row: np.ndarray): - """boost all particles to the rest frame of the first particle in the list""" - + """Boost all particles to the rest frame of the first particle in the list""" assert a_row.shape[1] % 4 == 0, "a_row should be a 4-vector" boost_fn, _ = create_boost_fn(a_row[:, :4]) n_particles = (a_row.shape[1]) // 4 @@ -114,19 +108,16 @@ def boost(a_row: np.ndarray): def inv_boost(a_row: np.ndarray): - """boost all particles to the rest frame of the first particle in the list""" - + """Boost all particles to the rest frame of the first particle in the list""" assert a_row.shape[0] % 4 == 0, "a_row should be a 4-vector" _, inv_boost_fn = create_boost_fn(a_row[:, :4]) n_particles = (a_row.shape[1]) // 4 - results = [ - inv_boost_fn(a_row[:, 4 * x : 4 * (x + 1)]) for x in range(1, n_particles) - ] + results = [inv_boost_fn(a_row[:, 4 * x : 4 * (x + 1)]) for x in range(1, n_particles)] return np.concatenate(a_row[:, :4] + results, axis=1) def get_angles(four_vector): - _, px, py, pz = [four_vector[:, idx] for idx in range(4)] + _, px, py, pz = (four_vector[:, idx] for idx in range(4)) pT = np.sqrt(px**2 + py**2) phi = np.arctan(px / py) theta = np.arctan(pT / pz) @@ -157,8 +148,8 @@ def load(self, outname): def dump(self): print( "Min and Max for inputs: {", - ", ".join(["{:.6f}".format(x) for x in self.scaler.data_min_]), - ", ".join(["{:.6f}".format(x) for x in self.scaler.data_max_]), + ", ".join([f"{x:.6f}" for x in 
self.scaler.data_min_]), + ", ".join([f"{x:.6f}" for x in self.scaler.data_max_]), "}", ) @@ -201,15 +192,11 @@ def read(filename, max_evts=None, testing_frac=0.1) -> GAN_INPUT_DATA_TYPE: return (train_in, train_truth, test_in, test_truth, xlabels) -def create_dataloader( - filename, batch_size, num_workers, max_evts=None, testing_frac=0.1 -): +def create_dataloader(filename, batch_size, num_workers, max_evts=None, testing_frac=0.1): import torch - from torch.utils.data import TensorDataset, DataLoader + from torch.utils.data import DataLoader, TensorDataset - train_cond, train_truth, test_cond, test_truth, xlabels = read( - filename, max_evts, testing_frac - ) + train_cond, train_truth, test_cond, test_truth, xlabels = read(filename, max_evts, testing_frac) train_cond = torch.from_numpy(train_cond) train_truth = torch.from_numpy(train_truth) test_cond = torch.from_numpy(test_cond) @@ -233,9 +220,7 @@ def process_data_split( frac_data_used: Optional[float], train_val_test_split: Tuple[float, float, float], ): - split_by_count = reduce( - lambda prev, x: isinstance(x, int) and prev, train_val_test_split, True - ) + split_by_count = reduce(lambda prev, x: isinstance(x, int) and prev, train_val_test_split, True) if split_by_count: if examples_used is not None or frac_data_used is not None: @@ -245,13 +230,9 @@ def process_data_split( examples_used = sum(train_val_test_split) else: if not np.isclose(sum(train_val_test_split), 1.0): - raise ValueError( - "`train_val_test_split` must sum up to 1.0 when fractions are used" - ) + raise ValueError("`train_val_test_split` must sum up to 1.0 when fractions are used") if frac_data_used is not None and examples_used is not None: - raise ValueError( - "Specify either `frac_data_used` or `examples_used` but not both!" - ) + raise ValueError("Specify either `frac_data_used` or `examples_used` but not both!") if frac_data_used is not None and not (0 < frac_data_used <= 1.0): raise ValueError( f"Fraction of data used must be in range (0, 1], but found {frac_data_used}" diff --git a/src/hadml/datamodules/gan_datamodule.py b/src/hadml/datamodules/gan_datamodule.py index 03b03bd..d11624a 100644 --- a/src/hadml/datamodules/gan_datamodule.py +++ b/src/hadml/datamodules/gan_datamodule.py @@ -1,14 +1,13 @@ -from typing import Any, Dict, Optional, Tuple, Protocol +from typing import Any, Dict, Optional, Protocol, Tuple import torch from pytorch_lightning import LightningDataModule from pytorch_lightning.trainer.supporters import CombinedLoader from torch.utils.data import DataLoader, Dataset, TensorDataset, random_split - -from torch_geometric.loader import DataLoader as GeometricDataLoader from torch_geometric.data.dataset import Dataset as GeometricDataset +from torch_geometric.loader import DataLoader as GeometricDataLoader -from hadml.datamodules.components.utils import process_data_split, get_num_asked_events +from hadml.datamodules.components.utils import get_num_asked_events, process_data_split class GANDataProtocol(Protocol): @@ -16,11 +15,14 @@ class GANDataProtocol(Protocol): def prepare_data(self) -> None: """Prepare data for training and validation. - Before the create_dataset function is called.""" + Before the create_dataset function is called. + """ def create_dataset(self) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: """Create dataset from core dataset. 
- Returns: + + Returns + ------- torch.Tensor: conditioinal information torch.Tensor: particle kinematics torch.Tensor: particle types @@ -105,7 +107,6 @@ def test_dataloader(self): def teardown(self, stage: Optional[str] = None): """Clean up after fit or test.""" - pass def state_dict(self): """Extra things to save to checkpoint.""" @@ -113,7 +114,6 @@ def state_dict(self): def load_state_dict(self, state_dict: Dict[str, Any]): """Things to do when loading checkpoint.""" - pass class EventGANDataModule(LightningDataModule): @@ -164,12 +164,7 @@ def setup(self, stage: Optional[str] = None): with both `trainer.fit()` and `trainer.test()` so be careful not to execute things like random split twice! """ - if ( - not self.cond_data_train - and not self.cond_data_val - and not self.cond_data_test - ): - + if not self.cond_data_train and not self.cond_data_val and not self.cond_data_test: ( self.cond_data_train, self.cond_data_val, @@ -181,7 +176,6 @@ def setup(self, stage: Optional[str] = None): ) if not self.obs_data_train and not self.obs_data_val and not self.obs_data_test: - ( self.obs_data_train, self.obs_data_val, diff --git a/src/hadml/eval.py b/src/hadml/eval.py index c5f6f49..453f40f 100644 --- a/src/hadml/eval.py +++ b/src/hadml/eval.py @@ -38,7 +38,7 @@ import hydra from omegaconf import DictConfig from pytorch_lightning import LightningDataModule, LightningModule, Trainer -from pytorch_lightning.loggers import LightningLoggerBase +from pytorch_lightning.loggers.logger import Logger as LightningLoggerBase from hadml import utils @@ -55,10 +55,10 @@ def evaluate(cfg: DictConfig) -> Tuple[dict, dict]: Args: cfg (DictConfig): Configuration composed by Hydra. - Returns: + Returns + ------- Tuple[dict, dict]: Dict with metrics and dict with all instantiated objects. 
""" - assert cfg.ckpt_path log.info(f"Instantiating datamodule <{cfg.datamodule._target_}>") diff --git a/src/hadml/loss/loss.py b/src/hadml/loss/loss.py index 065928f..962a178 100644 --- a/src/hadml/loss/loss.py +++ b/src/hadml/loss/loss.py @@ -1,5 +1,4 @@ -import torch.nn as nn -from torch import Tensor +from torch import Tensor, nn class ls(nn.Module): diff --git a/src/hadml/metrics/compare_fn.py b/src/hadml/metrics/compare_fn.py index 9fc201c..ab78377 100644 --- a/src/hadml/metrics/compare_fn.py +++ b/src/hadml/metrics/compare_fn.py @@ -1,19 +1,17 @@ import math import os -from typing import List, Tuple, Optional, Any, Dict +from typing import Any, Dict, List, Optional, Tuple +import matplotlib.pyplot as plt +import numpy as np from matplotlib import ticker from pytorch_lightning.core.mixins import HyperparametersMixin -import numpy as np -import matplotlib.pyplot as plt - from .image_converter import fig_to_array + def create_plots(nrows, ncols): - fig, axs = plt.subplots( - nrows, ncols, figsize=(4 * ncols, 4 * nrows), constrained_layout=False - ) + fig, axs = plt.subplots(nrows, ncols, figsize=(4 * ncols, 4 * nrows), constrained_layout=False) axs = axs.flatten() return fig, axs @@ -53,27 +51,28 @@ def __call__( outname = None needed_plot_count = self.hparams.num_kinematics * (1 + self.hparams.num_particle_ids) - plot_row_count = math.ceil(needed_plot_count ** 0.5) + plot_row_count = math.ceil(needed_plot_count**0.5) plot_col_count = math.ceil(needed_plot_count / plot_row_count) fig, axs = create_plots(plot_row_count, plot_col_count) axs = axs.reshape(-1, 2) self._plot_kinematics(predictions, truths, xbins, xlabels, xranges, fig, axs[0, :]) for i in range(self.hparams.num_particle_ids): - sim_particle_types = (predictions[:, self.hparams.num_kinematics:] == i).sum(-1) > 0 - true_particle_types = (truths[:, self.hparams.num_kinematics:] == i).sum(-1) > 0 + sim_particle_types = (predictions[:, self.hparams.num_kinematics :] == i).sum(-1) > 0 + true_particle_types = (truths[:, self.hparams.num_kinematics :] == i).sum(-1) > 0 predictions_i = predictions[sim_particle_types] truths_i = truths[true_particle_types] xlabels_i = [l + f" [pid={i}]" for l in xlabels] - self._plot_kinematics(predictions_i, truths_i, xbins, - xlabels_i, xranges, fig, axs[1 + i, :]) + self._plot_kinematics( + predictions_i, truths_i, xbins, xlabels_i, xranges, fig, axs[1 + i, :] + ) if outname is not None: plt.savefig(outname + "-angles.png") plt.savefig(outname + "-angles.pdf") # convert the image to a numpy array - out_images[f"particle kinematics"] = fig_to_array(fig) + out_images["particle kinematics"] = fig_to_array(fig) plt.close("all") config = dict(alpha=0.5, lw=2, density=True) @@ -141,11 +140,10 @@ def _plot_kinematics(self, predictions, truths, xbins, xlabels, xranges, fig, ax label="Generator", **config, ) - ax.set_xlabel(r"{}".format(xlabels[idx])) + ax.set_xlabel(rf"{xlabels[idx]}") ax.set_ylim(0, max_y) ax.legend() - @staticmethod def set_hist_log_scale(ax, patches, bin_heights): """Set log scale on y-axis of `ax`. 
@@ -170,10 +168,8 @@ def set_hist_log_scale(ax, patches, bin_heights): patch.set_y(ylim_bot) minor_ticks = [] - for i in range(0, -int(np.floor(ylim_bot))): - minor_ticks += [ - np.log10(10 ** (-i) - j / 10 ** (i + 1)) for j in range(1, 9) - ] + for i in range(-int(np.floor(ylim_bot))): + minor_ticks += [np.log10(10 ** (-i) - j / 10 ** (i + 1)) for j in range(1, 9)] ax.yaxis.set_major_locator(ticker.MultipleLocator(1)) ax.yaxis.set_minor_locator(ticker.FixedLocator(minor_ticks)) @@ -223,14 +219,16 @@ def __call__( ax = axs[idx] max_y = 0 if len(angles_truths) > 0: - yvals, _, _ = ax.hist(angles_truths[:, idx], bins=xbin, - range=xrange, label='Truth', **config) + yvals, _, _ = ax.hist( + angles_truths[:, idx], bins=xbin, range=xrange, label="Truth", **config + ) max_y = np.max(yvals) - yvals, _, _ = ax.hist(angles_predictions[:, idx], bins=xbin, - range=xrange, label='Generator', **config) + yvals, _, _ = ax.hist( + angles_predictions[:, idx], bins=xbin, range=xrange, label="Generator", **config + ) max_y = max(max_y, np.max(yvals)) * 1.1 - ax.set_xlabel(r"{}".format(xlabels[idx])) + ax.set_xlabel(rf"{xlabels[idx]}") ax.set_ylim(0, max_y * 1.1) ax.legend() @@ -251,7 +249,7 @@ def __call__( label="Generator", **config, ) - ax.set_xlabel(r"{}".format(xlabels[idx + 2])) + ax.set_xlabel(rf"{xlabels[idx + 2]}") ax.set_ylim(0, max_y) ax.legend() diff --git a/src/hadml/metrics/image_converter.py b/src/hadml/metrics/image_converter.py index a4f04bd..8179e56 100644 --- a/src/hadml/metrics/image_converter.py +++ b/src/hadml/metrics/image_converter.py @@ -1,5 +1,4 @@ import numpy as np - from matplotlib.figure import Figure diff --git a/src/hadml/metrics/media_logger.py b/src/hadml/metrics/media_logger.py index a569d82..34c78d6 100644 --- a/src/hadml/metrics/media_logger.py +++ b/src/hadml/metrics/media_logger.py @@ -1,13 +1,12 @@ """Utility functions for logging media to TensorBoard or WandB.""" -from typing import List, Any -from pytorch_lightning.loggers import TensorBoardLogger, WandbLogger +from typing import Any, List + import pytorch_lightning as pl +from pytorch_lightning.loggers import TensorBoardLogger, WandbLogger + -def log_images( - logger: pl.loggers.logger.Logger, - key: str, - images: List[Any], **kwags: Any) -> None: +def log_images(logger: pl.loggers.logger.Logger, key: str, images: List[Any], **kwags: Any) -> None: """Log images to TensorBoard or WandB. For TensorBoard: images (torch.Tensor, numpy.ndarray, or string/blobname) @@ -28,4 +27,4 @@ def log_images( logger.log_image(key, images=images, **kwags) else: - raise TypeError(f'Expected a TensorBoardLogger or WandbLogger, found {type(logger)}') + raise TypeError(f"Expected a TensorBoardLogger or WandbLogger, found {type(logger)}") diff --git a/src/hadml/models/cgan/cond_event_gan.py b/src/hadml/models/cgan/cond_event_gan.py index e95b01d..dbd4f4d 100644 --- a/src/hadml/models/cgan/cond_event_gan.py +++ b/src/hadml/models/cgan/cond_event_gan.py @@ -1,11 +1,11 @@ -from typing import Any, List, Optional, Dict, Callable, Tuple import os +from typing import Any, Callable, Dict, List, Optional, Tuple +import numpy as np import torch from pytorch_lightning import LightningModule from scipy import stats -from torchmetrics import MinMetric, MeanMetric -import numpy as np +from torchmetrics import MeanMetric, MinMetric from hadml.metrics.media_logger import log_images from hadml.models.components.transform import InvsBoost @@ -22,7 +22,8 @@ class CondEventGANModule(LightningModule): Have not considered the particle types for now. 
- Parameters: + Parameters + ---------- noise_dim: dimension of noise vector generator: generator network discriminator: discriminator network @@ -91,9 +92,7 @@ def __init__( def forward( self, cond_info: Optional[torch.Tensor] = None ) -> Tuple[torch.Tensor, torch.Tensor]: - noise = torch.randn( - len(cond_info), self.hparams.noise_dim, device=cond_info.device - ) + noise = torch.randn(len(cond_info), self.hparams.noise_dim, device=cond_info.device) cond_info = self.generator_prescale(cond_info) x_fake = conditional_cat(cond_info, noise, dim=1) fakes = self.generator(x_fake) @@ -101,12 +100,8 @@ def forward( return fakes def configure_optimizers(self): - opt_gen = self.hparams.optimizer_generator( - params=self.generator.parameters() - ) # type: ignore - opt_disc = self.hparams.optimizer_discriminator( - params=self.discriminator.parameters() - ) # type: ignore + opt_gen = self.hparams.optimizer_generator(params=self.generator.parameters()) # type: ignore + opt_disc = self.hparams.optimizer_discriminator(params=self.discriminator.parameters()) # type: ignore # define schedulers if self.hparams.scheduler_generator is not None: @@ -237,9 +232,7 @@ def step(self, batch: Any, batch_idx: int) -> Dict[str, Any]: observed_event_label = observed_event_label.cpu().detach().numpy() distances = [ - stats.wasserstein_distance( - hadrons_predictions[:, idx], hadrons_truths[:, idx] - ) + stats.wasserstein_distance(hadrons_predictions[:, idx], hadrons_truths[:, idx]) for idx in range(4) ] wd_distance = sum(distances) / len(distances) @@ -344,26 +337,18 @@ def validation_epoch_end(self, validation_step_outputs): generated_event_label = ( perf["generated_event_label"] if len(generated_event_label) == 0 - else np.concatenate( - ( - generated_event_label, - perf["generated_event_label"] - + generated_event_label[-1] - + 1, - ) - ) + else np.concatenate(( + generated_event_label, + perf["generated_event_label"] + generated_event_label[-1] + 1, + )) ) observed_event_label = ( perf["observed_event_label"] if len(observed_event_label) == 0 - else np.concatenate( - ( - observed_event_label, - perf["observed_event_label"] - + observed_event_label[-1] - + 1 - ) - ) + else np.concatenate(( + observed_event_label, + perf["observed_event_label"] + observed_event_label[-1] + 1, + )) ) self.compare( angles_predictions, @@ -374,22 +359,26 @@ def validation_epoch_end(self, validation_step_outputs): ) if self.current_epoch == 0: os.makedirs(self.hparams.outdir, exist_ok=True) - np.savez_compressed(os.path.join(self.hparams.outdir, "initial.npz"), - angles_predictions=angles_predictions, - angles_truths=angles_truths, - hadrons_predictions=hadrons_predictions, - hadrons_truths=hadrons_truths, - generated_event_label=generated_event_label, - observed_event_label=observed_event_label) + np.savez_compressed( + os.path.join(self.hparams.outdir, "initial.npz"), + angles_predictions=angles_predictions, + angles_truths=angles_truths, + hadrons_predictions=hadrons_predictions, + hadrons_truths=hadrons_truths, + generated_event_label=generated_event_label, + observed_event_label=observed_event_label, + ) if self.current_epoch == self.trainer.max_epochs - 1: os.makedirs(self.hparams.outdir, exist_ok=True) - np.savez_compressed(os.path.join(self.hparams.outdir, "final.npz"), - angles_predictions=angles_predictions, - angles_truths=angles_truths, - hadrons_predictions=hadrons_predictions, - hadrons_truths=hadrons_truths, - generated_event_label=generated_event_label, - observed_event_label=observed_event_label) + np.savez_compressed( 
+ os.path.join(self.hparams.outdir, "final.npz"), + angles_predictions=angles_predictions, + angles_truths=angles_truths, + hadrons_predictions=hadrons_predictions, + hadrons_truths=hadrons_truths, + generated_event_label=generated_event_label, + observed_event_label=observed_event_label, + ) def test_step(self, batch: Any, batch_idx: int): """Test step""" @@ -442,26 +431,18 @@ def test_epoch_end(self, test_step_outputs): generated_event_label = ( perf["generated_event_label"] if len(generated_event_label) == 0 - else np.concatenate( - ( - generated_event_label, - perf["generated_event_label"] - + generated_event_label[-1] - + 1 - ) - ) + else np.concatenate(( + generated_event_label, + perf["generated_event_label"] + generated_event_label[-1] + 1, + )) ) observed_event_label = ( perf["observed_event_label"] if len(observed_event_label) == 0 - else np.concatenate( - ( - observed_event_label, - perf["observed_event_label"] - + observed_event_label[-1] - + 1 - ) - ) + else np.concatenate(( + observed_event_label, + perf["observed_event_label"] + observed_event_label[-1] + 1, + )) ) self.compare( angles_predictions, @@ -472,11 +453,12 @@ def test_epoch_end(self, test_step_outputs): ) os.makedirs(self.hparams.outdir, exist_ok=True) - np.savez_compressed(os.path.join(self.hparams.outdir, "best.npz"), - angles_predictions=angles_predictions, - angles_truths=angles_truths, - hadrons_predictions=hadrons_predictions, - hadrons_truths=hadrons_truths, - generated_event_label=generated_event_label, - observed_event_label=observed_event_label) - + np.savez_compressed( + os.path.join(self.hparams.outdir, "best.npz"), + angles_predictions=angles_predictions, + angles_truths=angles_truths, + hadrons_predictions=hadrons_predictions, + hadrons_truths=hadrons_truths, + generated_event_label=generated_event_label, + observed_event_label=observed_event_label, + ) diff --git a/src/hadml/models/cgan/cond_particle_gan.py b/src/hadml/models/cgan/cond_particle_gan.py index 3991226..97064c4 100644 --- a/src/hadml/models/cgan/cond_particle_gan.py +++ b/src/hadml/models/cgan/cond_particle_gan.py @@ -1,28 +1,30 @@ -from typing import Any, List, Optional, Dict, Callable, Tuple +import os +from typing import Any, Callable, Dict, List, Optional, Tuple import numpy as np +import ot import torch import torch.nn.functional as F from pytorch_lightning import LightningModule -from torchmetrics import MinMetric, MeanMetric from torch.optim import Optimizer -import ot -import os +from torchmetrics import MeanMetric, MinMetric from hadml.metrics.media_logger import log_images +from hadml.models.components.mlp import OneHotEmbeddingModule from hadml.models.components.transform import InvsBoost from hadml.utils.utils import ( - get_wasserstein_grad_penalty, conditional_cat, - get_r1_grad_penalty, get_one_hot, + get_r1_grad_penalty, + get_wasserstein_grad_penalty, ) -from hadml.models.components.mlp import OneHotEmbeddingModule class CondParticleGANModule(LightningModule): """Conditional GAN predicting particle momenta and types. 
- Parameters: + + Parameters + ---------- noise_dim: dimension of noise vector num_particle_ids: maximum number of particle types num_output_particles: number of outgoing particles @@ -132,19 +134,15 @@ def forward( else: particle_kinematics, particle_types = self._call_mlp_generator(x_fake) particle_kinematics = torch.tanh(particle_kinematics) - particle_types = F.gumbel_softmax(particle_types, - self.current_gumbel_temp) + particle_types = F.gumbel_softmax(particle_types, self.current_gumbel_temp) return particle_kinematics, particle_types - def _call_mlp_particle_generator( self, x_fake: torch.Tensor ) -> Tuple[torch.Tensor, torch.Tensor]: return self.generator(x_fake) - def _call_mlp_generator( - self, x_fake: torch.Tensor - ) -> Tuple[torch.Tensor, torch.Tensor]: + def _call_mlp_generator(self, x_fake: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: fakes = self.generator(x_fake) num_evts = x_fake.shape[0] @@ -232,13 +230,11 @@ def _discriminator_loss( elif loss_type == "bce": loss_disc = F.binary_cross_entropy_with_logits( score_real, torch.ones_like(score_real) - ) + F.binary_cross_entropy_with_logits( - score_fake, torch.zeros_like(score_fake) - ) + ) + F.binary_cross_entropy_with_logits(score_fake, torch.zeros_like(score_fake)) elif loss_type == "ls": - loss_disc = 0.5 * ((score_real - 1) ** 2).mean(0).view(1) + 0.5 * ( - score_fake**2 - ).mean(0).view(1) + loss_disc = 0.5 * ((score_real - 1) ** 2).mean(0).view(1) + 0.5 * (score_fake**2).mean( + 0 + ).view(1) else: raise ValueError(f"Unknown loss type: {loss_type}") return loss_disc @@ -255,9 +251,7 @@ def training_step(self, batch: Any, batch_idx: int, optimizer_idx: int): num_evts = x_momenta.shape[0] device = x_momenta.device - particle_type_data, x_generated = self._prepare_fake_batch( - cond_info, num_evts, device - ) + particle_type_data, x_generated = self._prepare_fake_batch(cond_info, num_evts, device) if optimizer_idx == 0: return self._discriminator_step( @@ -280,17 +274,12 @@ def _prepare_fake_batch( particle_kinematics, particle_types = self(noise, cond_info) if not isinstance(self.embedding_module, OneHotEmbeddingModule): raise NotImplementedError("Embedding module must be `OneHotEmbeddingModule`.") - else: - particle_types = particle_types.reshape( - particle_kinematics.shape[0], -1 - ) + particle_types = particle_types.reshape(particle_kinematics.shape[0], -1) x_generated = conditional_cat(cond_info, particle_kinematics, dim=1) return particle_types, x_generated - def _generator_step( - self, particle_type_data: torch.Tensor, x_generated: torch.Tensor - ): + def _generator_step(self, particle_type_data: torch.Tensor, x_generated: torch.Tensor): # x_generated = self.discriminator_prescale(x_generated) score_fakes = self.discriminator(x_generated, particle_type_data).squeeze(-1) loss_gen = self._generator_loss(score_fakes) @@ -315,9 +304,9 @@ def _discriminator_step( score_truth = self.discriminator(x_truth, x_type_data).squeeze(-1) # with fake batch # x_generated = self.discriminator_prescale(x_generated) - score_fakes = self.discriminator( - x_generated.detach(), particle_type_data.detach() - ).squeeze(-1) + score_fakes = self.discriminator(x_generated.detach(), particle_type_data.detach()).squeeze( + -1 + ) loss_disc = self._discriminator_loss(score_truth, score_fakes) r1_grad_penalty, wasserstein_grad_penalty = self._get_grad_penalties( @@ -331,15 +320,11 @@ def _log_metrics(self, loss_disc, r1_grad_penalty, wasserstein_grad_penalty): self.train_loss_disc(loss_disc) self.log("lossD", loss_disc, prog_bar=True) 
if self.wasserstein_reg > 0: - self.log( - "wasserstein_grad_penalty", wasserstein_grad_penalty, prog_bar=True - ) + self.log("wasserstein_grad_penalty", wasserstein_grad_penalty, prog_bar=True) if self.r1_reg > 0: self.log("r1_grad_penalty", r1_grad_penalty, prog_bar=True) - def _get_grad_penalties( - self, particle_type_data, x_generated, x_truth, x_type_data - ): + def _get_grad_penalties(self, particle_type_data, x_generated, x_truth, x_type_data): wasserstein_grad_penalty = 0.0 if self.wasserstein_reg > 0: wasserstein_grad_penalty = ( @@ -368,7 +353,6 @@ def training_epoch_end(self, outputs: List[Any]): def step(self, batch: Any, batch_idx: int) -> Dict[str, Any]: """Common steps for valiation and testing""" - cond_info, x_angles, x_type_indices, x_momenta, event_labels = batch num_evts, _ = x_angles.shape scaled_cond_info = self.generator_prescale(cond_info) @@ -411,20 +395,13 @@ def step(self, batch: Any, batch_idx: int) -> Dict[str, Any]: [particle_angles.detach().cpu().numpy(), fake_output_particles], axis=1, ), - np.concatenate( - [scaled_x_angles.detach().cpu().numpy(), true_output_particles], axis=1 - ), + np.concatenate([scaled_x_angles.detach().cpu().numpy(), true_output_particles], axis=1), n_projections=100, ) particle_angles = self.generator_postscale(particle_angles) particle_momenta = InvsBoost(cond_info[:, :4], particle_angles).reshape((-1, 4)) - predictions = ( - torch.cat([particle_angles, particle_type_idx], dim=1) - .cpu() - .detach() - .numpy() - ) + predictions = torch.cat([particle_angles, particle_type_idx], dim=1).cpu().detach().numpy() truths = torch.cat([x_angles, x_type_indices], dim=1).cpu().detach().numpy() return { @@ -436,7 +413,7 @@ def step(self, batch: Any, batch_idx: int) -> Dict[str, Any]: "particle_momenta": particle_momenta.cpu().detach().numpy(), "x_momenta": x_momenta.reshape((-1, 4)).cpu().detach().numpy(), "event_labels": event_labels.cpu().detach().numpy(), - "cond_info": cond_info.cpu().detach().numpy() + "cond_info": cond_info.cpu().detach().numpy(), } def compare(self, predictions, truths, x_momenta, particle_momenta, outname) -> None: @@ -465,7 +442,6 @@ def validation_step(self, batch: Any, batch_idx: int): return perf def validation_epoch_end(self, outputs: List[Any]): - swd_distance = self.val_swd.compute() particle_swd = self.val_particle_swd.compute() kinematic_swd = self.val_kinematic_swd.compute() @@ -474,9 +450,7 @@ def validation_epoch_end(self, outputs: List[Any]): self.val_min_avg_particle_swd(particle_swd) self.val_min_avg_kinematic_swd(kinematic_swd) self.log("val/swd", swd_distance, on_step=False, on_epoch=True, prog_bar=True) - self.log( - "val/type_swd", particle_swd, on_step=False, on_epoch=True, prog_bar=True - ) + self.log("val/type_swd", particle_swd, on_step=False, on_epoch=True, prog_bar=True) self.log( "val/kinematic_swd", kinematic_swd, @@ -507,7 +481,6 @@ def validation_epoch_end(self, outputs: List[Any]): or kinematic_swd <= self.val_kinematic_swd.compute() or particle_swd <= self.val_particle_swd.compute() ): - predictions = [] truths = [] particle_momenta = [] @@ -521,9 +494,7 @@ def validation_epoch_end(self, outputs: List[Any]): else np.concatenate((predictions, perf["predictions"])) ) truths = ( - perf["truths"] - if len(truths) == 0 - else np.concatenate((truths, perf["truths"])) + perf["truths"] if len(truths) == 0 else np.concatenate((truths, perf["truths"])) ) particle_momenta = ( perf["particle_momenta"] @@ -556,22 +527,26 @@ def validation_epoch_end(self, outputs: List[Any]): if self.current_epoch == 
0: os.makedirs(self.hparams.outdir, exist_ok=True) - np.savez_compressed(os.path.join(self.hparams.outdir, "initial.npz"), - predictions=predictions, - truths=truths, - x_momenta=x_momenta, - particle_momenta=particle_momenta, - event_labels=event_labels, - cond_info=cond_info) + np.savez_compressed( + os.path.join(self.hparams.outdir, "initial.npz"), + predictions=predictions, + truths=truths, + x_momenta=x_momenta, + particle_momenta=particle_momenta, + event_labels=event_labels, + cond_info=cond_info, + ) if self.current_epoch == self.trainer.max_epochs - 1: os.makedirs(self.hparams.outdir, exist_ok=True) - np.savez_compressed(os.path.join(self.hparams.outdir, "final.npz"), - predictions=predictions, - truths=truths, - x_momenta=x_momenta, - particle_momenta=particle_momenta, - event_labels=event_labels, - cond_info=cond_info) + np.savez_compressed( + os.path.join(self.hparams.outdir, "final.npz"), + predictions=predictions, + truths=truths, + x_momenta=x_momenta, + particle_momenta=particle_momenta, + event_labels=event_labels, + cond_info=cond_info, + ) def test_step(self, batch: Any, batch_idx: int): """Test step""" @@ -583,15 +558,12 @@ def test_step(self, batch: Any, batch_idx: int): return perf def test_epoch_end(self, outputs: List[Any]): - swd_distance = self.test_swd.compute() particle_swd = self.test_particle_swd.compute() kinematic_swd = self.test_kinematic_swd.compute() self.log("test/swd", swd_distance, on_step=False, on_epoch=True, prog_bar=True) - self.log( - "test/type_swd", particle_swd, on_step=False, on_epoch=True, prog_bar=True - ) + self.log("test/type_swd", particle_swd, on_step=False, on_epoch=True, prog_bar=True) self.log( "test/kinematic_swd", kinematic_swd, @@ -617,9 +589,7 @@ def test_epoch_end(self, outputs: List[Any]): else np.concatenate((predictions, perf["predictions"])) ) truths = ( - perf["truths"] - if len(truths) == 0 - else np.concatenate((truths, perf["truths"])) + perf["truths"] if len(truths) == 0 else np.concatenate((truths, perf["truths"])) ) particle_momenta = ( perf["particle_momenta"] @@ -646,10 +616,12 @@ def test_epoch_end(self, outputs: List[Any]): self.compare(predictions, truths, x_momenta, particle_momenta, outname) os.makedirs(self.hparams.outdir, exist_ok=True) - np.savez_compressed(os.path.join(self.hparams.outdir, "best.npz"), - predictions=predictions, - truths=truths, - x_momenta=x_momenta, - particle_momenta=particle_momenta, - event_labels=event_labels, - cond_info=cond_info) + np.savez_compressed( + os.path.join(self.hparams.outdir, "best.npz"), + predictions=predictions, + truths=truths, + x_momenta=x_momenta, + particle_momenta=particle_momenta, + event_labels=event_labels, + cond_info=cond_info, + ) diff --git a/src/hadml/models/components/deep_set.py b/src/hadml/models/components/deep_set.py index 1ed81d0..8d47de8 100644 --- a/src/hadml/models/components/deep_set.py +++ b/src/hadml/models/components/deep_set.py @@ -1,8 +1,6 @@ from typing import List, Optional -import torch.nn as nn -from torch import Tensor - +from torch import Tensor, nn from torch_scatter import scatter_mean from hadml.models.components.mlp import MLPModule diff --git a/src/hadml/models/components/epic_gan.py b/src/hadml/models/components/epic_gan.py index 303e6a6..dfb221d 100644 --- a/src/hadml/models/components/epic_gan.py +++ b/src/hadml/models/components/epic_gan.py @@ -2,9 +2,10 @@ # https://github.com/uhh-pd-ml/EPiC-GAN/blob/main/models.py import torch -import torch.nn as nn import torch.nn.functional as F -import torch.nn.utils.weight_norm as 
weight_norm +from torch import nn +from torch.nn.utils import weight_norm + class EPiC_layer(nn.Module): """Permutation equivariant layer. @@ -12,6 +13,7 @@ class EPiC_layer(nn.Module): Equivariant layer with global concat & residual connections inside this module & weight_norm ordered: first update global, then local. """ + def __init__(self, local_in_dim, hid_dim, latent_dim): super(EPiC_layer, self).__init__() self.fc_global1 = weight_norm(nn.Linear(int(2 * hid_dim) + latent_dim, hid_dim)) @@ -19,19 +21,25 @@ def __init__(self, local_in_dim, hid_dim, latent_dim): self.fc_local1 = weight_norm(nn.Linear(local_in_dim + latent_dim, hid_dim)) self.fc_local2 = weight_norm(nn.Linear(hid_dim, hid_dim)) - def forward(self, x_global, x_local): # shapes: x_global[b,latent], x_local[b,n,latent_local] + def forward(self, x_global, x_local): # shapes: x_global[b,latent], x_local[b,n,latent_local] batch_size, n_points, latent_local = x_local.size() latent_global = x_global.size(1) x_pooled_mean = x_local.mean(1, keepdim=False) x_pooled_sum = x_local.sum(1, keepdim=False) x_pooledCATglobal = torch.cat([x_pooled_mean, x_pooled_sum, x_global], 1) - x_global1 = F.leaky_relu(self.fc_global1(x_pooledCATglobal)) # new intermediate step - x_global = F.leaky_relu(self.fc_global2(x_global1) + x_global) # with residual connection before AF - - x_global2local = x_global.view(-1, 1, latent_global).repeat(1, n_points, 1) # first add dimension, than expand it + x_global1 = F.leaky_relu(self.fc_global1(x_pooledCATglobal)) # new intermediate step + x_global = F.leaky_relu( + self.fc_global2(x_global1) + x_global + ) # with residual connection before AF + + x_global2local = x_global.view(-1, 1, latent_global).repeat( + 1, n_points, 1 + ) # first add dimension, than expand it x_localCATglobal = torch.cat([x_local, x_global2local], 2) - x_local1 = F.leaky_relu(self.fc_local1(x_localCATglobal)) # with residual connection before AF + x_local1 = F.leaky_relu( + self.fc_local1(x_localCATglobal) + ) # with residual connection before AF x_local = F.leaky_relu(self.fc_local2(x_local1) + x_local) return x_global, x_local @@ -43,13 +51,16 @@ class EPiC_generator(nn.Module): added same global and local usage in EPiC layer order: global first, then local """ + def __init__( - self, latent: int, - latent_local: int, - hid_d: int, - feats: int, - equiv_layers: int, - return_latent_space: bool): + self, + latent: int, + latent_local: int, + hid_d: int, + feats: int, + equiv_layers: int, + return_latent_space: bool, + ): """EPiC Generator with Variable Number of Equivariant Layers Parameters: latent (int): latent vector size @@ -73,8 +84,7 @@ def __init__( self.local_1 = weight_norm(nn.Linear(hid_d, feats)) - - def forward(self, z_global, z_local): # shape: [batch, points, feats] + def forward(self, z_global, z_local): # shape: [batch, points, feats] batch_size, _, _ = z_local.size() latent_tensor = z_global.clone().reshape(batch_size, 1, -1) @@ -88,17 +98,21 @@ def forward(self, z_global, z_local): # shape: [batch, points, feats] # equivariant connections, each one_hot conditined for i in range(self.equiv_layers): - z_global, z_local = self.nn_list[i](z_global, z_local) # contains residual connection - z_global, z_local = z_global + z_global_in, z_local + z_local_in # skip connection to sampled input - latent_tensor = torch.cat([latent_tensor, z_global.clone().reshape(batch_size, 1, -1)], 1) + z_global, z_local = self.nn_list[i](z_global, z_local) # contains residual connection + z_global, z_local = ( + z_global + z_global_in, + 
z_local + z_local_in, + ) # skip connection to sampled input + latent_tensor = torch.cat( + [latent_tensor, z_global.clone().reshape(batch_size, 1, -1)], 1 + ) # final local NN to get down to input feats size out = self.local_1(z_local) if self.return_latent_space: return out, latent_tensor - else: - return out # [batch, points, feats] + return out # [batch, points, feats] class EPiC_discriminator(nn.Module): @@ -106,6 +120,7 @@ class EPiC_discriminator(nn.Module): Discriminator: Deep Sets like 3 + 3 layer with residual connections & weight_norm & mix(mean/sum/max) pooling & NO multipl. cond. """ + def __init__(self, hid_d, feats, equiv_layers, latent): super().__init__() self.equiv_layers = equiv_layers @@ -138,7 +153,7 @@ def forward(self, x): # equivariant connections for i in range(self.equiv_layers): - x_global, x_local = self.nn_list[i](x_global, x_local) # contains residual connection + x_global, x_local = self.nn_list[i](x_global, x_local) # contains residual connection x_mean = x_local.mean(1, keepdim=False) # mean over points dim. x_sum = x_local.sum(1, keepdim=False) # sum over points dim. diff --git a/src/hadml/models/components/mlp.py b/src/hadml/models/components/mlp.py index 0346c50..d34596c 100644 --- a/src/hadml/models/components/mlp.py +++ b/src/hadml/models/components/mlp.py @@ -1,4 +1,5 @@ """Multilayer Perceptron (MLP) module.""" + from typing import List, Optional, Tuple try: @@ -7,8 +8,8 @@ from more_itertools import pairwise import torch -import torch.nn as nn import torch.nn.functional as F +from torch import nn def build_linear_layers( @@ -20,7 +21,6 @@ def build_linear_layers( last_activation: Optional[torch.nn.Module] = None, leaky_ratio: float = 0.2, ) -> List[nn.Module]: - layer_list = [ torch.nn.Linear(input_dim, hidden_dims[0]), torch.nn.LeakyReLU(leaky_ratio), @@ -84,9 +84,7 @@ def __init__( # build the linear model self.encoder = nn.Sequential( - *build_linear_layers( - input_dim, hidden_dims, hidden_dims[-1], layer_norm, dropout - ) + *build_linear_layers(input_dim, hidden_dims, hidden_dims[-1], layer_norm, dropout) ) self.particle_type = nn.Sequential( *build_linear_layers( diff --git a/src/hadml/models/components/mlp_res.py b/src/hadml/models/components/mlp_res.py index eed33df..c895a25 100644 --- a/src/hadml/models/components/mlp_res.py +++ b/src/hadml/models/components/mlp_res.py @@ -1,11 +1,13 @@ """Residual Network (ResNet) implementation Based on MLPs.""" + try: from itertools import pairwise except ImportError: from more_itertools import pairwise import torch -import torch.nn as nn +from torch import nn + class ResidualBlock(nn.Module): def __init__( @@ -14,9 +16,7 @@ def __init__( ): super().__init__() self.model = nn.Sequential( - nn.Linear(input_dim, input_dim), - nn.BatchNorm1d(input_dim), - nn.ReLU() + nn.Linear(input_dim, input_dim), nn.BatchNorm1d(input_dim), nn.ReLU() ) def forward(self, x) -> torch.Tensor: @@ -34,10 +34,15 @@ def build_layers(input_dim, hidden_dims, output_dim): class ResMLPModule(nn.Module): - def __init__(self, input_dim, hidden_dims, output_dim, - dropout=0.0, # not used. - layer_norm=True, # not used. - last_activation=None): + def __init__( + self, + input_dim, + hidden_dims, + output_dim, + dropout=0.0, # not used. + layer_norm=True, # not used. 
+ last_activation=None, + ): super().__init__() layers = build_layers(input_dim, hidden_dims, output_dim) @@ -46,6 +51,5 @@ def __init__(self, input_dim, hidden_dims, output_dim, self.model = nn.Sequential(*layers) - def forward(self, x) -> torch.Tensor: return self.model(x) diff --git a/src/hadml/models/components/simple_dense_net.py b/src/hadml/models/components/simple_dense_net.py index 4586bf4..a276b9e 100644 --- a/src/hadml/models/components/simple_dense_net.py +++ b/src/hadml/models/components/simple_dense_net.py @@ -1,5 +1,6 @@ from torch import nn + class SimpleDenseNet(nn.Module): def __init__( self, diff --git a/src/hadml/models/components/transform.py b/src/hadml/models/components/transform.py index 8894890..bc353c2 100644 --- a/src/hadml/models/components/transform.py +++ b/src/hadml/models/components/transform.py @@ -1,8 +1,7 @@ from typing import List import torch -import torch.nn as nn -from torch import Tensor +from torch import Tensor, nn def InvsBoost( @@ -19,10 +18,10 @@ def InvsBoost( mass_1: rest mass of the first outgoing particle in GeV mass_2: rest mass of the second outgoing particle in GeV - Returns: + Returns + ------- 4 momentum of the two outgoing particles in the lab frame """ - device = cluster.device E0, p0 = cluster[:, 0:1], cluster[:, 1:] diff --git a/src/hadml/models/node_embedding_module.py b/src/hadml/models/node_embedding_module.py index 9095552..e07df95 100644 --- a/src/hadml/models/node_embedding_module.py +++ b/src/hadml/models/node_embedding_module.py @@ -1,12 +1,13 @@ -from typing import Optional, Callable, Tuple +from typing import Callable, Optional, Tuple import torch from pytorch_lightning import LightningModule -from torchmetrics import MinMetric, MeanMetric +from torchmetrics import MeanMetric, MinMetric + class ParticleEmbeddingModule(LightningModule): - """Metric Learning. Embedding nodes into a vector space so that similar nodes are close together. - """ + """Metric Learning. Embedding nodes into a vector space so that similar nodes are close together.""" + def __init__( self, net: torch.nn.Module, @@ -34,7 +35,6 @@ def on_train_start(self): self.test_min_loss.reset() def training_step(self, batch: Tuple[torch.Tensor, torch.Tensor], batch_idx: int): - _, _, x_type_indices = batch num_particles = x_type_indices.shape[1] type_encoding = [self.net(x_type_indices[:, idx]) for idx in range(num_particles)] diff --git a/src/hadml/utils/pylogger.py b/src/hadml/utils/pylogger.py index 92ffa71..55161d8 100644 --- a/src/hadml/utils/pylogger.py +++ b/src/hadml/utils/pylogger.py @@ -5,7 +5,6 @@ def get_pylogger(name=__name__) -> logging.Logger: """Initializes multi-GPU-friendly python command line logger.""" - logger = logging.getLogger(name) # this ensures all logging levels get marked with the rank zero decorator diff --git a/src/hadml/utils/rich_utils.py b/src/hadml/utils/rich_utils.py index 2390c36..038667a 100644 --- a/src/hadml/utils/rich_utils.py +++ b/src/hadml/utils/rich_utils.py @@ -1,5 +1,5 @@ +from collections.abc import Sequence from pathlib import Path -from typing import Sequence import rich import rich.syntax @@ -37,7 +37,6 @@ def print_config_tree( resolve (bool, optional): Whether to resolve reference fields of DictConfig. save_to_file (bool, optional): Whether to export config to the hydra output folder. 
""" - style = "dim" tree = rich.tree.Tree("CONFIG", style=style, guide_style=style) @@ -78,7 +77,6 @@ def print_config_tree( @rank_zero_only def enforce_tags(cfg: DictConfig, save_to_file: bool = False) -> None: """Prompts user to input tags from command line if no tags are provided in config.""" - if not cfg.get("tags"): if "id" in HydraConfig().cfg.hydra.job: raise ValueError("Specify tags before launching a multirun!") diff --git a/src/hadml/utils/utils.py b/src/hadml/utils/utils.py index 8715de7..e2636c7 100644 --- a/src/hadml/utils/utils.py +++ b/src/hadml/utils/utils.py @@ -1,17 +1,18 @@ import time import warnings +from collections.abc import Iterable from importlib.util import find_spec from pathlib import Path -from typing import Callable, List, Iterable, Optional, Union +from typing import Callable, List, Optional, Union import hydra import numpy as np import torch from omegaconf import DictConfig from pytorch_lightning import Callback -from pytorch_lightning.loggers import LightningLoggerBase +from pytorch_lightning.loggers.logger import Logger from pytorch_lightning.utilities import rank_zero_only -from torch import nn, Tensor +from torch import Tensor, nn from hadml.utils import pylogger, rich_utils @@ -32,7 +33,6 @@ def task_wrapper(task_func: Callable) -> Callable: """ def wrap(cfg: DictConfig): - # apply extra utilities extras(cfg) @@ -45,12 +45,8 @@ def wrap(cfg: DictConfig): raise ex finally: path = Path(cfg.paths.output_dir, "exec_time.log") - content = ( - f"'{cfg.task_name}' execution time: {time.time() - start_time} (s)" - ) - save_file( - path, content - ) # save task execution time (even if exception occurs) + content = f"'{cfg.task_name}' execution time: {time.time() - start_time} (s)" + save_file(path, content) # save task execution time (even if exception occurs) close_loggers() # close loggers (even if exception occurs so multirun won't fail) log.info(f"Output dir: {cfg.paths.output_dir}") @@ -68,7 +64,6 @@ def extras(cfg: DictConfig) -> None: - Setting tags from command line - Rich config printing """ - # return if no `extras` config if not cfg.get("extras"): log.warning("Extras config not found! ") @@ -116,9 +111,9 @@ def instantiate_callbacks(callbacks_cfg: DictConfig) -> List[Callback]: return callbacks -def instantiate_loggers(logger_cfg: DictConfig) -> List[LightningLoggerBase]: +def instantiate_loggers(logger_cfg: DictConfig) -> List[Logger]: """Instantiates loggers from config.""" - logger: List[LightningLoggerBase] = [] + logger: List[Logger] = [] if not logger_cfg: log.warning("Logger config is empty.") @@ -142,7 +137,6 @@ def log_hyperparameters(object_dict: dict) -> None: Additionally saves: - Number of model parameters """ - hparams = {} cfg = object_dict["cfg"] @@ -181,7 +175,6 @@ def log_hyperparameters(object_dict: dict) -> None: def get_metric_value(metric_dict: dict, metric_name: str) -> float: """Safely retrieves value of the metric logged in LightningModule.""" - if not metric_name: log.info("Metric name is None! 
Skipping metric value retrieval...") return None @@ -201,7 +194,6 @@ def get_metric_value(metric_dict: dict, metric_name: str) -> float: def close_loggers() -> None: """Makes sure all loggers closed properly (prevents logging failure during multirun).""" - log.info("Closing loggers...") if find_spec("wandb"): # if wandb is installed @@ -217,14 +209,14 @@ def get_wasserstein_grad_penalty( real_inputs: Union[Iterable[torch.Tensor], torch.Tensor], fake_inputs: Union[Iterable[torch.Tensor], torch.Tensor], ): - """Gradient penalty from https://arxiv.org/abs/1704.00028""" + """Gradient penalty from https://arxiv.org/abs/1704.00028.""" if isinstance(real_inputs, torch.Tensor): real_inputs = [real_inputs] if isinstance(fake_inputs, torch.Tensor): fake_inputs = [fake_inputs] - if (len(real_inputs) != len(fake_inputs)) or np.any( - [real.shape != fake.shape for real, fake in zip(real_inputs, fake_inputs)] - ): + if (len(real_inputs) != len(fake_inputs)) or np.any([ + real.shape != fake.shape for real, fake in zip(real_inputs, fake_inputs) + ]): raise ValueError("Inputs must match in length and shapes!") device = real_inputs[0].device @@ -246,9 +238,7 @@ def get_wasserstein_grad_penalty( return gradient_penalty -def get_r1_grad_penalty( - D: nn.Module, real_inputs: Union[Iterable[torch.Tensor], torch.Tensor] -): +def get_r1_grad_penalty(D: nn.Module, real_inputs: Union[Iterable[torch.Tensor], torch.Tensor]): """Gradient penalty from https://arxiv.org/abs/1801.04406""" if isinstance(real_inputs, torch.Tensor): real_inputs = [real_inputs] diff --git a/test_data.ipynb b/test_data.ipynb deleted file mode 100644 index f355970..0000000 --- a/test_data.ipynb +++ /dev/null @@ -1,165 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [], - "source": [ - "import torch\n", - "from hadml import utils, datamodules\n", - "from hadml.datamodules.components.herwig import HerwigEventMultiHadronDataset\n", - "import matplotlib.pyplot as plt" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Loading existing pids map: data/Herwig/pids_to_ix.pkl\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Processing...\n", - "Done!\n" - ] - } - ], - "source": [ - "data = HerwigEventMultiHadronDataset(\n", - " \"data/Herwig\",\n", - " raw_file_list=[\"AllClusters_paper4.dat\"],\n", - " processed_file_name=\"herwig_multihadron_graph_obs_data_variation.pt\",\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "Data(x=[13, 8], had_kin_rest_frame=[30, 4], had_kin=[30, 4], had_type_indices=[30, 1], cluster_labels=[30])" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "data[0]" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [], - "source": [ - "n_clusters = []\n", - "n_hadrons = []\n", - "# n_hadrons_per_cluster = []\n", - "had_kin = []\n", - "for d in data:\n", - " n_clusters.append(len(d.x))\n", - " n_hadrons.append(len(d.had_kin))\n", - " had_kin.append(d.had_kin)\n", - "had_kin = torch.cat(had_kin)" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAioAAAGwCAYAAACHJU4LAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAAApXUlEQVR4nO3deXQUZaL+8acJ0IGQBINAyBAgyL5vjhIYFtkuMghHHURRNlE5BgmENXpZFZKgclVAFBfAO8Ny5gLuIMgSBGQLZMAQdggZFpHFNAQNkNTvDw/9sycB0rFDvSHfzzl9jvVWdfXTJdDPeauq22FZliUAAAADlbA7AAAAwM1QVAAAgLEoKgAAwFgUFQAAYCyKCgAAMBZFBQAAGIuiAgAAjFXS7gB/RE5Ojk6dOqXAwEA5HA674wAAgHywLEuXLl1SWFiYSpS49ZxJkS4qp06dUnh4uN0xAABAAaSnp6tq1aq33KZIF5XAwEBJv73RoKAgm9MAAID8cLlcCg8Pd3+O30qRLio3TvcEBQVRVAAAKGLyc9kGF9MCAABjUVQAAICxKCoAAMBYFBUAAGAsigoAADAWRQUAABiLogIAAIxFUQEAAMaiqAAAAGNRVAAAgLEoKgAAwFi2FpXJkyfL4XB4PEJDQ+2MBAAADGL7jxI2bNhQ3377rXvZz8/PxjQAAMAktheVkiVLMosCAADyZPs1KocOHVJYWJgiIiLUt29fHT169KbbZmVlyeVyeTwAAMDdy9YZlQceeECffPKJ6tSpox9//FGvvfaaIiMjlZKSogoVKuTaPi4uTlOmTLlj+WqM/+qOvZavHI/vYXcEAAB8xmFZlmV3iBsyMzN13333aezYsYqJicm1PisrS1lZWe5ll8ul8PBwZWRkKCgoyOd5KCoAAPiey+VScHBwvj6/bb9G5fcCAgLUuHFjHTp0KM/1TqdTTqfzDqcCAAB2sf0ald/LyspSamqqqlSpYncUAABgAFuLyujRo5WYmKhjx45p27Ztevzxx+VyuTRgwAA7YwEAAEPYeurn3//+t5588kmdO3dOFStW1IMPPqitW7eqevXqdsYCAACGsLWoLFmyxM6XBwAAhjPqGhUAAIDfo6gAAABjUVQAAICxKCoAAMBYFBUAAGAsigoAADAWRQUAABiLogIAAIxFUQEAAMaiqAAAAGNRVAAAgLEoKgAAwFgUFQAAYCyKCgAAMBZFBQAAGIuiAgAAjEVRAQAAxqKoAAAAY1FUAACAsSgqAADAWBQVAABgLIoKAAAwFkUFAAAYi6ICAACMRVEBAADGoqgAAABjUVQAAICxKCoAAMBYFBUAAGAsigoAADAWRQUAABiLogIAAIxFUQEAAMaiqAAAAGNRVAAAgLEoKgAAwFgUFQAAYCyKCgAAMBZFBQAAGIuiAgAAjEVRAQAAxqKoAAAAY1FUAACAsSgqAADAWBQVAABgLIoKAAAwFkUFAAAYi6ICAACMRVEBAADGoqgAAABjUVQAAICxKCoAAMBYFBUAAGAsigoAADAWRQUAABiLogIAAIxFUQEAAMaiqAAAAGNRVAAAgLEoKgAAwFjGFJW4uDg5HA6NGDHC7igAAMAQRhSVHTt2aN68eWrSpIndUQAAgEFsLyqXL19Wv3799MEHH+iee+655bZZWVlyuVweDwAAcPeyvahERUWpR48e6ty58223jYuLU3BwsPsRHh5+BxICAAC72FpUlixZol27dikuLi5f28fGxiojI8P9SE9PL+SEAADATiXteuH09HRFR0dr9erV8vf3z9dznE6nnE5nIScDAACmsK2oJCUl6ezZs2rZsqV7LDs7Wxs3btTs2bOVlZUlPz8/u+IBAAAD2FZUOnXqpL1793qMDRo0SPXq1dO4ceMoKQAAwL6iEhgYqEaNGnmMBQQEqEKFCrnGAQBA8WT7XT8AAAA3Y9uMSl42bNhgdwQAAGAQZlQAAICxKCoAAMBYFBUAAGAsigoAADAWRQUAABiLogIAAIxFUQEAAMaiqAAAAGNRVAAAgLEoKgAAwFgUFQAAYCyKCgAAMBZFBQAAGIuiAgAAjEVRAQAAxqKoAAAAY1FUAACAsSgqAADAWBQVAABgLIoKAAAwFkUFAAAYi6ICAACMRVEBAADGoqgAAABjUVQAAICxKCoAAMBYFBUAAGAsigoAADAWRQUAABjL66KycOFCffXVV+7lsWPHqnz58oqMjFRaWppPwwEAgOLN66Iyffp0lSlTRpL0/fffa/bs2ZoxY4buvfdejRw50ucBAQBA8VXS2yekp6erVq1akqRPP/1Ujz/+uJ5//nm1adNGHTp08HU+AABQjHk9o1KuXDmdP39ekrR69Wp17txZkuTv769ffvnFt+kAAECx5vWMSpcuXTRkyBA1b95cBw8eVI8ePSRJKSkpqlGjhq/zAQCAYszrGZU5c+aodevW+umnn7Rs2TJVqFBBkpSUlKQnn3zS5wEBAEDx5fWMisvl0jvvvKMSJTw7zuTJk5Wenu6zYAAAAF7PqEREROjcuXO5xi9cuKCIiAifhAIAAJAKUFQsy8pz/PLly/L39//DgQAAAG7I96mfmJgYSZLD4dDEiRNVtmxZ97rs7Gxt27ZNzZo183lAAABQfOW7qOzevVvSbzMqe/fuVenSpd3rSpcuraZNm2r06NG+TwgAAIqtfBeV9evXS5IGDRqkt99+W0FBQYUWCgAAQCrAXT/z588vjBwAAAC5eF1UMjMzFR8fr7Vr1+rs2bPKycnxWH/06FGfhQMAAMWb10VlyJAhSkxM1DPPPKMqVarI4XAURi4AAADvi8rKlSv11VdfqU2bNoWRBwAAwM3r71G55557FBISUhhZAAAAPHhdVF599VVNnDhRV65cKYw8AAAAbl6f+nnzzTd15MgRVa5cWTVq1FCpUqU81u/atctn4QAAQPHmdVHp3bt3IcQAAADIzeuiMmnSpMLIAQAAkIvX16hI0s8//6wPP/xQsbGxunDhgqTfTvmcPHnSp+EAAEDx5vWMyp49e9S5c2cFBwfr+PHjeu655xQSEqIVK1YoLS1Nn3zySWHkBAAAxZDXMyoxMTEaOHCgDh06JH9/f/d49+7dtXHjRp+GAwAAxZvXRWXHjh164YUXco3/6U9/0pkzZ3wSCgAAQCpAUfH395fL5co1fuDAAVWsWNEnoQAAAKQCFJVevXpp6tSpunbtmiTJ4XDoxIkTGj9+vB577DGfBwQAAMWX10XljTfe0E8//aRKlSrpl19+Ufv27VWrVi0FBgZq2rRphZERAAAUU17f9RMUFKRNmzZp3bp12rVrl3JyctSiRQt17ty5MPIBAIBizOuicvz4cdWoUUMPPfSQHnroocLIBAAAIKkAp35q1qyptm3b6v3333d/2VtBzZ07V02aNFFQUJCCgoLUunVrrVy58g/tEwAA3D28Lio7d+5U69at9dprryksLEy9evXSP//5T2VlZXn94lWrVlV8fLx27typnTt36qGHHlKvXr2UkpLi9b4AAMDdx2FZllWQJ1qWpQ0bNmjRokVatmyZsrOz9dhjj+
njjz/+Q4FCQkL0+uuv69lnn73tti6XS8HBwcrIyFBQUNAfet281Bj/lc/3WdiOx/ewOwIAALfkzed3gX7rR/rttuSOHTvqgw8+0LfffquaNWtq4cKFBd2dsrOztWTJEmVmZqp169Z5bpOVlSWXy+XxAAAAdy+vL6a9IT09XYsXL9aiRYu0d+9etW7dWrNnz/Z6Pzee++uvv6pcuXJasWKFGjRokOe2cXFxmjJlSkEjFwvMAgEA7iZen/qZN2+e/vGPf2jz5s2qW7eu+vXrp6eeeko1atQoUICrV6/qxIkT+vnnn7Vs2TJ9+OGHSkxMzLOsZGVleVwL43K5FB4ezqmfIo6iAgDFizenfrwuKuHh4erbt6/69eunZs2a/ZGceercubPuu+8+vf/++7fdlmtU7g4UFQAoXrz5/Pb61M+JEyfkcDgKHO52LMsq0B1EAADg7uP1xbQOh0Pfffednn76abVu3VonT56UJP3v//6vNm3a5NW+Xn75ZX333Xc6fvy49u7dq1deeUUbNmxQv379vI0FAADuQl4XlWXLlqlbt24qU6aMdu/e7Z79uHTpkqZPn+7Vvn788Uc988wzqlu3rjp16qRt27Zp1apV6tKli7exAADAXcjrUz+vvfaa3nvvPfXv319Llixxj0dGRmrq1Kle7eujjz7y9uUBAEAx4vWMyoEDB9SuXbtc40FBQfr55599kQkAAEBSAYpKlSpVdPjw4VzjmzZtUs2aNX0SCgAAQCpAUXnhhRcUHR2tbdu2yeFw6NSpU/rHP/6h0aNH68UXXyyMjAAAoJjy+hqVsWPHKiMjQx07dtSvv/6qdu3ayel0avTo0Ro2bFhhZAQAAMVUgb5Cf9q0aXrllVe0b98+5eTkqEGDBipXrpyvswEAgGKuwL/1U7ZsWbVq1cqXWQAAADwU+NeTAQAAChtFBQAAGIuiAgAAjJWvotKiRQtdvHhRkjR16lRduXKlUEMBAABI+SwqqampyszMlCRNmTJFly9fLtRQAAAAUj7v+mnWrJkGDRqktm3byrIsvfHGGze9HXnixIk+DQgAAIqvfBWVBQsWaNKkSfryyy/lcDi0cuVKlSyZ+6kOh4OiAgAAfCZfRaVu3bruX0ouUaKE1q5dq0qVKhVqMAAAAK+/8C0nJ6cwcgAAAORSoG+mPXLkiN566y2lpqbK4XCofv36io6O1n333efrfAAAoBjz+ntUvvnmGzVo0EDbt29XkyZN1KhRI23btk0NGzbUmjVrCiMjAAAopryeURk/frxGjhyp+Pj4XOPjxo1Tly5dfBYOAAAUb17PqKSmpurZZ5/NNT548GDt27fPJ6EAAACkAhSVihUrKjk5Odd4cnIydwIBAACf8vrUz3PPPafnn39eR48eVWRkpBwOhzZt2qSEhASNGjWqMDICAIBiyuuiMmHCBAUGBurNN99UbGysJCksLEyTJ0/W8OHDfR4QAAAUX14XFYfDoZEjR2rkyJG6dOmSJCkwMNDnwQAAAAr0PSo3UFAAAEBh8vpiWgAAgDuFogIAAIxFUQEAAMbyqqhcu3ZNHTt21MGDBwsrDwAAgJtXRaVUqVL64Ycf5HA4CisPAACAm9enfvr376+PPvqoMLIAAAB48Pr25KtXr+rDDz/UmjVr1KpVKwUEBHisnzlzps/CAQCA4s3rovLDDz+oRYsWkpTrWhVOCQEAAF/yuqisX7++MHIAAADkUuDbkw8fPqxvvvlGv/zyiyTJsiyfhQIAAJAKUFTOnz+vTp06qU6dOnr44Yd1+vRpSdKQIUP49WQAAOBTXheVkSNHqlSpUjpx4oTKli3rHn/iiSe0atUqn4YDAADFm9fXqKxevVrffPONqlat6jFeu3ZtpaWl+SwYAACA1zMqmZmZHjMpN5w7d05Op9MnoQAAAKQCFJV27drpk08+cS87HA7l5OTo9ddfV8eOHX0aDgAAFG9en/p5/fXX1aFDB+3cuVNXr17V2LFjlZKSogsXLmjz5s2FkREAABRTXs+oNGjQQHv27NGf//xndenSRZmZmXr00Ue1e/du3XfffYWREQAAFFNez6hIUmhoqKZMmeLrLAAAAB4KVFQuXryojz76SKmpqXI4HKpfv74GDRqkkJAQX+cDAADFmNenfhITExUREaF33nlHFy9e1IULF/TOO+8oIiJCiYmJhZERAAAUU17PqERFRalPnz6aO3eu/Pz8JEnZ2dl68cUXFRUVpR9++MHnIQEAQPHk9YzKkSNHNGrUKHdJkSQ/Pz/FxMToyJEjPg0HAACKN6+LSosWLZSampprPDU1Vc2aNfNFJgAAAEn5PPWzZ88e938PHz5c0dHROnz4sB588EFJ0tatWzVnzhzFx8cXTkoAAFAsOSzLsm63UYkSJeRwOHS7TR0Oh7Kzs30W7nZcLpeCg4OVkZGhoKAgn++/xvivfL5P5HY8vofdEQAAd5A3n9/5mlE5duyYT4IBAAB4I19FpXr16oWdAwAAIJcCfeHbyZMntXnzZp09e1Y5OTke64YPH+6TYAAAAF4Xlfnz52vo0KEqXbq0KlSoIIfD4V7ncDgoKgAAwGe8LioTJ07UxIkTFRsbqxIlvL67GQAAIN+8bhpXrlxR3759KSkAAKDQed02nn32Wf3zn/8sjCwAAAAevD71ExcXp7/+9a9atWqVGjdurFKlSnmsnzlzps/CAQCA4s3rojJ9+nR98803qlu3riTlupgWAADAV7wuKjNnztTHH3+sgQMHFkIcAACA/8/ra1ScTqfatGnjkxePi4vT/fffr8DAQFWqVEm9e/fWgQMHfLJvAABQ9HldVKKjozVr1iyfvHhiYqKioqK0detWrVmzRtevX1fXrl2VmZnpk/0DAICizetTP9u3b9e6dev05ZdfqmHDhrkupl2+fHm+97Vq1SqP5fnz56tSpUpKSkpSu3btvI0GAADuMl4XlfLly+vRRx8tjCzKyMiQJIWEhOS5PisrS1lZWe5ll8tVKDkAAIAZCvQV+oXBsizFxMSobdu2atSoUZ7bxMXFacqUKYXy+gAAwDzGfL3ssGHDtGfPHi1evPim28TGxiojI8P9SE9Pv4MJAQDAneb1jEpERMQtvy/l6NGjXod46aWX9Pnnn2vjxo2qWrXqTbdzOp1yOp1e7x8AABRNXheVESNGeCxfu3ZNu3fv1qpVqzRmzBiv9mVZll566SWtWLFCGzZsUEREhLdxAADAXczrohIdHZ3n+Jw5c7Rz506v9hUVFaVFixbps88+U2BgoM6cOSNJCg4OVpkyZbyNBgAA7jI+u0ale/fuWrZsmVfPmTt3rjIyMtShQwdVqVLF/Vi6dKmvYgEAgCLM6xmVm/m///u/m95WfDOWZfnq5QEAwF3I66LSvHlzj4tpLcvSmTNn9NNPP+ndd9/1aTgAAFC8eV1Uevfu7bFcokQJVaxYUR06dFC9evV8lQsAAMD7ojJp0qTCyAEAAJCLMV/4BgAA8J/yPaNSokSJW37RmyQ5HA5dv379D4cCAACQvCgqK1asuOm6LVu2aNasWdzFAwAAfCrfRaVXr165x
vbv36/Y2Fh98cUX6tevn1599VWfhgMAAMVbga5ROXXqlJ577jk1adJE169fV3JyshYuXKhq1ar5Oh8AACjGvCoqGRkZGjdunGrVqqWUlBStXbtWX3zxhRo1alRY+QAAQDGW71M/M2bMUEJCgkJDQ7V48eI8TwUBAAD4ksPK5xWwJUqUUJkyZdS5c2f5+fnddLvly5f7LNztuFwuBQcHKyMjQ0FBQT7ff43xX/l8n8jteHwPuyMAAO4gbz6/8z2j0r9//9vengwAAOBL+S4qCxYsKMQYAAAAufHNtAAAwFgUFQAAYCyKCgAAMBZFBQAAGIuiAgAAjEVRAQAAxqKoAAAAY1FUAACAsSgqAADAWBQVAABgLIoKAAAwFkUFAAAYi6ICAACMRVEBAADGoqgAAABjUVQAAICxKCoAAMBYFBUAAGAsigoAADAWRQUAABiLogIAAIxFUQEAAMaiqAAAAGNRVAAAgLEoKgAAwFgUFQAAYCyKCgAAMBZFBQAAGIuiAgAAjEVRAQAAxqKoAAAAY1FUAACAsSgqAADAWBQVAABgLIoKAAAwFkUFAAAYi6ICAACMRVEBAADGoqgAAABjUVQAAICxKCoAAMBYFBUAAGAsigoAADAWRQUAABiLogIAAIxFUQEAAMaiqAAAAGPZWlQ2btyonj17KiwsTA6HQ59++qmdcQAAgGFsLSqZmZlq2rSpZs+ebWcMAABgqJJ2vnj37t3VvXv3fG+flZWlrKws97LL5SqMWAAAwBC2FhVvxcXFacqUKXbHAICbqjH+K7sjeO14fA+7IxQb/PnwXpG6mDY2NlYZGRnuR3p6ut2RAABAISpSMypOp1NOp9PuGAAA4A4pUjMqAACgeKGoAAAAY9l66ufy5cs6fPiwe/nYsWNKTk5WSEiIqlWrZmMyAABgAluLys6dO9WxY0f3ckxMjCRpwIABWrBggU2pAACAKWwtKh06dJBlWXZGAAAABuMaFQAAYCyKCgAAMBZFBQAAGIuiAgAAjEVRAQAAxqKoAAAAY1FUAACAsSgqAADAWBQVAABgLIoKAAAwFkUFAAAYi6ICAACMRVEBAADGoqgAAABjUVQAAICxKCoAAMBYFBUAAGAsigoAADAWRQUAABiLogIAAIxFUQEAAMaiqAAAAGNRVAAAgLEoKgAAwFgUFQAAYCyKCgAAMBZFBQAAGIuiAgAAjEVRAQAAxqKoAAAAY1FUAACAsSgqAADAWBQVAABgLIoKAAAwFkUFAAAYi6ICAACMRVEBAADGoqgAAABjUVQAAICxKCoAAMBYFBUAAGAsigoAADAWRQUAABiLogIAAIxFUQEAAMaiqAAAAGNRVAAAgLEoKgAAwFgUFQAAYCyKCgAAMBZFBQAAGIuiAgAAjEVRAQAAxqKoAAAAY1FUAACAsSgqAADAWBQVAABgLIoKAAAwFkUFAAAYy/ai8u677yoiIkL+/v5q2bKlvvvuO7sjAQAAQ9haVJYuXaoRI0bolVde0e7du/WXv/xF3bt314kTJ+yMBQAADGFrUZk5c6aeffZZDRkyRPXr19dbb72l8PBwzZ07185YAADAECXteuGrV68qKSlJ48eP9xjv2rWrtmzZkudzsrKylJWV5V7OyMiQJLlcrkLJmJN1pVD2C0+F9f8PsENR/HeDv4N3Dn8+PPdpWdZtt7WtqJw7d07Z2dmqXLmyx3jlypV15syZPJ8TFxenKVOm5BoPDw8vlIy4M4LfsjsBULzxdxC3Uph/Pi5duqTg4OBbbmNbUbnB4XB4LFuWlWvshtjYWMXExLiXc3JydOHCBVWoUOGmzykol8ul8PBwpaenKygoyKf7Lk44jr7BcfQNjqNvcBx9ozgfR8uydOnSJYWFhd12W9uKyr333is/P79csydnz57NNctyg9PplNPp9BgrX758YUWUJAUFBRW7P0CFgePoGxxH3+A4+gbH0TeK63G83UzKDbZdTFu6dGm1bNlSa9as8Rhfs2aNIiMjbUoFAABMYuupn5iYGD3zzDNq1aqVWrdurXnz5unEiRMaOnSonbEAAIAhbC0qTzzxhM6fP6+pU6fq9OnTatSokb7++mtVr17dzliSfjvNNGnSpFynmuAdjqNvcBx9g+PoGxxH3+A45o/Dys+9QQAAADaw/Sv0AQAAboaiAgAAjEVRAQAAxqKoAAAAY1FUficuLk7333+/AgMDValSJfXu3VsHDhywO1aRFxcXJ4fDoREjRtgdpcg5efKknn76aVWoUEFly5ZVs2bNlJSUZHesIuX69ev67//+b0VERKhMmTKqWbOmpk6dqpycHLujGW3jxo3q2bOnwsLC5HA49Omnn3qstyxLkydPVlhYmMqUKaMOHTooJSXFnrAGu9VxvHbtmsaNG6fGjRsrICBAYWFh6t+/v06dOmVfYANRVH4nMTFRUVFR2rp1q9asWaPr16+ra9euyszMtDtakbVjxw7NmzdPTZo0sTtKkXPx4kW1adNGpUqV0sqVK7Vv3z69+eabhf5tzHebhIQEvffee5o9e7ZSU1M1Y8YMvf7665o1a5bd0YyWmZmppk2bavbs2XmunzFjhmbOnKnZs2drx44dCg0NVZcuXXTp0qU7nNRstzqOV65c0a5duzRhwgTt2rVLy5cv18GDB/XII4/YkNRgFm7q7NmzliQrMTHR7ihF0qVLl6zatWtba9assdq3b29FR0fbHalIGTdunNW2bVu7YxR5PXr0sAYPHuwx9uijj1pPP/20TYmKHknWihUr3Ms5OTlWaGioFR8f7x779ddfreDgYOu9996zIWHR8J/HMS/bt2+3JFlpaWl3JlQRwIzKLWRkZEiSQkJCbE5SNEVFRalHjx7q3Lmz3VGKpM8//1ytWrXS3/72N1WqVEnNmzfXBx98YHesIqdt27Zau3atDh48KEn617/+pU2bNunhhx+2OVnRdezYMZ05c0Zdu3Z1jzmdTrVv315btmyxMVnRl5GRIYfDwczp79j+68mmsixLMTExatu2rRo1amR3nCJnyZIl2rVrl3bs2GF3lCLr6NGjmjt3rmJiYvTyyy9r+/btGj58uJxOp/r37293vCJj3LhxysjIUL169eTn56fs7GxNmzZNTz75pN3RiqwbPyb7nz8gW7lyZaWlpdkR6a7w66+/avz48XrqqaeK5Y8U3gxF5SaGDRumPXv2aNOmTXZHKXLS09MVHR2t1atXy9/f3+44RVZOTo5atWql6dOnS5KaN2+ulJQUzZ07l6LihaVLl+rvf/+7Fi1apIYNGyo5OVkjRoxQWFiYBgwYYHe8Is3hcHgsW5aVawz5c+3aNfXt21c5OTl699137Y5jFIpKHl566SV9/vnn2rhxo6pWrWp3nCInKSlJZ8+eVcuWLd1j2dnZ2rhxo2bPnq2srCz5+fnZmLBoqFKliho0aOAxVr9+fS1btsymREXTmDFjNH78ePXt21eS1LhxY6WlpSkuLo6iUkChoaGSfptZqVKlinv87NmzuWZZcHvXrl1Tnz59dOzYMa1bt47ZlP/ANSq/Y1mWhg0bpuXLl2vdunWKiIiw
O1KR1KlTJ+3du1fJycnuR6tWrdSvXz8lJydTUvKpTZs2uW6PP3jwoBE/2lmUXLlyRSVKeP5T5+fnx+3Jf0BERIRCQ0O1Zs0a99jVq1eVmJioyMhIG5MVPTdKyqFDh/Ttt9+qQoUKdkcyDjMqvxMVFaVFixbps88+U2BgoPs8bHBwsMqUKWNzuqIjMDAw13U9AQEBqlChAtf7eGHkyJGKjIzU9OnT1adPH23fvl3z5s3TvHnz7I5WpPTs2VPTpk1TtWrV1LBhQ+3evVszZ87U4MGD7Y5mtMuXL+vw4cPu5WPHjik5OVkhISGqVq2aRowYoenTp6t27dqqXbu2pk+frrJly+qpp56yMbV5bnUcw8LC9Pjjj2vXrl368ssvlZ2d7f7cCQkJUenSpe2KbRab7zoyiqQ8H/Pnz7c7WpHH7ckF88UXX1iNGjWynE6nVa9ePWvevHl2RypyXC6XFR0dbVWrVs3y9/e3atasab3yyitWVlaW3dGMtn79+jz/PRwwYIBlWb/dojxp0iQrNDTUcjqdVrt27ay9e/faG9pAtzqOx44du+nnzvr16+2ObgyHZVnWnSxGAAAA+cU1KgAAwFgUFQAAYCyKCgAAMBZFBQAAGIuiAgAAjEVRAQAAxqKoAAAAY1FUAACAsSgqAPJ0/PhxORwOJScn2x3Fbf/+/XrwwQfl7++vZs2aef18E98TgFujqACGGjhwoBwOh+Lj4z3GP/30UzkcDptS2WvSpEkKCAjQgQMHtHbtWrvjaMGCBSpfvrzdMYC7GkUFMJi/v78SEhJ08eJFu6P4zNWrVwv83CNHjqht27aqXr36XfUrs9nZ2fyaM3ATFBXAYJ07d1ZoaKji4uJuus3kyZNznQZ56623VKNGDffywIED1bt3b02fPl2VK1dW+fLlNWXKFF2/fl1jxoxRSEiIqlatqo8//jjX/vfv36/IyEj5+/urYcOG2rBhg8f6ffv26eGHH1a5cuVUuXJlPfPMMzp37px7fYcOHTRs2DDFxMTo3nvvVZcuXfJ8Hzk5OZo6daqqVq0qp9OpZs2aadWqVe71DodDSUlJmjp1qhwOhyZPnnzT/SQkJKhWrVpyOp2qVq2apk2blue2ec2I/OeM1b/+9S917NhRgYGBCgoKUsuWLbVz505t2LBBgwYNUkZGhhwOh0emq1evauzYsfrTn/6kgIAAPfDAAx7H7cbrfvnll2rQoIGcTqfS0tK0YcMG/fnPf1ZAQIDKly+vNm3aKC0tLc/sQHFBUQEM5ufnp+nTp2vWrFn697///Yf2tW7dOp06dUobN27UzJkzNXnyZP31r3/VPffco23btmno0KEaOnSo0tPTPZ43ZswYjRo1Srt371ZkZKQeeeQRnT9/XpJ0+vRptW/fXs2aNdPOnTu1atUq/fjjj+rTp4/HPhYuXKiSJUtq8+bNev/99/PM9/bbb+vNN9/UG2+8oT179qhbt2565JFHdOjQIfdrNWzYUKNGjdLp06c1evToPPcTGxurhIQETZgwQfv27dOiRYtUuXLlAh+3fv36qWrVqtqxY4eSkpI0fvx4lSpVSpGRkXrrrbcUFBSk06dPe2QaNGiQNm/erCVLlmjPnj3629/+pv/6r/9yvxdJunLliuLi4vThhx8qJSVFISEh6t27t9q3b689e/bo+++/1/PPP19sT/MBbnb/fDOAvA0YMMDq1auXZVmW9eCDD1qDBw+2LMuyVqxYYf3+r+6kSZOspk2bejz3f/7nf6zq1at77Kt69epWdna2e6xu3brWX/7yF/fy9evXrYCAAGvx4sWWZVnun6CPj493b3Pt2jWratWqVkJCgmVZljVhwgSra9euHq+dnp5uSbIOHDhgWZZltW/f3mrWrNlt329YWJg1bdo0j7H777/fevHFF93LTZs2tSZNmnTTfbhcLsvpdFoffPBBnutvvKfdu3dblmVZ8+fPt4KDgz22+c/jGxgYaC1YsCDP/eX1/MOHD1sOh8M6efKkx3inTp2s2NhY9/MkWcnJye7158+ftyRZGzZsuOn7A4ojZlSAIiAhIUELFy7Uvn37CryPhg0bqkSJ//9XvnLlymrcuLF72c/PTxUqVNDZs2c9nte6dWv3f5csWVKtWrVSamqqJCkpKUnr169XuXLl3I969epJ+u16khtatWp1y2wul0unTp1SmzZtPMbbtGnjfq38SE1NVVZWljp16pTv59xOTEyMhgwZos6dOys+Pt7jfeVl165dsixLderU8TguiYmJHs8tXbq0mjRp4l4OCQnRwIED1a1bN/Xs2VNvv/22Tp8+7bP3ARRVFBWgCGjXrp26deuml19+Ode6EiVKyLIsj7Fr167l2q5UqVIeyw6HI8+x/FzUeeN0RE5Ojnr27Knk5GSPx6FDh9SuXTv39gEBAbfd5+/3e4NlWV6d+ihTpky+t5Xyd+wmT56slJQU9ejRQ+vWrVODBg20YsWKm+4zJydHfn5+SkpK8jgmqampevvttz2y/ud7mz9/vr7//ntFRkZq6dKlqlOnjrZu3erVewLuNhQVoIiIj4/XF198oS1btniMV6xYUWfOnPH4wPXl94T8/oPy+vXrSkpKcs+atGjRQikpKapRo4Zq1arl8chvOZGkoKAghYWFadOmTR7jW7ZsUf369fO9n9q1a6tMmTL5vnW5YsWKunTpkjIzM91jeR27OnXqaOTIkVq9erUeffRRzZ8/X9JvsyLZ2dke2zZv3lzZ2dk6e/ZsrmMSGhp620zNmzdXbGystmzZokaNGmnRokX5ei/A3YqiAhQRjRs3Vr9+/TRr1iyP8Q4dOuinn37SjBkzdOTIEc2ZM0crV6702evOmTNHK1as0P79+xUVFaWLFy9q8ODBkqSoqChduHBBTz75pLZv366jR49q9erVGjx4cK4P8NsZM2aMEhIStHTpUh04cEDjx49XcnKyoqOj870Pf39/jRs3TmPHjtUnn3yiI0eOaOvWrfroo4/y3P6BBx5Q2bJl9fLLL+vw4cNatGiRFixY4F7/yy+/aNiwYdqwYYPS0tK0efNm7dixw12eatSoocuXL2vt2rU6d+6crly5ojp16qhfv37q37+/li9frmPHjmnHjh1KSEjQ119/fdPsx44dU2xsrL7//nulpaVp9erVOnjwoFdFDbgbUVSAIuTVV1/Ndaqifv36evfddzVnzhw1bdpU27dvv+kdMQURHx+vhIQENW3aVN99950+++wz3XvvvZKksLAwbd68WdnZ2erWrZsaNWqk6OhoBQcHe1wPkx/Dhw/XqFGjNGrUKDVu3FirVq3S559/rtq1a3u1nwkTJmjUqFGaOHGi6tevryeeeCLXdTc3hISE6O9//7u+/vprNW7cWIsXL/a47dnPz0/nz59X//79VadOHfXp00fdu3fXlClTJEmRkZEaOnSonnjiCVWsWFEzZsyQ9NspnP79+2vUqFGqW7euHnnkEW3btk3h4eE3zV22bFnt379fjz32mOrUqaPnn39ew4YN0wsvvODV+wfuNg7rP//VAwAAMAQzKgAAwFgUFQAAYCyKCgA
AMBZFBQAAGIuiAgAAjEVRAQAAxqKoAAAAY1FUAACAsSgqAADAWBQVAABgLIoKAAAw1v8DoM+PLxT9hrcAAAAASUVORK5CYII=", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjcAAAGwCAYAAABVdURTAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAwKElEQVR4nO3de1xVdaL///dWEbwAhsptRME0RvFCSmfE+xXDptHJc7Kmk5rp5FHTRDOxmbxUgqYe85J28da9mUHtopKcVMy8hcHRFM07fBUitUBpApH1+6Of+8wOVBbuzYbV6/l47MfD9Vlr7f1e67HMd+uyt80wDEMAAAAWUcvdAQAAAJyJcgMAACyFcgMAACyFcgMAACyFcgMAACyFcgMAACyFcgMAACyljrsDVLXS0lKdP39e3t7estls7o4DAAAqwDAMXb58WcHBwapV6+bnZn515eb8+fMKCQlxdwwAAFAJ2dnZatas2U2X+dWVG29vb0k/7xwfHx83pwEAABVRUFCgkJAQ+7/jN/OrKzfXL0X5+PhQbgAAqGEqcksJNxQDAABLodwAAABLodwAAABLodwAAABLodwAAABLodwAAABLodwAAABLodwAAABLodwAAABLodwAAABLodwAAABLcWu5WbFihTp06GD/nafo6Ght2bLlpuukpqaqc+fO8vLyUsuWLbVy5coqSgsAAGoCt5abZs2aKTExUWlpaUpLS1Pfvn01ePBgHT58uNzlT58+rUGDBqlHjx5KT0/XjBkzNHHiRCUlJVVxcgAAUF3ZDMMw3B3iX/n5+emll17S448/XmbeM888o48++kiZmZn2sbFjx+p///d/tWfPngq9f0FBgXx9fZWfn8+vggMAUEOY+fe72txzc+3aNb3//vsqLCxUdHR0ucvs2bNHMTExDmMDBw5UWlqarl69Wu46RUVFKigocHgBAADrquPuAIcOHVJ0dLR++uknNWzYUBs2bFDbtm3LXTY3N1cBAQEOYwEBASopKdGFCxcUFBRUZp2EhATNnj3bJdkBM0Knb3J3BNPOJN7n7ggAYJrbz9yEh4crIyNDe/fu1X/9139pxIgROnLkyA2Xt9lsDtPXr6r9cvy6+Ph45efn21/Z2dnOCw8AAKodt5+5qVu3rlq1aiVJioqK0pdffqmXX35Zr776apllAwMDlZub6zCWl5enOnXqqHHjxuW+v6enpzw9PZ0fHAAAVEtuP3PzS4ZhqKioqNx50dHRSklJcRjbunWroqKi5OHhURXxAABANefWcjNjxgx9/vnnOnPmjA4dOqRnn31WO3bs0COPPCLp50tKw4cPty8/duxYnT17VnFxccrMzNTq1au1atUqTZ061V2bAAAAqhm3Xpb69ttv9eijjyonJ0e+vr7q0KGDkpOTNWDAAElSTk6OsrKy7MuHhYVp8+bNmjx5spYvX67g4GAtWbJEQ4cOddcmAACAaqbafc+Nq/E9N3AXnpYCgMqrkd9zAwAA4AyUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCmUGwAAYCluLTcJCQm655575O3tLX9/fw0ZMkTHjh276To7duyQzWYr8zp69GgVpQYAANWZW8tNamqqxo8fr7179yolJUUlJSWKiYlRYWHhLdc9duyYcnJy7K/WrVtXQWIAAFDd1XHnhycnJztMr1mzRv7+/jpw4IB69ux503X9/f3VqFGjW35GUVGRioqK7NMFBQWVygoAAGqGanXPTX5+viTJz8/vlsvefffdCgoKUr9+/bR9+/YbLpeQkCBfX1/7KyQkxGl5AQBA9VNtyo1hGIqLi1P37t3Vrl27Gy4XFBSk1157TUlJSVq/fr3Cw8PVr18/7dy5s9zl4+PjlZ+fb39lZ2e7ahMAAEA14NbLUv9qwoQJOnjwoHbt2nXT5cLDwxUeHm6fjo6OVnZ2thYsWFDupSxPT095eno6PS8AAKieqsWZmyeffFIfffSRtm/frmbNmplev0uXLjp+/LgLkgEAgJrGrWduDMPQk08+qQ0bNmjHjh0KCwur1Pukp6crKCjIyekAAEBN5NZyM378eL377rv68MMP5e3trdzcXEmSr6+v6tWrJ+nne2bOnTunN998U5K0ePFihYaGKiIiQsXFxXr77beVlJSkpKQkt20HAACoPtxablasWCFJ6t27t8P4mjVrNHLkSElSTk6OsrKy7POKi4s1depUnTt3TvXq1VNERIQ2bdqkQYMGVVVsAABQjdkMwzDcHaIqFRQUyNfXV/n5+fLx8XF3HPyKhE7f5O4Ipp1JvM/dEQBAkrl/v6vFDcUAAADOQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWYrrcrFu3Tps2bbJPT5s2TY0aNVLXrl119uxZp4YDAAAwy3S5mTt3rurVqydJ2rNnj5YtW6b58+erSZMmmjx5stMDAgAAmFHH7ArZ2dlq1aqVJGnjxo3693//d/35z39Wt27d1Lt3b2fnAwAAMMX0mZuGDRvq4sWLkqStW7eqf//+kiQvLy/985//dG46AAAAk0yfuRkwYIBGjx6tu+++W998843uu+8+SdLhw4cVGhrq7HwAAACmmD5zs3z5ckVHR+u7775TUlKSGjduLEk6cOCAHn74YacHBAAAMMN0uSkoKNCSJUv04Ycf6t5777WPz5o1S48++qip90pISNA999wjb29v+fv7a8iQITp27Ngt10tNTVXnzp3l5eWlli1bauXKlWY3AwAAWJTpchMWFqYLFy6UGb906ZLCwsJMvVdqaqrGjx+vvXv3KiUlRSUlJYqJiVFhYeEN1zl9+rQGDRqkHj16KD09XTNmzNDEi
ROVlJRkdlMAAIAFmb7nxjCMcsevXLkiLy8vU++VnJzsML1mzRr5+/vrwIED6tmzZ7nrrFy5Us2bN9fixYslSW3atFFaWpoWLFigoUOHmvp8AABgPRUuN3FxcZIkm82m5557TvXr17fPu3btmvbt26fIyMjbCpOfny9J8vPzu+Eye/bsUUxMjMPYwIEDtWrVKl29elUeHh4O84qKilRUVGSfLigouK2MAACgeqtwuUlPT5f085mbQ4cOqW7duvZ5devWVceOHTV16tRKBzEMQ3FxcerevbvatWt3w+Vyc3MVEBDgMBYQEKCSkhJduHBBQUFBDvMSEhI0e/bsSucyK3T6plsvVM2cSbzP3REAAHCaCpeb7du3S5Iee+wxvfzyy/Lx8XFqkAkTJujgwYPatWvXLZe12WwO09cvlf1yXJLi4+PtZ52kn8/chISE3GZaAABQXZm+52bNmjVOD/Hkk0/qo48+0s6dO9WsWbObLhsYGKjc3FyHsby8PNWpU8f+WPq/8vT0lKenp1PzAgCA6st0uSksLFRiYqI+++wz5eXlqbS01GH+qVOnKvxehmHoySef1IYNG7Rjx44KPW0VHR2tjz/+2GFs69atioqKKnO/DQAA+PUxXW5Gjx6t1NRUPfroowoKCir3UlBFjR8/Xu+++64+/PBDeXt728/I+Pr62n+cMz4+XufOndObb74pSRo7dqyWLVumuLg4jRkzRnv27NGqVav03nvvVToHAACwDtPlZsuWLdq0aZO6det22x++YsUKSSrzg5tr1qzRyJEjJUk5OTnKysqyzwsLC9PmzZs1efJkLV++XMHBwVqyZAmPgQMAAEmVKDd33HHHTR/VNuNG35nzr9auXVtmrFevXvrqq6+ckgEAAFiL6W8ofv755/Xcc8/pxx9/dEUeAACA22L6zM3ChQt18uRJBQQEKDQ0tMxNvJxRAQAA7mS63AwZMsQFMQAAAJzDdLmZOXOmK3IAAAA4hel7biTphx9+0BtvvKH4+HhdunRJ0s+Xo86dO+fUcAAAAGaZPnNz8OBB9e/fX76+vjpz5ozGjBkjPz8/bdiwQWfPnrV/Hw0AAIA7mD5zExcXp5EjR+r48ePy8vKyj8fGxmrnzp1ODQcAAGCW6XLz5Zdf6oknnigz/pvf/KbMbz4BAABUNdPlxsvLSwUFBWXGjx07pqZNmzolFAAAQGWZLjeDBw/WnDlzdPXqVUmSzWZTVlaWpk+fzk8gAAAAtzNdbhYsWKDvvvtO/v7++uc//6levXqpVatW8vb21osvvuiKjAAAABVm+mkpHx8f7dq1S9u2bdNXX32l0tJSderUSf3793dFPgAAAFNMl5szZ84oNDRUffv2Vd++fV2RCQAAoNJMX5Zq2bKlunfvrldffdX+BX4AAADVhelyk5aWpujoaL3wwgsKDg7W4MGD9fe//11FRUWuyAcAAGCK6XLTqVMnvfTSS8rKytKWLVvk7++vJ554Qv7+/ho1apQrMgIAAFRYpX5bSvr5EfA+ffro9ddf1//8z/+oZcuWWrdunTOzAQAAmFbpcpOdna358+crMjJS99xzjxo0aKBly5Y5MxsAAIBppp+Weu211/TOO+/oiy++UHh4uB555BFt3LhRoaGhLogHAABgjuly8/zzz+uhhx7Syy+/rMjISBdEAgAAqDzT5SYrK0s2m80VWQAAAG6b6XtubDabPv/8c/3nf/6noqOjde7cOUnSW2+9pV27djk9IAAAgBmmy01SUpIGDhyoevXqKT093f79NpcvX9bcuXOdHhAAAMAM0+XmhRde0MqVK/X666/Lw8PDPt61a1d99dVXTg0HAABglulyc+zYMfXs2bPMuI+Pj3744QdnZAIAAKg00+UmKChIJ06cKDO+a9cutWzZ0imhAAAAKst0uXniiSc0adIk7du3TzabTefPn9c777yjqVOnaty4ca7ICAAAUGGmHwWfNm2a8vPz1adPH/3000/q2bOnPD09NXXqVE2YMMEVGQEAACrMdLmRpBdffFHPPvusjhw5otLSUrVt21YNGzZ0djYAAADTKlVuJKl+/fqKiopyZhYAAIDbVukfzgQAAKiOKDcAAMBSKDcAAMBSKlRuOnXqpO+//16SNGfOHP34448uDQUAAFBZFSo3mZmZKiwslCTNnj1bV65ccWkoAACAyqrQ01KRkZF67LHH1L17dxmGoQULFtzw0e/nnnvOqQEBAADMqFC5Wbt2rWbOnKlPPvlENptNW7ZsUZ06ZVe12WyUGwAA4FYVKjfh4eF6//33JUm1atXSZ599Jn9/f5cGAwAAqAzTX+JXWlrqihwAAABOUalvKD558qQWL16szMxM2Ww2tWnTRpMmTdKdd97p7HwAAACmmP6em08//VRt27bV/v371aFDB7Vr10779u1TRESEUlJSXJERAACgwkyfuZk+fbomT56sxMTEMuPPPPOMBgwY4LRwAAAAZpk+c5OZmanHH3+8zPioUaN05MgRp4QCAACoLNPlpmnTpsrIyCgznpGRwRNUAADA7UxflhozZoz+/Oc/69SpU+ratatsNpt27dqlefPmacqUKa7ICAAAUGGmy81f//pXeXt7a+HChYqPj5ckBQcHa9asWZo4caLTAwIAAJhhutzYbDZNnjxZkydP1uXLlyVJ3t7eTg8GAABQGZX6npvrKDUAAKC6MX1DsTPt3LlT999/v4KDg2Wz2bRx48abLr9jxw7ZbLYyr6NHj1ZNYAAAUO3d1pmb21VYWKiOHTvqscce09ChQyu83rFjx+Tj42Ofbtq0qSviAQCAGsit5SY2NlaxsbGm1/P391ejRo2cHwgAANR4pi5LXb16VX369NE333zjqjwVcvfddysoKEj9+vXT9u3bb7psUVGRCgoKHF4AAMC6TJUbDw8Pff3117LZbK7Kc1NBQUF67bXXlJSUpPXr1ys8PFz9+vXTzp07b7hOQkKCfH197a+QkJAqTAwAAKqa6RuKhw8frlWrVrkiyy2Fh4drzJgx6tSpk6Kjo/XKK6/ovvvu04IFC264Tnx8vPLz8+2v7OzsKkwMAACqmul7boqLi/XGG28oJSVFUVFRatCggcP8RYsWOS1cRXTp0kVvv/32Ded7enrK09OzChMBAAB3Ml1uvv76a3Xq1EmSytx7447LVenp6QoKCqryzwUAANWT6XJzqxt4zbhy5YpOnDhhnz59+rQyMjLk5+en5s2bKz4+XufOndObb74pSVq8eLFCQ0MVERGh4uJivf3220pKSlJSUpLTMgEAgJqt0o+CnzhxQidPnlTPnj1Vr149GYZh+sxNWlqa+vTpY5+Oi4uTJI0YMUJr165VTk6OsrKy7POLi4s1depUnTt3TvXq1VNERIQ2bdqkQYMGVXYzAACAxZguNxcvXtSDDz6o7du3y2az6fjx42rZsqVGjx6tRo0aaeHChRV+r969e8swjBvOX7t2rcP0tGnTNG3aNLORAQDAr4jpp6UmT54sDw8PZWVlqX79+vbxYcOGKTk52anhAAAAzDJ95mbr1q369NNP1axZM4fx1q1b
6+zZs04LBgAAUBmmz9wUFhY6nLG57sKFCzxyDQAA3M50uenZs6f96SXp58e/S0tL9dJLLzncHAwAAOAOpi9LvfTSS+rdu7fS0tJUXFysadOm6fDhw7p06ZK++OILV2QEAACoMNNnbtq2bauDBw/q3/7t3zRgwAAVFhbqgQceUHp6uu68805XZAQAAKiwSn3PTWBgoGbPnu3sLAAAALetUuXm+++/16pVq5SZmSmbzaY2bdrosccek5+fn7PzAQAAmGL6slRqaqrCwsK0ZMkSff/997p06ZKWLFmisLAwpaamuiIjAABAhZk+czN+/Hg9+OCDWrFihWrXri1JunbtmsaNG6fx48fr66+/dnpIAACAijJ95ubkyZOaMmWKvdhIUu3atRUXF6eTJ086NRwAAIBZpstNp06dlJmZWWY8MzNTkZGRzsgEAABQaRW6LHXw4EH7nydOnKhJkybpxIkT6tKliyRp7969Wr58uRITE12TEgAAoIIqVG4iIyNls9kcfsG7vF/n/tOf/qRhw4Y5Lx0AAIBJFSo3p0+fdnUOAAAAp6hQuWnRooWrcwAAADhFpb7E79y5c/riiy+Ul5en0tJSh3kTJ050SjAAAIDKMF1u1qxZo7Fjx6pu3bpq3LixbDabfZ7NZqPcAAAAtzJdbp577jk999xzio+PV61app8kBwAAcCnT7eTHH3/UQw89RLEBAADVkumG8vjjj+vvf/+7K7IAAADcNtOXpRISEvT73/9eycnJat++vTw8PBzmL1q0yGnhAAAAzDJdbubOnatPP/1U4eHhklTmhmIAAAB3Ml1uFi1apNWrV2vkyJEuiAMAAHB7TN9z4+npqW7durkiCwAAwG0zXW4mTZqkpUuXuiILAADAbTN9WWr//v3atm2bPvnkE0VERJS5oXj9+vVOCwcAAGCW6XLTqFEjPfDAA67IAgAAcNsq9fMLAAAA1RVfMwwAACzF9JmbsLCwm36fzalTp24rEAAAwO0wXW6eeuoph+mrV68qPT1dycnJevrpp52VCwAAoFJMl5tJkyaVO758+XKlpaXddiAAAIDb4bR7bmJjY5WUlOSstwMAAKgUp5Wbf/zjH/Lz83PW2wEAAFSK6ctSd999t8MNxYZhKDc3V999951eeeUVp4YDAAAwy3S5GTJkiMN0rVq11LRpU/Xu3Vu//e1vnZULAACgUkyXm5kzZ7oiBwAAgFPwJX4AAMBSKnzmplatWjf98j5JstlsKikpue1QAAAAlVXhcrNhw4Ybztu9e7eWLl0qwzCcEgoAAKCyKlxuBg8eXGbs6NGjio+P18cff6xHHnlEzz//vFPDAQAAmFWpe27Onz+vMWPGqEOHDiopKVFGRobWrVun5s2bOzsfAACAKabKTX5+vp555hm1atVKhw8f1meffaaPP/5Y7dq1c1U+AAAAUyp8WWr+/PmaN2+eAgMD9d5775V7mQoAAMDdKlxupk+frnr16qlVq1Zat26d1q1bV+5y69evd1o4AAAAsypcboYPH37LR8EBAADcrcLlZu3atS6MAQAA4Bxu/YbinTt36v7771dwcLBsNps2btx4y3VSU1PVuXNneXl5qWXLllq5cqXrgwIAgBrDreWmsLBQHTt21LJlyyq0/OnTpzVo0CD16NFD6enpmjFjhiZOnKikpCQXJwUAADWF6R/OdKbY2FjFxsZWePmVK1eqefPmWrx4sSSpTZs2SktL04IFCzR06NBy1ykqKlJRUZF9uqCg4LYyAwCA6s2t5casPXv2KCYmxmFs4MCBWrVqla5evSoPD48y6yQkJGj27NlVFRFVJHT6JndH+FVgP1eNM4n3uTtCpdTE46Mm7mv2s3k16lfBc3NzFRAQ4DAWEBCgkpISXbhwodx14uPjlZ+fb39lZ2dXRVQAAOAmNerMjaQyj6Nf/7HOGz2m7unpKU9PT5fnAgAA1UONOnMTGBio3Nxch7G8vDzVqVNHjRs3dlMqAABQndSochMdHa2UlBSHsa1btyoqKqrc+20AAMCvj1vLzZUrV5SRkaGMjAxJPz/qnZGRoaysLEk/3y8zfPhw+/Jjx47V2bNnFRcXp8zMTK1evVqrVq3S1KlT3REfAABUQ2695yYtLU19+vSxT8fFxUmSRowYobVr1yonJ8dedCQpLCxMmzdv1uTJk7V8+XIFBwdryZIlN3wMHAAA/Pq4tdz07t3bfkNwecr7yYdevXrpq6++cmEqAABQk9Woe24AAABuhXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAshXIDAAAsxe3l5pVXXlFYWJi8vLzUuXNnff755zdcdseOHbLZbGVeR48ercLEAACgOnNrufnggw/01FNP6dlnn1V6erp69Oih2NhYZWVl3XS9Y8eOKScnx/5q3bp1FSUGAADVnVvLzaJFi/T4449r9OjRatOmjRYvXqyQkBCtWLHipuv5+/srMDDQ/qpdu3YVJQYAANWd28pNcXGxDhw4oJiYGIfxmJgY7d69+6br3n333QoKClK/fv20ffv2my5bVFSkgoIChxcAALAut5WbCxcu6Nq1awoICHAYDwgIUG5ubrnrBAUF6bXXXlNSUpLWr1+v8PBw9evXTzt37rzh5yQkJMjX19f+CgkJcep2AACA6qWOuwPYbDaHacMwyoxdFx4ervDwcPt0dHS0srOztWDBAvXs2bPcdeLj4xUXF2efLigooOAAAGBhbjtz06RJE9WuXbvMWZq8vLwyZ3NupkuXLjp+/PgN53t6esrHx8fhBQAArMtt5aZu3brq3LmzUlJSHMZTUlLUtWvXCr9Penq6goKCnB0PAADUUG69LBUXF6dHH31UUVFRio6O1muvvaasrCyNHTtW0s+XlM6dO6c333xTkrR48WKFhoYqIiJCxcXFevvtt5WUlKSkpCR3bgYAAKhG3Fpuhg0bposXL2rOnDnKyclRu3bttHnzZrVo0UKSlJOT4/CdN8XFxZo6darOnTunevXqKSIiQps2bdKgQYPctQkAAKCacfsNxePGjdO4cePKnbd27VqH6WnTpmnatGlVkAoAANRUbv/5BQAAAGei3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AA
AAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEuh3AAAAEtxe7l55ZVXFBYWJi8vL3Xu3Fmff/75TZdPTU1V586d5eXlpZYtW2rlypVVlBQAANQEbi03H3zwgZ566ik9++yzSk9PV48ePRQbG6usrKxylz99+rQGDRqkHj16KD09XTNmzNDEiROVlJRUxckBAEB15dZys2jRIj3++OMaPXq02rRpo8WLFyskJEQrVqwod/mVK1eqefPmWrx4sdq0aaPRo0dr1KhRWrBgQRUnBwAA1VUdd31wcXGxDhw4oOnTpzuMx8TEaPfu3eWus2fPHsXExDiMDRw4UKtWrdLVq1fl4eFRZp2ioiIVFRXZp/Pz8yVJBQUFt7sJ5Sot+tEl7+tKrtoXrlQT9zNwIzXx76BUM/8e1sR9zX52fE/DMG65rNvKzYULF3Tt2jUFBAQ4jAcEBCg3N7fcdXJzc8tdvqSkRBcuXFBQUFCZdRISEjR79uwy4yEhIbeR3lp8F7s7AfDrxt/BqsO+rhqu3M+XL1+Wr6/vTZdxW7m5zmazOUwbhlFm7FbLlzd+XXx8vOLi4uzTpaWlunTpkho3bnzTz3GXgoIChYSEKDs7Wz4+Pu6OYynsW9dh37oG+9V12Leu46p9axiGLl++rODg4Fsu67Zy06RJE9WuXbvMWZq8vLwyZ2euCwwMLHf5OnXqqHHjxuWu4+npKU9PT4exRo0aVT54FfHx8eEvnIuwb12Hfesa7FfXYd+6jiv27a3O2FznthuK69atq86dOyslJcVhPCUlRV27di13nejo6DLLb926VVFRUeXebwMAAH593Pq0VFxcnN544w2tXr1amZmZmjx5srKysjR27FhJP19SGj58uH35sWPH6uzZs4qLi1NmZqZWr16tVatWaerUqe7aBAAAUM249Z6bYcOG6eLFi5ozZ45ycnLUrl07bd68WS1atJAk5eTkOHznTVhYmDZv3qzJkydr+fLlCg4O1pIlSzR06FB3bYLTeXp6aubMmWUupeH2sW9dh33rGuxX12Hfuk512Lc2oyLPVAEAANQQbv/5BQAAAGei3AAAAEuh3AAAAEuh3AAAAEuh3FQTs2bNks1mc3gFBga6O1aNtHPnTt1///0KDg6WzWbTxo0bHeYbhqFZs2YpODhY9erVU+/evXX48GH3hK1BbrVfR44cWeYY7tKli3vC1jAJCQm655575O3tLX9/fw0ZMkTHjh1zWIbj1ryK7FeO28pZsWKFOnToYP+ivujoaG3ZssU+393HK+WmGomIiFBOTo79dejQIXdHqpEKCwvVsWNHLVu2rNz58+fP16JFi7Rs2TJ9+eWXCgwM1IABA3T58uUqTlqz3Gq/StK9997rcAxv3ry5ChPWXKmpqRo/frz27t2rlJQUlZSUKCYmRoWFhfZlOG7Nq8h+lThuK6NZs2ZKTExUWlqa0tLS1LdvXw0ePNheYNx+vBqoFmbOnGl07NjR3TEsR5KxYcMG+3RpaakRGBhoJCYm2sd++uknw9fX11i5cqUbEtZMv9yvhmEYI0aMMAYPHuyWPFaTl5dnSDJSU1MNw+C4dZZf7lfD4Lh1pjvuuMN44403qsXxypmbauT48eMKDg5WWFiYHnroIZ06dcrdkSzn9OnTys3NVUxMjH3M09NTvXr10u7du92YzBp27Nghf39/3XXXXRozZozy8vLcHalGys/PlyT5+flJ4rh1ll/u1+s4bm/PtWvX9P7776uwsFDR0dHV4nil3FQTv/vd7/Tmm2/q008/1euvv67c3Fx17dpVFy9edHc0S7n+w6u//HHWgICAMj/KCnNiY2P1zjvvaNu2bVq4cKG+/PJL9e3bV0VFRe6OVqMYhqG4uDh1795d7dq1k8Rx6wzl7VeJ4/Z2HDp0SA0bNpSnp6fGjh2rDRs2qG3bttXieHXrzy/g/8TGxtr/3L59e0VHR+vOO+/UunXrFBcX58Zk1mSz2RymDcMoMwZzhg0bZv9zu3btFBUVpRYtWmjTpk164IEH3JisZpkwYYIOHjyoXbt2lZnHcVt5N9qvHLeVFx4eroyMDP3www9KSkrSiBEjlJqaap/vzuOVMzfVVIMGDdS+fXsdP37c3VEs5foTaL/8v4e8vLwy/5eB2xMUFKQWLVpwDJvw5JNP6qOPPtL27dvVrFkz+zjH7e250X4tD8dtxdWtW1etWrVSVFSUEhIS1LFjR7388svV4nil3FRTRUVFyszMVFBQkLujWEpYWJgCAwOVkpJiHysuLlZqaqq6du3qxmTWc/HiRWVnZ3MMV4BhGJowYYLWr1+vbdu2KSwszGE+x23l3Gq/lofjtvIMw1BRUVG1OF65LFVNTJ06Vffff7+aN2+uvLw8vfDCCyooKNCIESPcHa3GuXLlik6cOGGfPn36tDIyMuTn56fmzZvrqaee0ty5c9W6dWu1bt1ac+fOVf369fWnP/3Jjamrv5vtVz8/P82aNUtDhw5VUFCQzpw5oxkzZqhJkyb64x//6MbUNcP48eP17rvv6sMPP5S3t7f9/3h9fX1Vr1492Ww2jttKuNV+vXLlCsdtJc2YMUOxsbEKCQnR5cuX9f7772vHjh1KTk6uHsdrlTyThVsaNmyYERQUZHh4eBjBwcHGAw88YBw+fNjdsWqk7du3G5LKvEaMGGEYxs+P1c6cOdMIDAw0PD09jZ49exqHDh1yb+ga4Gb79ccffzRiYmKMpk2bGh4eHkbz5s2NESNGGFlZWe6OXSOUt18lGWvWrLEvw3Fr3q32K8dt5Y0aNcpo0aKFUbduXaNp06ZGv379jK1bt9rnu/t4tRmGYVRNjQIAAHA97rkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkBAACWQrkB4BRnzpyRzWZTRkaGu6PYHT16VF26dJGXl5ciIyPLXaZ379566qmnnP7Za9euVaNGjZz+vgBujXIDWMTIkSNls9mUmJjoML5x40bZbDY3pXKvmTNnqkGDBjp27Jg+++wzd8cBUEUoN4CFeHl5ad68efr+++/dHcVpiouLK73uyZMn1b17d7Vo0UKNGzd2Yqrbd/XqVXdHACyLcgNYSP/+/RUYGKiEhIQbLjNr1qwyl2gWL16s0NBQ+/TIkSM1ZMgQzZ07VwEBAWrUqJFmz56tkpISPf300/Lz81OzZs20evXqMu9/9OhRde3aVV5eXoqIiNCOHTsc5h85ckSDBg1Sw4YNFRAQoEcffVQXLlywz+/du7cmTJiguLg4NWnSRAMGDCh3O0pLSzVnzhw1a9ZMnp6eioyMVHJysn2+zWbTgQMHNGfOHNlsNs2aNeuG+6S0tFTTpk2Tn5+fAgMDyyy7aNEitW/fXg
0aNFBISIjGjRunK1euOCyzdu1aNW/eXPXr19cf//hHXbx40WH+9f2+evVqtWzZUp6enjIMQ1lZWRo8eLAaNmwoHx8fPfjgg/r222/LrPfWW28pNDRUvr6+euihh3T58mX7Mv/4xz/Uvn171atXT40bN1b//v1VWFh4w+0FrI5yA1hI7dq1NXfuXC1dulT/7//9v9t6r23btun8+fPauXOnFi1apFmzZun3v/+97rjjDu3bt09jx47V2LFjlZ2d7bDe008/rSlTpig9PV1du3bVH/7wB/s/9Dk5OerVq5ciIyOVlpam5ORkffvtt3rwwQcd3mPdunWqU6eOvvjiC7366qvl5nv55Ze1cOFCLViwQAcPHtTAgQP1hz/8QcePH7d/VkREhKZMmaKcnBxNnTr1htu6bt06NWjQQPv27dP8+fM1Z84cpaSk2OfXqlVLS5Ys0ddff61169Zp27ZtmjZtmn3+vn37NGrUKI0bN04ZGRnq06ePXnjhhTKfc+LECf3tb39TUlKS/d6kIUOG6NKlS0pNTVVKSopOnjypYcOGOax38uRJbdy4UZ988ok++eQTpaam2i8/5uTk6OGHH9aoUaOUmZmpHTt26IEHHhC/iYxftSr7/XEALjVixAhj8ODBhmEYRpcuXYxRo0YZhmEYGzZsMP71r/rMmTONjh07Oqz73//930aLFi0c3qtFixbGtWvX7GPh4eFGjx497NMlJSVGgwYNjPfee88wDMM4ffq0IclITEy0L3P16lWjWbNmxrx58wzDMIy//vWvRkxMjMNnZ2dnG5KMY8eOGYZhGL169TIiIyNvub3BwcHGiy++6DB2zz33GOPGjbNPd+zY0Zg5c+ZN36dXr15G9+7dy7zPM888c8N1/va3vxmNGze2Tz/88MPGvffe67DMsGHDDF9fX/v0zJkzDQ8PDyMvL88+tnXrVqN27dpGVlaWfezw4cOGJGP//v329erXr28UFBTYl3n66aeN3/3ud4ZhGMaBAwcMScaZM2duup3ArwlnbgALmjdvntatW6cjR45U+j0iIiJUq9b//SciICBA7du3t0/Xrl1bjRs3Vl5ensN60dHR9j/XqVNHUVFRyszMlCQdOHBA27dvV8OGDe2v3/72t5J+PjtxXVRU1E2zFRQU6Pz58+rWrZvDeLdu3eyfZUaHDh0cpoOCghy2a/v27RowYIB+85vfyNvbW8OHD9fFixftl34yMzMdtltSmWlJatGihZo2bWqfzszMVEhIiEJCQuxjbdu2VaNGjRy2IzQ0VN7e3uXm69ixo/r166f27dvrP/7jP/T6669b6p4roDIoN4AF9ezZUwMHDtSMGTPKzKtVq1aZSxbl3dzq4eHhMG2z2codKy0tvWWe609rlZaW6v7771dGRobD6/jx4+rZs6d9+QYNGtzyPf/1fa8zDKNST4bdbLvOnj2rQYMGqV27dkpKStKBAwe0fPlySf+33365P2/kl9t1o7y/HL9Zvtq1ayslJUVbtmxR27ZttXTpUoWHh+v06dMVygRYEeUGsKjExER9/PHH2r17t8N406ZNlZub6/APsjO/m2bv3r32P5eUlOjAgQP2szOdOnXS4cOHFRoaqlatWjm8KlpoJMnHx0fBwcHatWuXw/ju3bvVpk0b52zI/y8tLU0lJSVauHChunTporvuukvnz593WKZt27YO2y2pzHR52rZtq6ysLIf7lo4cOaL8/HxT22Gz2dStWzfNnj1b6enpqlu3rjZs2FDh9QGrodwAFtW+fXs98sgjWrp0qcN479699d1332n+/Pk6efKkli9fri1btjjtc5cvX64NGzbo6NGjGj9+vL7//nuNGjVKkjR+/HhdunRJDz/8sPbv369Tp05p69atGjVqlK5du2bqc55++mnNmzdPH3zwgY4dO6bp06crIyNDkyZNctq2SNKdd96pkpISLV26VKdOndJbb72llStXOiwzceJEJScna/78+frmm2+0bNkyhye3bqR///7q0KGDHnnkEX311Vfav3+/hg8frl69et3y0tx1+/bt09y5c5WWlqasrCytX79e3333ndNLHlCTUG4AC3v++efLXDJp06aNXnnlFS1fvlwdO3bU/v37b/okkVmJiYmaN2+eOnbsqM8//1wffvihmjRpIkkKDg7WF198oWvXrmngwIFq166dJk2aJF9fX4f7eypi4sSJmjJliqZMmaL27dsrOTlZH330kVq3bu20bZGkyMhILVq0SPPmzVO7du30zjvvlHnUvkuXLnrjjTe0dOlSRUZGauvWrfrLX/5yy/e22WzauHGj7rjjDvXs2VP9+/dXy5Yt9cEHH1Q4n4+Pj3bu3KlBgwbprrvu0l/+8hctXLhQsbGxprcVsAqbUdGLxQAAADUAZ24AAIClUG4AAIClUG4AAIClUG4AAIClUG4AAIClUG4AAIClUG4AAIClUG4AAIClUG4AAIClUG4AAIClUG4AAICl/H/aGdz1yJdz4QAAAABJRU5ErkJggg==", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjsAAAGwCAYAAABPSaTdAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAqeElEQVR4nO3de3hNd6L/8c9GbEESl7K3PIKUtHUpdWlV6hJFVHUejuO0PXq0bh0eQUJb5GdKaCdxmebkEPQxbV2mdZkZl+nllMQtRjPOEIzLSfUgxVE5mVaaRKQJyfr94bEfe4Jmy9r2zvJ+Pc9+Hvu71l4+6XfafOa71trLZhiGIQAAAIuq5esAAAAA3kTZAQAAlkbZAQAAlkbZAQAAlkbZAQAAlkbZAQAAlkbZAQAAllbH1wH8QUVFhb777jsFBQXJZrP5Og4AAKgCwzBUVFSk0NBQ1ap15/Ubyo6k7777TmFhYb6OAQAA7sGFCxfUsmXLO26n7EgKCgqSdOMfVnBwsI/TAACAqigsLFRYWJjr9/idUHYk16mr4OBgyg4AADXMz12CwgXKAADA0ig7AADA0ig7AADA0ig7AADA0ig7AADA0ig7AADA0ig7AADA0ig7AADA0ig7AADA0ig7AADA0ig7AADA0ig7AADA0ig7AADA0ig7AADA0ig7AADA0ur48i/ft2+flixZoqysLF26dElbt27V8OHDXdsNw9D8+fO1atUq5efnq2fPnlq+fLk6duzo2qe0tFRvvvmmNmzYoJKSEg0YMEArVqxQy5YtffATVdZm9he+jnBPvl041NcRAAAwhU9XdoqLi9WlSxelpqbedvvixYuVnJys1NRUHTx4UE6nU4MGDVJRUZFrn7i4OG3dulUbN27U/v37deXKFb3wwgsqLy+/Xz8GAADwYz5d2RkyZIiGDBly222GYSglJUVz5szRiBEjJElr166Vw+HQ+vXrNXHiRBUUFOjDDz/U7373Ow0cOFCS9PHHHyssLEw7d+7U4MGDb3vs0tJSlZaWut4XFhaa/JMBAAB/4bfX7OTk5Cg3N1fR0dGuMbvdrn79+ikzM1OSlJWVpWvXrrntExoaqk6dOrn2uZ2kpCSFhIS4XmFhYd77QQAAgE/5bdnJzc2VJDkcDrdxh8Ph2pabm6u6deuqcePGd9znduLj41VQUOB6XbhwweT0AADAX/j0NFZV2Gw2t/eGYVQa+0c/t4/dbpfdbjclHwAA8G9+u7LjdDolqdIKTV5enmu1x+l0qqysTPn5+XfcBwAAPNj8tuyEh4fL6XQqPT3dNVZWVqaMjAxFRkZKkrp3766AgAC3fS5duqQTJ0649gEAAA82n57GunLlik6fPu16n5OTo6NHj6pJkyZq1aqV4uLilJiYqIiICEVERCgxMVH169fXqFGjJEkhISEaP3683njjDTVt2lRNmjTRm2++qccff9x1dxYAAHiw+bTsHDp0SP3793e9nzFjhiTptdde05o1azRz5kyVlJRo8uTJri8VTEtLU1BQkOsz//7v/646deroxRdfdH2p4Jo1a1S7du37/vMAAAD/YzMMw/B1CF8rLCxUSEiICgoKFBwcbOqx+QZlAAC8o6q/v/32mh0AAAAzUHYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAIClUXYAAICl+XXZuX79un71q18pPDxcgYGBevjhh7VgwQJVVFS49jEMQwkJCQoNDVVgYKCioqJ08uRJH6YGAAD+xK/LzqJFi/T+++8rNTVV2dnZWrx4sZYsWaJly5a59lm8eLGSk5OVmpqqgwcPyul0atCgQSoqKvJhcgAA4C/8uuz85S9/0bBhwzR06FC1adNGI0eOVHR0tA4dOiTpxqpOSkqK5syZoxEjRqhTp05au3atrl69qvXr1/s4PQAA8Ad+XXZ69+6tXbt26ZtvvpEk/e1vf9P+/fv1/PPPS5JycnKUm5ur6Oho12fsdrv69eunzMzMOx63tLRUhYWFbi8AAGBNdXwd4G5mzZqlgoICPfbYY6pdu7bKy8v161//Wv/6r/8qScrNzZUkORwOt885HA6dO3fujsdNSkrS/PnzvRccAAD4Db9e2dm0aZM+/vhjrV+/XocPH9batWv1m9/8RmvXrnXbz2azub03DKPS2K3i4+NVUFDgel24cMEr+QEAgO/59crOW2+9pdmzZ+vll1+WJD3++OM6d+6ckpKS9Nprr8npdEq6scLTokUL1+fy8vIqrfbcym63y263ezc8AADwC369snP16lXVquUesXbt2q5bz8PDw+V0OpWenu7aXlZWpoyMDEVGRt7XrAAAwD/59crOL37xC/36179Wq1at1LFjRx05ckTJyckaN26cpBunr+Li4pSYmKiIiAhFREQoMTFR9evX16hRo3ycHgAA+AO/LjvLli3T22+/rcmTJysvL0+hoaGaOHGi5s6d69pn5syZKikp0eTJk5Wfn6+ePXsqLS1NQUFBPkwOAAD8hc0wDMPXIXytsLBQISEhKigoUHBwsKnHbjP7C1OPd798u3CoryMAAHBXVf397dfX7AAAAFQXZQcAAFgaZQcAAFgaZQcAAFgaZQcAAFgaZQcAAFgaZQcAAFgaZQcAAFgaZQcAAFgaZQcAAFgaZQcAAFgaZQcAAFgaZQcAAFgaZQcAAFgaZQcAAFgaZQcAAFgaZQcAAFgaZQcAAFgaZQcAAFgaZQcAAFgaZQcAAFgaZQcAAFiax2WnpKREV69edb0/d+6cUlJSlJaWZmowAAAAM3hcdoYNG6Z169ZJkn788Uf17NlT7733noYNG6aVK1eaHhAAAKA6PC47hw8fVp8+fSRJf/zjH+VwOHTu3DmtW7dOS5cuNT0gAABAdXhcdq5evaqgoCBJUlpamkaMGKFatWrp6aef1rlz50wPCAAAUB0el5127dpp27ZtunDhgnbs2KHo6GhJUl5enoKDg00PCAAAUB0el525c+fqzTffVJs2bdSzZ0/16tVL0o1Vnq5du5oeEAAAoDrqePqBkSNHqnfv3rp06ZK6dOniGh8wY
ID+6Z/+ydRwAAAA1eVx2ZEkp9Mpp9PpNvbUU0+ZEggAAMBMHped4uJiLVy4ULt27VJeXp4qKirctp89e9a0cAAAANXlcdmZMGGCMjIyNHr0aLVo0UI2m80buQAAAEzhcdn58ssv9cUXX+iZZ57xRh4AAABTeXw3VuPGjdWkSRNvZAEAADCdx2XnnXfe0dy5c92ejwUAAOCvPD6N9d577+nMmTNyOBxq06aNAgIC3LYfPnzYtHAAAADV5XHZGT58uBdiAAAAeIfHZWfevHneyAEAAOAV9/SlgpKUlZWl7Oxs2Ww2dejQgUdFAAAAv+Rx2cnLy9PLL7+svXv3qlGjRjIMQwUFBerfv782btyoZs2aeSMnAADAPfH4bqypU6eqsLBQJ0+e1OXLl5Wfn68TJ06osLBQ06ZN80ZGAACAe+bxys727du1c+dOtW/f3jXWoUMHLV++XNHR0aaGAwAAqC6PV3YqKioq3W4uSQEBAZWekwUAAOBrHpedZ599VrGxsfruu+9cYxcvXtT06dM1YMAAU8MBAABUl8dlJzU1VUVFRWrTpo3atm2rdu3aKTw8XEVFRVq2bJk3MgIAANwzj6/ZCQsL0+HDh5Wenq6vv/5ahmGoQ4cOGjhwoDfyAQAAVItHZef69euqV6+ejh49qkGDBmnQoEHeygUAAGAKj05j1alTR61bt1Z5ebm38gAAAJjK42t2fvWrXyk+Pl6XL1/2Rh4AAABTeXzNztKlS3X69GmFhoaqdevWatCggdt2nnoOAAD8CU89BwAAlsZTzwEAgKV5fM0OAABATVKllZ3GjRvLZrNV6YBcuAwAAPxJlcpOSkqK688//PCD3n33XQ0ePFi9evWSJP3lL3/Rjh079Pbbb3slJAAAwL2yGYZhePKBf/7nf1b//v01ZcoUt/HU1FTt3LlT27ZtMzPffVFYWKiQkBAVFBQoODjY1GO3mf2Fqce7X75dONTXEQAAuKuq/v72+JqdHTt26Lnnnqs0PnjwYO3cudPTwwEAAHiVx2WnadOm2rp1a6Xxbdu2qWnTpqaEAgAAMIvHt57Pnz9f48eP1969e13X7Bw4cEDbt2/XBx98YHpAAACA6vB4ZWfMmDHKzMxUo0aNtGXLFm3evFkhISH66quvNGbMGNMDXrx4Uf/2b/+mpk2bqn79+nriiSeUlZXl2m4YhhISEhQaGqrAwEBFRUXp5MmTpucAAAA1k8crO5LUs2dPffLJJ2ZnqSQ/P1/PPPOM+vfvry+//FLNmzfXmTNn1KhRI9c+ixcvVnJystasWaNHHnlE7777rgYNGqRTp04pKCjI6xkBAIB/u6eyc1NJSYmuXbvmNmbm3UyLFi1SWFiYVq9e7Rpr06aN68+GYSglJUVz5szRiBEjJElr166Vw+HQ+vXrNXHiRNOyAACAmsnj01hXr17VlClT1Lx5czVs2FCNGzd2e5np008/VY8ePfQv//Ivat68ubp27arf/va3ru05OTnKzc1VdHS0a8xut6tfv37KzMy843FLS0tVWFjo9gIAANbkcdl56623tHv3bq1YsUJ2u10ffPCB5s+fr9DQUK1bt87UcGfPntXKlSsVERGhHTt2aNKkSZo2bZrr78nNzZUkORwOt885HA7XtttJSkpSSEiI6xUWFmZqbgAA4D88Po312Wefad26dYqKitK4cePUp08ftWvXTq1bt9Ynn3yiV155xbRwFRUV6tGjhxITEyVJXbt21cmTJ7Vy5Uq9+uqrrv3+8VEWhmHc9fEW8fHxmjFjhut9YWEhhQcAAIvyeGXn8uXLCg8Pl3Tj+pybz8Lq3bu39u3bZ2q4Fi1aqEOHDm5j7du31/nz5yVJTqdTkiqt4uTl5VVa7bmV3W5XcHCw2wsAAFiTx2Xn4Ycf1rfffitJ6tChg37/+99LurHic+tdUmZ45plndOrUKbexb775Rq1bt5YkhYeHy+l0Kj093bW9rKxMGRkZioyMNDULAAComTwuO2PHjtXf/vY3STdOB928dmf69Ol66623TA03ffp0HThwQImJiTp9+rTWr1+vVatWKSYmRtKN01dxcXFKTEzU1q1bdeLECY0ZM0b169fXqFGjTM0CAABqJo+v2Zk+fbrrz/3799fXX3+tQ4cOqW3bturSpYup4Z588klt3bpV8fHxWrBggcLDw5WSkuJ2XdDMmTNVUlKiyZMnKz8/Xz179lRaWhrfsQMAACTdw1PPrYinnlfGU88BAP6uqr+/7+lLBXft2qVdu3YpLy9PFRUVbts++uijezkkAACAV9zTg0AXLFigHj16qEWLFne9xRsAAMDXPC4777//vtasWaPRo0d7Iw8AAICpPL4bq6ysjNu6AQBAjeFx2ZkwYYLWr1/vjSwAAACmq9JprFsfrVBRUaFVq1Zp586d6ty5swICAtz2TU5ONjchAABANVSp7Bw5csTt/RNPPCFJOnHihNs4FysDAAB/U6Wys2fPHm/nAAAA8AqPr9kBAACoSSg7AADA0ig7AADA0ig7AADA0qpUdrp166b8/HxJ0oIFC3T16lWvhgIAADBLlcpOdna2iouLJd14NtaVK1e8GgoAAMAsVbr1/IknntDYsWPVu3dvGYah3/zmN2rYsOFt9507d66pAQEAAKqjSmVnzZo1mjdvnj7//HPZbDZ9+eWXqlOn8kdtNhtlBwAA+JUqlZ1HH31UGzdulCTVqlVLu3btUvPmzb0aDAAAwAxVKju3qqio8EYOAAAAr/C47EjSmTNnlJKSouzsbNlsNrVv316xsbFq27at2fkAAACqxePv2dmxY4c6dOigv/71r+rcubM6deqk//qv/1LHjh2Vnp7ujYwAAAD3zOOVndmzZ2v69OlauHBhpfFZs2Zp0KBBpoUDAACoLo9XdrKzszV+/PhK4+PGjdN///d/mxIKAADALB6XnWbNmuno0aOVxo8ePcodWgAAwO94fBrr9ddf1y9/+UudPXtWkZGRstls2r9/vxYtWqQ33njDGxkBAADumcdl5+2331ZQUJDee+89xcfHS5JCQ0OVkJCgadOmmR4QAACgOjwuOzabTdOnT9f06dNVVFQkSQoKCjI9GAAAgBnu6Xt2bqLkAAAAf+fxBcoAAAA1CWUHAABYGmUHAABYmkdl59q1a+rfv7+++eYbb+UBAAAwlUdlJyAgQCdOnJDNZvNWHgAAAFN5fBrr1Vdf1YcffuiNLAAAAKbz+NbzsrIyffDBB0pPT1ePHj3UoEEDt+3JycmmhQMAAKguj8vOiRMn1K1bN0mqdO0Op7cAAIC/8bjs7Nmzxxs5AAAAvOKebz0/ffq0duzYoZKSEkmSYRimhQIAADCLx2Xnhx9+0IABA/TII4/o+eef16VLlyRJEyZM4KnnAADA73hcdqZPn66AgACdP39e9evXd42/9NJL2r59u6nhAAAAqsvja3bS0tK0Y8cOtWzZ0m08IiJC586dMy0YAACAGTxe2SkuLnZb0bnp+++/l91uNyUUAACAWTwuO3379tW6detc7202myoqKrRkyRL179/f1HAAAADV5fFprCVLligq
KkqHDh1SWVmZZs6cqZMnT+ry5cv66quvvJERAADgnnm8stOhQwcdO3ZMTz31lAYNGqTi4mKNGDFCR44cUdu2bb2REQAA4J55vLIjSU6nU/Pnzzc7CwAAgOnuqezk5+frww8/VHZ2tmw2m9q3b6+xY8eqSZMmZucDAACoFo9PY2VkZCg8PFxLly5Vfn6+Ll++rKVLlyo8PFwZGRneyAgAAHDPPF7ZiYmJ0YsvvqiVK1eqdu3akqTy8nJNnjxZMTExOnHihOkhAQAA7pXHKztnzpzRG2+84So6klS7dm3NmDFDZ86cMTUcAABAdXlcdrp166bs7OxK49nZ2XriiSfMyAQAAGCaKp3GOnbsmOvP06ZNU2xsrE6fPq2nn35aknTgwAEtX75cCxcu9E5KAACAe2QzDMP4uZ1q1aolm82mn9vVZrOpvLzctHD3S2FhoUJCQlRQUKDg4GBTj91m9hemHu9++XbhUF9HAADgrqr6+7tKKzs5OTmmBQMAALifqlR2Wrdu7e0cAAAAXnFPXyp48eJFffXVV8rLy1NFRYXbtmnTppkSDAAAwAwel53Vq1dr0qRJqlu3rpo2bSqbzebaZrPZKDsAAMCveFx25s6dq7lz5yo+Pl61anl85zoAAMB95XFbuXr1ql5++WWKDgAAqBE8bizjx4/XH/7wB29kAQAAMJ3HZScpKUkZGRmKiorS1KlTNWPGDLeXNyUlJclmsykuLs41ZhiGEhISFBoaqsDAQEVFRenkyZNezQEAAGoOj6/ZSUxM1I4dO/Too49KUqULlL3l4MGDWrVqlTp37uw2vnjxYiUnJ2vNmjV65JFH9O6772rQoEE6deqUgoKCvJYHAADUDB6XneTkZH300UcaM2aMF+Lc3pUrV/TKK6/ot7/9rd59913XuGEYSklJ0Zw5czRixAhJ0tq1a+VwOLR+/XpNnDjxtscrLS1VaWmp631hYaF3fwAAAOAzHp/GstvteuaZZ7yR5Y5iYmI0dOhQDRw40G08JydHubm5io6OdsvXr18/ZWZm3vF4SUlJCgkJcb3CwsK8lh0AAPiWx2UnNjZWy5Yt80aW29q4caMOHz6spKSkSttyc3MlSQ6Hw23c4XC4tt1OfHy8CgoKXK8LFy6YGxoAAPgNj09j/fWvf9Xu3bv1+eefq2PHjgoICHDbvmXLFtPCXbhwQbGxsUpLS1O9evXuuN8/XitkGMZdrx+y2+2y2+2m5QQAAP7L47LTqFEj1/Ux3paVlaW8vDx1797dNVZeXq59+/YpNTVVp06dknRjhadFixauffLy8iqt9gAAgAfTPT0u4n4ZMGCAjh8/7jY2duxYPfbYY5o1a5YefvhhOZ1Opaenq2vXrpKksrIyZWRkaNGiRfctJwAA8F/39CDQ+yUoKEidOnVyG2vQoIGaNm3qGo+Li1NiYqIiIiIUERGhxMRE1a9fX6NGjfJFZAAA4Gc8Ljvh4eF3vR7m7Nmz1QrkqZkzZ6qkpESTJ09Wfn6+evbsqbS0NL5jBwAASLqHsnPrtxdL0rVr13TkyBFt375db731llm57mjv3r1u7202mxISEpSQkOD1vxsAANQ8Hped2NjY244vX75chw4dqnYgAAAAM5n26PIhQ4Zo8+bNZh0OAADAFKaVnT/+8Y9q0qSJWYcDAAAwhcensbp27ep2gbJhGMrNzdXf//53rVixwtRwAAAA1eVx2Rk+fLjb+1q1aqlZs2aKiorSY489ZlYuAAAAU3hcdubNm+eNHAAAAF5h2jU7AAAA/qjKKzu1atW665cJSje+8+b69evVDgUAAGCWKpedrVu33nFbZmamli1bJsMwTAkFAABgliqXnWHDhlUa+/rrrxUfH6/PPvtMr7zyit555x1TwwEAAFTXPV2z89133+n1119X586ddf36dR09elRr165Vq1atzM4HAABQLR6VnYKCAs2aNUvt2rXTyZMntWvXLn322WeVnkwOAADgL6p8Gmvx4sVatGiRnE6nNmzYcNvTWgAAAP7GZlTxquJatWopMDBQAwcOVO3ate+435YtW0wLd78UFhYqJCREBQUFCg4ONvXYbWZ/Yerx7pdvFw71dQQAAO6qqr+/q7yy8+qrr/7srecAAAD+psplZ82aNV6MAQAA4B18gzIAALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0vy47SUlJevLJJxUUFKTmzZtr+PDhOnXqlNs+hmEoISFBoaGhCgwMVFRUlE6ePOmjxAAAwN/4ddnJyMhQTEyMDhw4oPT0dF2/fl3R0dEqLi527bN48WIlJycrNTVVBw8elNPp1KBBg1RUVOTD5AAAwF/U8XWAu9m+fbvb+9WrV6t58+bKyspS3759ZRiGUlJSNGfOHI0YMUKStHbtWjkcDq1fv14TJ070RWwAAOBH/Hpl5x8VFBRIkpo0aSJJysnJUW5urqKjo1372O129evXT5mZmXc8TmlpqQoLC91eAADAmmpM2TEMQzNmzFDv3r3VqVMnSVJubq4kyeFwuO3rcDhc224nKSlJISEhrldYWJj3ggMAAJ+qMWVnypQpOnbsmDZs2FBpm81mc3tvGEalsVvFx8eroKDA9bpw4YLpeQEAgH/w62t2bpo6dao+/fRT7du3Ty1btnSNO51OSTdWeFq0aOEaz8vLq7Tacyu73S673e69wAAAwG/49cqOYRiaMmWKtmzZot27dys8PNxte3h4uJxOp9LT011jZWVlysjIUGRk5P2OCwAA/JBfr+zExMRo/fr1+tOf/qSgoCDXdTghISEKDAyUzWZTXFycEhMTFRERoYiICCUmJqp+/foaNWqUj9MDAAB/4NdlZ+XKlZKkqKgot/HVq1drzJgxkqSZM2eqpKREkydPVn5+vnr27Km0tDQFBQXd57QAAMAf+XXZMQzjZ/ex2WxKSEhQQkKC9wMBAIAax6+v2QEAAKguyg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0yg4AALA0v35cBHynzewvfB3BY98uHOrrCAAAP8TKDgAAsDTKDgAAsDTKDgAAsDTKDgAAsDTKDgAAsDTKDgAAsDTKDgAAsDTKDgAAsDTKDgAAsDTKDgAAsDQeFwHL4BEXAIDbYWUHAABYGmUHAABYGmUHAABYGmUHAABYGmUHAABYGmUHAABYGmUHAABYGmUHAABYGmUHAABYGmUHAABYGmUHAABYGmUHAABYGmUHAABYGmUHAAB
YGmUHAABYGmUHAABYWh1fBwAeZG1mf+HrCB77duFQX0cAAI+wsgMAACyNsgMAACyNsgMAACyNsgMAACyNsgMAACyNsgMAACyNW88BeITb5QHUNKzsAAAAS6PsAAAAS6PsAAAAS+OaHQAAahCum/McKzsAAMDSKDsAAMDSKDsAAMDSKDsAAMDSKDsAAMDSKDsAAMDSuPUcgOVxqy7wYGNlBwAAWBplBwAAWJplTmOtWLFCS5Ys0aVLl9SxY0elpKSoT58+vo4FAPekJp56q4k4XfhgsMTKzqZNmxQXF6c5c+boyJEj6tOnj4YMGaLz58/7OhoAAPAxS5Sd5ORkjR8/XhMmTFD79u2VkpKisLAwrVy50tfRAACAj9X401hlZWXKysrS7Nmz3cajo6OVmZl528+UlpaqtLTU9b6goECSVFhYaHq+itKrph8TAGAOb/x339tq4u8Vb/1zvnlcwzDuul+NLzvff/+9ysvL5XA43MYdDodyc3Nv+5mkpCTNnz+/0nhYWJhXMgIA/FNIiq8TPBi8/c+5qKhIISEhd9xe48vOTTabze29YRiVxm6Kj4/XjBkzXO8rKip0+fJlNW3a9I6f8VRhYaHCwsJ04cIFBQcHm3JMVB/z4p+YF//F3Pgn5uUGwzBUVFSk0NDQu+5X48vOQw89pNq1a1daxcnLy6u02nOT3W6X3W53G2vUqJFX8gUHBz/Q/0P0V8yLf2Je/Bdz45+YF911ReemGn+Bct26ddW9e3elp6e7jaenpysyMtJHqQAAgL+o8Ss7kjRjxgyNHj1aPXr0UK9evbRq1SqdP39ekyZN8nU0AADgY5YoOy+99JJ++OEHLViwQJcuXVKnTp30n//5n2rdurXPMtntds2bN6/S6TL4FvPin5gX/8Xc+CfmxTM24+fu1wIAAKjBavw1OwAAAHdD2QEAAJZG2QEAAJZG2QEAAJZG2fGCFStWKDw8XPXq1VP37t315z//2deRHjj79u3TL37xC4WGhspms2nbtm1u2w3DUEJCgkJDQxUYGKioqCidPHnSN2EfEElJSXryyScVFBSk5s2ba/jw4Tp16pTbPsyLb6xcuVKdO3d2fUFdr1699OWXX7q2My/+ISkpSTabTXFxca4x5qZqKDsm27Rpk+Li4jRnzhwdOXJEffr00ZAhQ3T+/HlfR3ugFBcXq0uXLkpNTb3t9sWLFys5OVmpqak6ePCgnE6nBg0apKKiovuc9MGRkZGhmJgYHThwQOnp6bp+/bqio6NVXFzs2od58Y2WLVtq4cKFOnTokA4dOqRnn31Ww4YNc/3SZF587+DBg1q1apU6d+7sNs7cVJEBUz311FPGpEmT3MYee+wxY/bs2T5KBEnG1q1bXe8rKioMp9NpLFy40DX2008/GSEhIcb777/vg4QPpry8PEOSkZGRYRgG8+JvGjdubHzwwQfMix8oKioyIiIijPT0dKNfv35GbGysYRj8O+MJVnZMVFZWpqysLEVHR7uNR0dHKzMz00ep8I9ycnKUm5vrNk92u139+vVjnu6jgoICSVKTJk0kMS/+ory8XBs3blRxcbF69erFvPiBmJgYDR06VAMHDnQbZ26qzhLfoOwvvv/+e5WXl1d6AKnD4aj0oFL4zs25uN08nTt3zheRHjiGYWjGjBnq3bu3OnXqJIl58bXjx4+rV69e+umnn9SwYUNt3bpVHTp0cP3SZF58Y+PGjTp8+LAOHjxYaRv/zlQdZccLbDab23vDMCqNwfeYJ9+ZMmWKjh07pv3791faxrz4xqOPPqqjR4/qxx9/1ObNm/Xaa68pIyPDtZ15uf8uXLig2NhYpaWlqV69enfcj7n5eZzGMtFDDz2k2rVrV1rFycvLq9S84TtOp1OSmCcfmTp1qj799FPt2bNHLVu2dI0zL75Vt25dtWvXTj169FBSUpK6dOmi//iP/2BefCgrK0t5eXnq3r276tSpozp16igjI0NLly5VnTp1XP/8mZufR9kxUd26ddW9e3elp6e7jaenpysyMtJHqfCPwsPD5XQ63eaprKxMGRkZzJMXGYahKVOmaMuWLdq9e7fCw8PdtjMv/sUwDJWWljIvPjRgwAAdP35cR48edb169OihV155RUePHtXDDz/M3FQRp7FMNmPGDI0ePVo9evRQr169tGrVKp0/f16TJk3ydbQHypUrV3T69GnX+5ycHB09elRNmjRRq1atFBcXp8TEREVERCgiIkKJiYmqX7++Ro0a5cPU1hYTE6P169frT3/6k4KCglz/bzQkJESBgYGu7w9hXu6///f//p+GDBmisLAwFRUVaePGjdq7d6+2b9/OvPhQUFCQ65q2mxo0aKCmTZu6xpmbKvLdjWDWtXz5cqN169ZG3bp1jW7durlurcX9s2fPHkNSpddrr71mGMaNWzbnzZtnOJ1Ow263G3379jWOHz/u29AWd7v5kGSsXr3atQ/z4hvjxo1z/TerWbNmxoABA4y0tDTXdubFf9x667lhMDdVZTMMw/BRzwIAAPA6rtkBAACWRtkBAACWRtkBAACWRtkBAACWRtkBAACWRtkBAACWRtkBAACWRtkBAACWRtkBUKOMGTNGNptNNptN27Zt81mOvXv3unIMHz7cZzkA/DzKDgCfubW43Pp67rnn7vq55557TpcuXdKQIUPcxvfs2aMXXnhBzZo1U7169dS2bVu99NJL2rdvX5UzPf7445owYcJtt23YsEEBAQH6v//7P0VGRurSpUt68cUXq3xsAL5B2QHgUzeLy62vDRs23PUzdrtdTqdTdrvdNbZixQoNGDBATZs21aZNm5Sdna3f/e53ioyM1PTp06ucZ/z48fr973+vq1evVtr20Ucf6YUXXpDD4VDdunXldDoVGBhY9R8WgE9QdgD41M3icuurcePGHh3j/PnziouLU1xcnNauXatnn31W4eHhioyMVGxsrA4dOuS2f2Zmpvr27avAwECFhYVp2rRpKi4uliSNHj1apaWl+sMf/lDp79i9e7fGjx9fvR8YwH1H2QFQ423evFnXrl3TzJkzb7vdZrO5/nz8+HENHjxYI0aM0LFjx7Rp0ybt379fU6ZMkSQ1bdpUw4YN0+rVq92OsXr1ajkcjkqnzgD4P8oOAJ/6/PPP1bBhQ7fXO++849ExvvnmGwUHB8vpdLrGNm/e7HbM48ePS5KWLFmiUaNGKS4uThEREYqMjNTSpUu1bt06/fTTT5KkcePGad++fTp79qwkyTAMrVmzRmPGjFHt2rVN+skB3C91fB0AwIOtf//+WrlypdtYkyZNPD7Oras3kjR48GAdPXpUFy9eVFRUlMrLyyVJWVlZOn36tD755BPXvoZhqKKiQjk5OWrfvr2io6PVsmVLrV69Wu+88452796tb7/9VmPHjr2HnxCAr1F2APhUgwYN1K5du2odIyIiQgUFBcrNzXWt7jRs2FDt2rVTnTru/5mrqKjQxIkTNW3atErHadWqlSSpVq1aGjNmjNasWaP58+dr9erV6tu3ryIiIqqVE4BvcBoLQI03cuRIBQQEaNGiRT+7b7du3X
Ty5Em1a9eu0qtu3bqu/caOHav//d//1ZYtW7RlyxYuTAZqMFZ2APhUaWmpcnNz3cbq1Kmjhx56qMrHaNWqld577z3Fxsbq8uXLGjNmjMLDw3X58mV9/PHHkuS61mbWrFl6+umnFRMTo9dff10NGjRQdna20tPTtWzZMtcxw8PD9eyzz+qXv/ylAgICNHLkSBN+WgC+wMoOAJ/avn27WrRo4fbq3bu3x8eZOnWq0tLS9Pe//10jR45URESEnn/+eeXk5Gj79u16/PHHJUmdO3dWRkaG/ud//kd9+vRR165d9fbbb6tFixaVjjl+/Hjl5+fr5ZdfVv369av9swLwDZthGIavQwBAVY0ZM0Y//vijTx8VcSt/ywOgMlZ2ANQ4N29X//zzz32W4c9//rMaNmzodlcXAP/Eyg6AGiUvL0+FhYWSpBYtWqhBgwY+yVFSUqKLFy9KunHn163f8QPAv1B2AACApXEaCwAAWBplBwAAWBplBwAAWBplBwAAWBplBwAAWBplBwAAWBplBwAAWBplBwAAWNr/B84h/3jBILIsAAAAAElFTkSuQmCC", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "plt.hist(n_clusters)\n", - "plt.xlabel(\"Number of clusters\")\n", - "plt.ylabel(\"Number of events\")\n", - "plt.show()\n", - "plt.clf()\n", - "\n", - "plt.hist(n_hadrons)\n", - "plt.xlabel(\"Number of hadrons\")\n", - "plt.ylabel(\"Number of events\")\n", - "plt.show()\n", - "plt.clf()\n", - "\n", - "plt.hist(had_kin[:, 0])\n", - "plt.xlabel(\"E [GeV]\")\n", - "plt.ylabel(\"Number of hadrons\")\n", - "plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.9" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/tests/test_eval.py b/tests/test_eval.py index 7f1d96b..b8fd231 100644 --- a/tests/test_eval.py +++ b/tests/test_eval.py @@ -1,16 +1,15 @@ import os import pytest -from hydra.core.hydra_config import HydraConfig -from omegaconf import open_dict - from hadml.eval import evaluate from hadml.train import train +from hydra.core.hydra_config import HydraConfig +from omegaconf import open_dict -@pytest.mark.slow +@pytest.mark.slow() def test_train_eval(tmp_path, cfg_train, cfg_eval): - """Train for 1 epoch with `train.py` and evaluate with `eval.py`""" + """Train for 1 epoch with `train.py` and evaluate with `eval.py`.""" assert str(tmp_path) == cfg_train.paths.output_dir == cfg_eval.paths.output_dir with open_dict(cfg_train): diff --git a/tests/test_mnist_datamodule.py b/tests/test_mnist_datamodule.py index 041e488..8b2b1d4 100644 --- a/tests/test_mnist_datamodule.py +++ b/tests/test_mnist_datamodule.py @@ -2,7 +2,6 @@ import pytest import torch - from hadml.datamodules.mnist_datamodule import MNISTDataModule diff --git a/tests/test_sweeps.py b/tests/test_sweeps.py index b03c459..f91dad3 100644 --- a/tests/test_sweeps.py +++ b/tests/test_sweeps.py @@ -8,7 +8,7 @@ @RunIf(sh=True) -@pytest.mark.slow +@pytest.mark.slow() def test_experiments(tmp_path): """Test running all available experiment configs with fast_dev_run=True.""" command = [ @@ -22,7 +22,7 @@ def test_experiments(tmp_path): @RunIf(sh=True) -@pytest.mark.slow +@pytest.mark.slow() def test_hydra_sweep(tmp_path): """Test default hydra sweep.""" command = [ @@ -37,7 +37,7 @@ def test_hydra_sweep(tmp_path): @RunIf(sh=True) -@pytest.mark.slow +@pytest.mark.slow() def test_hydra_sweep_ddp_sim(tmp_path): """Test default hydra sweep with ddp sim.""" command = [ @@ -55,7 +55,7 @@ def test_hydra_sweep_ddp_sim(tmp_path): @RunIf(sh=True) -@pytest.mark.slow +@pytest.mark.slow() def test_optuna_sweep(tmp_path): """Test optuna sweep.""" command = [ @@ -71,7 +71,7 @@ def test_optuna_sweep(tmp_path): @RunIf(wandb=True, sh=True) -@pytest.mark.slow +@pytest.mark.slow() def test_optuna_sweep_ddp_sim_wandb(tmp_path): """Test optuna sweep with wandb and ddp sim.""" command = [ diff --git a/tests/test_train.py b/tests/test_train.py index 08f2160..d4ffa76 100644 --- a/tests/test_train.py +++ b/tests/test_train.py @@ -1,10 +1,10 @@ import os import pytest +from hadml.train import train from hydra.core.hydra_config import HydraConfig from omegaconf import open_dict -from hadml.train import train from tests.helpers.run_if 
import RunIf @@ -28,7 +28,7 @@ def test_train_fast_dev_run_gpu(cfg_train): @RunIf(min_gpus=1) -@pytest.mark.slow +@pytest.mark.slow() def test_train_epoch_gpu_amp(cfg_train): """Train 1 epoch on GPU with mixed-precision.""" HydraConfig().set_config(cfg_train) @@ -39,7 +39,7 @@ def test_train_epoch_gpu_amp(cfg_train): train(cfg_train) -@pytest.mark.slow +@pytest.mark.slow() def test_train_epoch_double_val_loop(cfg_train): """Train 1 epoch with validation loop twice per epoch.""" HydraConfig().set_config(cfg_train) @@ -49,7 +49,7 @@ def test_train_epoch_double_val_loop(cfg_train): train(cfg_train) -@pytest.mark.slow +@pytest.mark.slow() def test_train_ddp_sim(cfg_train): """Simulate DDP (Distributed Data Parallel) on 2 CPU processes.""" HydraConfig().set_config(cfg_train) @@ -61,7 +61,7 @@ def test_train_ddp_sim(cfg_train): train(cfg_train) -@pytest.mark.slow +@pytest.mark.slow() def test_train_resume(tmp_path, cfg_train): """Run 1 epoch, finish, and resume for another epoch.""" with open_dict(cfg_train): diff --git a/src/hadml/train.py b/train.py similarity index 62% rename from src/hadml/train.py rename to train.py index ea1f53d..ee81748 100644 --- a/src/hadml/train.py +++ b/train.py @@ -1,59 +1,24 @@ -import pyrootutils - -root = pyrootutils.setup_root( - search_from=__file__, - indicator=[".git", "pyproject.toml"], - pythonpath=True, - dotenv=True, -) - -# ------------------------------------------------------------------------------------ # -# `pyrootutils.setup_root(...)` is an optional line at the top of each entry file -# that helps to make the environment more robust and convenient -# -# the main advantages are: -# - allows you to keep all entry files in "src/" without installing project as a package -# - makes paths and scripts always work no matter where is your current work dir -# - automatically loads environment variables from ".env" file if exists -# -# how it works: -# - the line above recursively searches for either ".git" or "pyproject.toml" in present -# and parent dirs, to determine the project root dir -# - adds root dir to the PYTHONPATH (if `pythonpath=True`), so this file can be run from -# any place without installing project as a package -# - sets PROJECT_ROOT environment variable which is used in "configs/paths/default.yaml" -# to make all paths always relative to the project root -# - loads environment variables from ".env" file in root dir (if `dotenv=True`) -# -# you can remove `pyrootutils.setup_root(...)` if you: -# 1. either install project as a package or move each entry file to the project root dir -# 2. simply remove PROJECT_ROOT variable from paths in "configs/paths/default.yaml" -# 3. 
always run entry files from the project root dir -# -# https://github.com/ashleve/pyrootutils -# ------------------------------------------------------------------------------------ # - -from typing import List, Optional, Tuple +import operator +from typing import TYPE_CHECKING, Optional import hydra +import pyrootutils import pytorch_lightning as pl - -from omegaconf import DictConfig -from omegaconf import OmegaConf -OmegaConf.register_new_resolver("eval", eval) -OmegaConf.register_new_resolver("sum", lambda x, y: x + y) -OmegaConf.register_new_resolver("gen_list", lambda x, y: [x] * y) - +from omegaconf import DictConfig, OmegaConf from pytorch_lightning import Callback, LightningDataModule, LightningModule, Trainer -from pytorch_lightning.loggers import LightningLoggerBase from hadml import utils -log = utils.get_pylogger(__name__) +if TYPE_CHECKING: + from pytorch_lightning.loggers.logger import Logger as LightningLoggerBase +log = utils.get_pylogger(__name__) +OmegaConf.register_new_resolver("eval", eval) +OmegaConf.register_new_resolver("sum", operator.add) +OmegaConf.register_new_resolver("gen_list", lambda x, y: [x] * y) @utils.task_wrapper -def train(cfg: DictConfig) -> Tuple[dict, dict]: +def train(cfg: DictConfig) -> tuple[dict, dict]: """Trains the model. Can additionally evaluate on a testset, using best weights obtained during training. @@ -63,10 +28,10 @@ def train(cfg: DictConfig) -> Tuple[dict, dict]: Args: cfg (DictConfig): Configuration composed by Hydra. - Returns: + Returns + ------- Tuple[dict, dict]: Dict with metrics and dict with all instantiated objects. """ - # set seed for random number generators in pytorch, numpy and python.random if cfg.get("seed"): pl.seed_everything(cfg.seed, workers=True) @@ -78,10 +43,10 @@ def train(cfg: DictConfig) -> Tuple[dict, dict]: model: LightningModule = hydra.utils.instantiate(cfg.model) log.info("Instantiating callbacks...") - callbacks: List[Callback] = utils.instantiate_callbacks(cfg.get("callbacks")) + callbacks: list[Callback] = utils.instantiate_callbacks(cfg.get("callbacks")) log.info("Instantiating loggers...") - logger: List[LightningLoggerBase] = utils.instantiate_loggers(cfg.get("logger")) + logger: list[LightningLoggerBase] = utils.instantiate_loggers(cfg.get("logger")) log.info(f"Instantiating trainer <{cfg.trainer._target_}>") trainer: Trainer = hydra.utils.instantiate(cfg.trainer, callbacks=callbacks, logger=logger) @@ -122,9 +87,15 @@ def train(cfg: DictConfig) -> Tuple[dict, dict]: return metric_dict, object_dict +root = pyrootutils.setup_root( + search_from=__file__, + indicator=[".git", "pyproject.toml"], + pythonpath=True, + dotenv=True, +) + @hydra.main(version_base="1.2", config_path=root / "configs", config_name="train.yaml") def main(cfg: DictConfig) -> Optional[float]: - # train the model metric_dict, _ = train(cfg) From b139cc11f59723cc4d91fe62277a33d0e761174d Mon Sep 17 00:00:00 2001 From: xju Date: Tue, 2 Apr 2024 16:02:05 -0700 Subject: [PATCH 4/4] update examples --- README.md | 33 +++++--------- configs/datamodule/herwig.yaml | 2 +- configs/experiment/herwig_all_hadron.yaml | 27 ----------- src/hadml/datamodules/gan_datamodule.py | 7 ++- src/hadml/models/cgan/cond_event_gan.py | 36 ++++++++------- src/hadml/models/cgan/cond_particle_gan.py | 52 ++++++++++++---------- train.py | 8 +++- 7 files changed, 72 insertions(+), 93 deletions(-) diff --git a/README.md b/README.md index 9841c21..6eab8f5 100644 --- a/README.md +++ b/README.md @@ -10,21 +10,10 @@ The original README.md is moved to 
[`tests/README.md`](tests/README.md). git clone git@github.com:hep-lbdl/hadml.git cd hadml -# [OPTIONAL] create conda environment -conda create -n herwig python=3.9 -conda activate herwig - -# install pytorch according to instructions -# https://pytorch.org/get-started/ -# for example, for cuda 11.7 - -conda install pytorch torchvision torchaudio pytorch-cuda=11.7 -c pytorch -c nvidia - -## And install pyG -conda install pyg -c pyg - -# install requirements +# recommended: create a virtual environment (.venv) and activate it +# in VS Code this can be done with the "Python: Create Environment" command pip install -r requirements.txt +pip install -e . # If not running the code on Cori, you need to copy the data from Cori (7.1 GB), sorry for the large size. # scp cori.nersc.gov:/global/project/projectdirs/m3246/Herwig7/StartingData/allHadrons_10M_mode4_with_quark_with_pert.npz data/Herwig/ @@ -32,16 +21,16 @@ pip install -r requirements.txt ln -s /global/cfs/cdirs/m3246/Herwig7/StartingData/allHadrons_10M_mode4_with_quark_with_pert.npz data/Herwig/ # run the training for cluster-level generation -python hadml/train.py experiment=herwig_all_hadron +python train.py experiment=herwig_all_hadron # or run the training for event-level generation -python hadml/train.py experiment=herwig_event +python train.py experiment=herwig_event # to run the training for event-level generation with fitting to the nominal Herwig sample (data samples to be downloaded at https://doi.org/10.5281/zenodo.7958362 and placed at data/Herwig/raw) -python hadml/train.py experiment=herwig_event_nominal +python train.py experiment=herwig_event_nominal # to run the training for event-level generation with fitting to the variation Herwig sample (data samples to be downloaded at https://doi.org/10.5281/zenodo.7958362 and placed at data/Herwig/raw) -python hadml/train.py experiment=herwig_event_variation +python train.py experiment=herwig_event_variation ``` # A crash course on training @@ -66,7 +55,7 @@ The environment variable `HerwigData` is used to locate the data files. To perform a training, run the following command: ```bash -python hadml/train.py experiment=herwig_all_hadron +python train.py experiment=herwig_all_hadron ``` This will train a model using the `configs/experiment/herwig_all_hadron.yaml`. Results and logs will be saved in the `logs/herwigAllhadron/runs` as defined by `hydra.run.dir` in the `configs/hydra/default.yaml`. You can change the directory by changing the `task_name` in the `configs/experiment/herwig_all_hadron.yaml`. @@ -75,13 +64,13 @@ Results and logs will be saved in the `logs/herwigAllhadron/runs` as defined by It is optional to add a logger. For example, you can monitor the training performance using the [Weights & Biases](https://wandb.ai/site) logger by running: ```bash -python hadml/train.py experiment=herwig_all_hadron logger=wandb +python train.py experiment=herwig_all_hadron logger=wandb ``` ### Training with different training options [Pytorch Trainer](https://pytorch-lightning.readthedocs.io/en/latest/common/trainer.html) provides a lot of options for training.
For example, you can run the training with gradient clip by running: ```bash -python hadml/train.py experiment=herwig_all_hadron logger=wandb +trainer.gradient_clip_val=0.5 +python train.py experiment=herwig_all_hadron logger=wandb +trainer.gradient_clip_val=0.5 ``` ## Training the "Cluster Decayer" with event level information @@ -89,5 +78,5 @@ python hadml/train.py experiment=herwig_all_hadron logger=wandb +trainer.gradien Similarly, the training can be performed by running: ```bash -python hadml/train.py experiment=herwig_event logger=wandb +python train.py experiment=herwig_event logger=wandb ``` diff --git a/configs/datamodule/herwig.yaml b/configs/datamodule/herwig.yaml index 6f6bc9a..30ed429 100644 --- a/configs/datamodule/herwig.yaml +++ b/configs/datamodule/herwig.yaml @@ -5,4 +5,4 @@ pin_memory: False core_dataset: _target_: hadml.datamodules.components.herwig.Herwig data_dir: "${paths.data_dir}Herwig" - train_val_test_split: [0.96, 0.02, 0.02] + train_val_test_split: [80, 10, 10] diff --git a/configs/experiment/herwig_all_hadron.yaml b/configs/experiment/herwig_all_hadron.yaml index 1c699a9..8b5dde6 100644 --- a/configs/experiment/herwig_all_hadron.yaml +++ b/configs/experiment/herwig_all_hadron.yaml @@ -69,30 +69,3 @@ model: discriminator: hidden_dims: ${gen_list:1000,4} # = [1000,] * 4 dropout: 0 - - - # criterion: - # __target__: torch.nn.BCELoss - # reduction: "mean" - - # optimizer_generator: - # lr: 0.000001 - - # optimizer_discriminator: - # lr: 0.000005 - - -# scheduler_generator: -# # _target_: torch.optim.lr_scheduler.ExponentialLR -# _target_: torch.optim.lr_scheduler.CosineAnnealingWarmRestarts -# _partial_: true -# T_0: 1 -# T_mult: 2 -# eta_min: 0.00001 - -# scheduler_discriminator: -# _target_: torch.optim.lr_scheduler.CosineAnnealingWarmRestarts -# _partial_: true -# T_0: 1 -# T_mult: 2 -# eta_min: 0.0001 diff --git a/src/hadml/datamodules/gan_datamodule.py b/src/hadml/datamodules/gan_datamodule.py index d11624a..c7ec6b2 100644 --- a/src/hadml/datamodules/gan_datamodule.py +++ b/src/hadml/datamodules/gan_datamodule.py @@ -1,12 +1,17 @@ from typing import Any, Dict, Optional, Protocol, Tuple +import pytorch_lightning as pl import torch from pytorch_lightning import LightningDataModule -from pytorch_lightning.trainer.supporters import CombinedLoader from torch.utils.data import DataLoader, Dataset, TensorDataset, random_split from torch_geometric.data.dataset import Dataset as GeometricDataset from torch_geometric.loader import DataLoader as GeometricDataLoader +if pl.__version__ <= "2.0.0": + from pytorch_lightning.trainer.supporters import CombinedLoader +else: + from lightning.pytorch.utilities.combined_loader import CombinedLoader + from hadml.datamodules.components.utils import get_num_asked_events, process_data_split diff --git a/src/hadml/models/cgan/cond_event_gan.py b/src/hadml/models/cgan/cond_event_gan.py index dbd4f4d..85e8a52 100644 --- a/src/hadml/models/cgan/cond_event_gan.py +++ b/src/hadml/models/cgan/cond_event_gan.py @@ -1,5 +1,7 @@ +from __future__ import annotations + import os -from typing import Any, Callable, Dict, List, Optional, Tuple +from typing import Any, Callable, Dict, Optional import numpy as np import torch @@ -85,13 +87,14 @@ def __init__( # for tracking best so far self.val_min_avg_wd = MinMetric() self.val_min_avg_nll = MinMetric() + self.val_result_list = [] self.test_wd = MeanMetric() self.test_nll = MeanMetric() def forward( self, cond_info: Optional[torch.Tensor] = None - ) -> Tuple[torch.Tensor, torch.Tensor]: + ) -> 
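The version gate above compares version strings lexicographically, which works for the releases in use today but can misorder versions in general. A sketch of a sturdier check, assuming the third-party `packaging` package is available (it is not currently listed in `requirements.txt`, so treat this as an optional refinement rather than the project's actual code):

```python
import pytorch_lightning as pl
from packaging.version import Version

if Version(pl.__version__) >= Version("2.0.0"):
    # Lightning 2.x moved CombinedLoader out of trainer.supporters.
    from lightning.pytorch.utilities.combined_loader import CombinedLoader
else:
    from pytorch_lightning.trainer.supporters import CombinedLoader
```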
diff --git a/src/hadml/models/cgan/cond_event_gan.py b/src/hadml/models/cgan/cond_event_gan.py
index dbd4f4d..85e8a52 100644
--- a/src/hadml/models/cgan/cond_event_gan.py
+++ b/src/hadml/models/cgan/cond_event_gan.py
@@ -1,5 +1,7 @@
+from __future__ import annotations
+
 import os
-from typing import Any, Callable, Dict, List, Optional, Tuple
+from typing import Any, Callable, Dict, Optional
 
 import numpy as np
 import torch
@@ -85,13 +87,14 @@ def __init__(
         # for tracking best so far
         self.val_min_avg_wd = MinMetric()
         self.val_min_avg_nll = MinMetric()
+        self.val_result_list = []
 
         self.test_wd = MeanMetric()
         self.test_nll = MeanMetric()
 
     def forward(
         self, cond_info: Optional[torch.Tensor] = None
-    ) -> Tuple[torch.Tensor, torch.Tensor]:
+    ) -> tuple[torch.Tensor, torch.Tensor]:
         noise = torch.randn(len(cond_info), self.hparams.noise_dim, device=cond_info.device)
         cond_info = self.generator_prescale(cond_info)
         x_fake = conditional_cat(cond_info, noise, dim=1)
@@ -203,10 +206,6 @@ def _discriminator_step(
         self.log("lossD", loss_disc, prog_bar=True)
         return {"loss": loss_disc}
 
-    def training_epoch_end(self, outputs: List[Any]):
-        # `outputs` is a list of dicts returned from `training_step()`
-        pass
-
     def step(self, batch: Any, batch_idx: int) -> Dict[str, Any]:
         """Common steps for valiation and testing"""
         cluster = batch["cond_data"].cluster
@@ -257,9 +256,11 @@ def compare(
         hadrons_truth,
         outname,
     ) -> None:
-        """Compare the generated events with the real ones
-        Parameters:
-            perf: dictionary from the step function
+        """Compare the generated events with the real ones.
+
+        Parameters
+        ----------
+        perf: dictionary from the step function.
         """
         if self.comparison_fn is not None:
             # compare the generated events with the real ones
@@ -278,17 +279,20 @@ def compare(
                 caption=list(images.keys()),
             )
 
-    def validation_step(self, batch: Any, batch_idx: int):
-        """Validation step"""
+    def on_validation_epoch_start(self) -> None:
+        super().on_validation_epoch_start()
+        self.val_result_list = []
+
+    def validation_step(self, batch: Any, batch_idx: int) -> None:
+        """Validation step."""
         perf = self.step(batch, batch_idx)
         wd_distance = perf["wd"]
         avg_nll = perf["nll"]
         self.val_wd(wd_distance)
         self.val_nll(avg_nll)
+        self.val_result_list.append(perf)
 
-        return perf
-
-    def validation_epoch_end(self, validation_step_outputs):
+    def on_validation_epoch_end(self):
         wd_distance = self.val_wd.compute()
         avg_nll = self.val_nll.compute()
         self.val_min_avg_wd(wd_distance)
@@ -312,7 +316,7 @@ def validation_epoch_end(self, validation_step_outputs):
         hadrons_truths = []
         generated_event_label = []
         observed_event_label = []
-        for perf in validation_step_outputs:
+        for perf in self.val_result_list:
             angles_predictions = (
                 perf["angles_preds"]
                 if len(angles_predictions) == 0
@@ -381,7 +385,7 @@ def validation_epoch_end(self, validation_step_outputs):
         )
 
     def test_step(self, batch: Any, batch_idx: int):
-        """Test step"""
+        """Test step."""
         perf = self.step(batch, batch_idx)
         wd_distance = perf["wd"]
         avg_nll = perf["nll"]
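The changes above (and the matching ones in `cond_particle_gan.py` below) follow the standard migration away from `validation_epoch_end`, which PyTorch Lightning removed in 2.0: the module now buffers its own per-step results and aggregates them itself. Distilled into a minimal, self-contained sketch; the class name and the dummy metric are invented for illustration only:

```python
import pytorch_lightning as pl
import torch
from torchmetrics import MeanMetric


class EpochAggregatingModule(pl.LightningModule):
    """Minimal sketch of the Lightning >= 2.0 validation-aggregation pattern."""

    def __init__(self):
        super().__init__()
        self.val_loss = MeanMetric()
        # Replaces the `outputs` argument that validation_epoch_end used to receive.
        self.val_result_list = []

    def on_validation_epoch_start(self) -> None:
        # Start every epoch with an empty buffer.
        self.val_result_list = []

    def validation_step(self, batch, batch_idx) -> None:
        loss = torch.as_tensor(0.0)  # placeholder for the real per-batch computation
        self.val_loss(loss)
        self.val_result_list.append({"loss": loss})  # keep per-step results for later plots

    def on_validation_epoch_end(self) -> None:
        # Aggregate whatever was collected during the epoch, then reset.
        self.log("val/loss", self.val_loss.compute(), prog_bar=True)
        self.val_loss.reset()
```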
diff --git a/src/hadml/models/cgan/cond_particle_gan.py b/src/hadml/models/cgan/cond_particle_gan.py
index 97064c4..2309c82 100644
--- a/src/hadml/models/cgan/cond_particle_gan.py
+++ b/src/hadml/models/cgan/cond_particle_gan.py
@@ -1,5 +1,7 @@
+from __future__ import annotations
+
 import os
-from typing import Any, Callable, Dict, List, Optional, Tuple
+from typing import Any, Callable, Dict, Optional
 
 import numpy as np
 import ot
@@ -106,6 +108,7 @@ def __init__(
         self.val_min_avg_swd = MinMetric()
         self.val_min_avg_particle_swd = MinMetric()
         self.val_min_avg_kinematic_swd = MinMetric()
+        self.val_result_list = []
 
         self.test_swd = MeanMetric()
         self.test_particle_swd = MeanMetric()
@@ -119,14 +122,14 @@ def __init__(
         # check if generator is a particle MLP,
         # which produces particle kinematics and types in one go.
         # In MLP case, we need to split the output into two parts.
-        for name, module in self.generator.named_modules():
+        for name, _ in self.generator.named_modules():
             if "particle" in name:
                 self.use_particle_mlp = True
                 break
 
     def forward(
         self, noise: torch.Tensor, cond_info: Optional[torch.Tensor] = None
-    ) -> Tuple[torch.Tensor, torch.Tensor]:
+    ) -> tuple[torch.Tensor, torch.Tensor]:
         cond_info = self.generator_prescale(cond_info)
         x_fake = conditional_cat(cond_info, noise, dim=1)
         if self.use_particle_mlp:
@@ -139,10 +142,10 @@ def forward(
 
     def _call_mlp_particle_generator(
         self, x_fake: torch.Tensor
-    ) -> Tuple[torch.Tensor, torch.Tensor]:
+    ) -> tuple[torch.Tensor, torch.Tensor]:
         return self.generator(x_fake)
 
-    def _call_mlp_generator(self, x_fake: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:
+    def _call_mlp_generator(self, x_fake: torch.Tensor) -> tuple[torch.Tensor, torch.Tensor]:
         fakes = self.generator(x_fake)
 
         num_evts = x_fake.shape[0]
@@ -257,10 +260,11 @@ def training_step(self, batch: Any, batch_idx: int, optimizer_idx: int):
             return self._discriminator_step(
                 cond_info, particle_type_data, x_generated, x_momenta, x_type_data
             )
-
         if optimizer_idx == 1:
             return self._generator_step(particle_type_data, x_generated)
 
+        raise ValueError(f"Unknown optimizer index: {optimizer_idx}")
+
     def _update_gumbel_temp(self):
         progress = self.trainer.current_epoch / (self.trainer.max_epochs - 1)
         self.current_gumbel_temp = 1.0 - (1 - self.target_gumbel_temp) * progress
@@ -268,7 +272,7 @@ def _update_gumbel_temp(self):
 
     def _prepare_fake_batch(
         self, cond_info: Optional[torch.Tensor], num_evts: int, device: torch.device
-    ) -> Tuple[torch.Tensor, torch.Tensor]:
+    ) -> tuple[torch.Tensor, torch.Tensor]:
         noise = self.generate_noise(num_evts).to(device)
         particle_kinematics, particle_types = self(noise, cond_info)
 
@@ -347,12 +351,8 @@ def _get_grad_penalties(self, particle_type_data, x_generated, x_truth, x_type_d
         )
         return r1_grad_penalty, wasserstein_grad_penalty
 
-    def training_epoch_end(self, outputs: List[Any]):
-        # `outputs` is a list of dicts returned from `training_step()`
-        pass
-
     def step(self, batch: Any, batch_idx: int) -> Dict[str, Any]:
-        """Common steps for valiation and testing"""
+        """Common steps for validation and testing."""
         cond_info, x_angles, x_type_indices, x_momenta, event_labels = batch
         num_evts, _ = x_angles.shape
         scaled_cond_info = self.generator_prescale(cond_info)
@@ -417,9 +417,11 @@ def step(self, batch: Any, batch_idx: int) -> Dict[str, Any]:
         }
 
     def compare(self, predictions, truths, x_momenta, particle_momenta, outname) -> None:
-        """Compare the generated events with the real ones
+        """Compare the generated events with the real ones.
+
         Parameters:
-            perf: dictionary from the step function
+        ----------
+        perf: dictionary from the step function.
         """
         if self.comparison_fn is not None:
             # compare the generated events with the real ones
@@ -432,16 +434,22 @@ def compare(self, predictions, truths, x_momenta, particle_momenta, outname) ->
                 caption=list(images.keys()),
             )
 
-    def validation_step(self, batch: Any, batch_idx: int):
-        """Validation step"""
+    def validation_step(self, batch: Any, batch_idx: int) -> None:
+        """Validation step."""
         perf = self.step(batch, batch_idx)
         self.val_swd(perf["swd"])
         self.val_particle_swd(perf["particle_swd"])
         self.val_kinematic_swd(perf["kinematic_swd"])
+        self.val_result_list.append(perf)
 
-        return perf
+    def on_validation_epoch_start(self) -> None:
+        super().on_validation_epoch_start()
+        self.val_swd.reset()
+        self.val_particle_swd.reset()
+        self.val_kinematic_swd.reset()
+        self.val_result_list = []
 
-    def validation_epoch_end(self, outputs: List[Any]):
+    def on_validation_epoch_end(self):
         swd_distance = self.val_swd.compute()
         particle_swd = self.val_particle_swd.compute()
         kinematic_swd = self.val_kinematic_swd.compute()
@@ -471,10 +479,6 @@ def validation_epoch_end(self, outputs: List[Any]):
             prog_bar=True,
         )
 
-        self.val_swd.reset()
-        self.val_particle_swd.reset()
-        self.val_kinematic_swd.reset()
-
         if (
             not self.hparams.save_only_improved_plots
             or swd_distance <= self.val_min_avg_swd.compute()
@@ -487,7 +491,7 @@ def validation_epoch_end(self, outputs: List[Any]):
             x_momenta = []
             event_labels = []
             cond_info = []
-            for perf in outputs:
+            for perf in self.val_result_list:
                 predictions = (
                     perf["predictions"]
                     if len(predictions) == 0
@@ -557,7 +561,7 @@ def test_step(self, batch: Any, batch_idx: int):
 
         return perf
 
-    def test_epoch_end(self, outputs: List[Any]):
+    def test_epoch_end(self, outputs: list[Any]):
         swd_distance = self.test_swd.compute()
         particle_swd = self.test_particle_swd.compute()
         kinematic_swd = self.test_kinematic_swd.compute()
diff --git a/train.py b/train.py
index ee81748..78e87a2 100644
--- a/train.py
+++ b/train.py
@@ -4,10 +4,12 @@
 import hydra
 import pyrootutils
 import pytorch_lightning as pl
+import torch
+from hadml import utils
 from omegaconf import DictConfig, OmegaConf
 from pytorch_lightning import Callback, LightningDataModule, LightningModule, Trainer
 
-from hadml import utils
+torch.set_float32_matmul_precision("medium")
 
 if TYPE_CHECKING:
     from pytorch_lightning.loggers.logger import Logger as LightningLoggerBase
@@ -17,6 +19,7 @@
 OmegaConf.register_new_resolver("sum", operator.add)
 OmegaConf.register_new_resolver("gen_list", lambda x, y: [x] * y)
 
+
 @utils.task_wrapper
 def train(cfg: DictConfig) -> tuple[dict, dict]:
     """Trains the model. Can additionally evaluate on a testset, using best weights obtained during
@@ -94,7 +97,8 @@ def train(cfg: DictConfig) -> tuple[dict, dict]:
     dotenv=True,
 )
 
-@hydra.main(version_base="1.2", config_path=root / "configs", config_name="train.yaml")
+
+@hydra.main(version_base="1.3", config_path=str(root / "configs"), config_name="train.yaml")
 def main(cfg: DictConfig) -> Optional[float]:
     # train the model
     metric_dict, _ = train(cfg)