diff --git a/.ci/steps/setup_step.yml b/.ci/steps/setup_step.yml
new file mode 100644
index 00000000..e30de986
--- /dev/null
+++ b/.ci/steps/setup_step.yml
@@ -0,0 +1,27 @@
+
+parameters:
+ storagename: #
+ storagekey: #
+ conda: seismic-interpretation
+
+steps:
+
+- bash: |
+ echo "##vso[task.prependpath]$CONDA/bin"
+
+- bash: |
+ echo "Running setup..."
+
+ # make sure we have the latest and greatest
+ conda env create -f environment/anaconda/local/environment.yml --force
+ conda init bash
+ source activate ${{parameters.conda}}
+ pip install -e interpretation
+ pip install -e cv_lib
+ # add this if pytorch stops detecting GPU
+ # conda install pytorch torchvision cudatoolkit=9.2 -c pytorch
+
+ # copy your model files like so - using dummy file to illustrate
+ azcopy --quiet --source:https://${{parameters.storagename}}.blob.core.windows.net/models/model --source-key ${{parameters.storagekey}} --destination ./models/your_model_name
+ displayName: Setup
+ failOnStderr: True
diff --git a/.ci/steps/unit_test_steps.yml b/.ci/steps/unit_test_steps.yml
new file mode 100644
index 00000000..ea06da5c
--- /dev/null
+++ b/.ci/steps/unit_test_steps.yml
@@ -0,0 +1,18 @@
+parameters:
+ conda: seismic-interpretation
+
+steps:
+ - bash: |
+ echo "Starting unit tests"
+ source activate ${{parameters.conda}}
+ pytest --durations=0 --junitxml 'reports/test-unit.xml' cv_lib/tests/
+ echo "Unit test job passed"
+ displayName: Unit Tests Job
+ failOnStderr: True
+
+ - task: PublishTestResults@2
+ displayName: 'Publish Test Results **/test-*.xml'
+ inputs:
+ testResultsFiles: '**/test-*.xml'
+ failTaskOnFailedTests: true
+ condition: succeededOrFailed()
diff --git a/.ci/unit_test_build.yml b/.ci/unit_test_build.yml
new file mode 100644
index 00000000..ea7701de
--- /dev/null
+++ b/.ci/unit_test_build.yml
@@ -0,0 +1,28 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+# Pull request against these branches will trigger this build
+pr:
+- master
+- staging
+
+# Any commit to this branch will trigger the build.
+trigger:
+- master
+- staging
+
+jobs:
+# partially disable setup for now - done manually on build VM
+- job: DeepSeismic
+
+ displayName: Deep Seismic Main Build
+ pool:
+ name: $(AgentName)
+
+ steps:
+ - template: steps/setup_step.yml
+ parameters:
+ storagename: $(storageaccoutname)
+ storagekey: $(storagekey)
+
+ - template: steps/unit_test_steps.yml
diff --git a/.flake8 b/.flake8
new file mode 100644
index 00000000..f09cc27c
--- /dev/null
+++ b/.flake8
@@ -0,0 +1,17 @@
+[flake8]
+max-line-length = 120
+max-complexity = 18
+select = B,C,E,F,W,T4,B9
+ignore =
+ # slice notation whitespace, invalid
+ E203
+ # too many leading ‘#’ for block comment
+ E266
+ # module level import not at top of file
+ E402
+ # line break before binary operator
+ W503
+ # blank line contains whitespace
+ W293
+ # line too long
+ E501
diff --git a/.gitignore b/.gitignore
index 894a44cc..a675ed36 100644
--- a/.gitignore
+++ b/.gitignore
@@ -89,6 +89,24 @@ venv/
ENV/
env.bak/
venv.bak/
+wheels/
+
+
+.dev_env
+.azureml
+
+# Logs
+*.tfevents.*
+**/runs
+**/log
+**/output
+
+# Local source checkouts, editor and IDE settings
+interpretation/environment/anaconda/local/src/*
+interpretation/environment/anaconda/local/src/cv-lib
+.code-workspace.code-workspace
+**/.vscode
+**/.idea
# Spyder project settings
.spyderproject
@@ -97,8 +115,4 @@ venv.bak/
# Rope project settings
.ropeproject
-# mkdocs documentation
-/site
-
-# mypy
-.mypy_cache/
+*.pth
\ No newline at end of file
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 00000000..d852a98b
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,17 @@
+repos:
+- repo: https://github.com/psf/black
+ rev: stable
+ hooks:
+ - id: black
+- repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v1.2.3
+ hooks:
+ - id: flake8
+- repo: local
+ hooks:
+ - id: jupytext
+ name: jupytext
+ entry: jupytext --from ipynb --pipe black --check flake8
+ pass_filenames: true
+ files: .ipynb
+ language: python
diff --git a/.vscode/settings.json b/.vscode/settings.json
deleted file mode 100644
index 85fbc9f8..00000000
--- a/.vscode/settings.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "python.formatting.provider": "black",
- "python.linting.enabled": true,
- "python.linting.flake8Enabled": true,
- "python.linting.pylintEnabled": false,
-}
\ No newline at end of file
diff --git a/AUTHORS.md b/AUTHORS.md
new file mode 100644
index 00000000..c0011f3e
--- /dev/null
+++ b/AUTHORS.md
@@ -0,0 +1,32 @@
+Contributors
+============
+
+All names are sorted alphabetically by last name.
+
+
+Contributors (sorted alphabetically)
+-------------------------------------
+To contributors: please add your name to the list when you submit a patch to the project.
+
+* Ashish Bhatia
+* Daniel Ciborowski
+* George Iordanescu
+* Ilia Karmanov
+* Max Kaznady
+* Vanja Paunic
+* Mathew Salvaris
+
+
+## How to be a contributor to the repository
+This project welcomes contributions and suggestions. Most contributions require you to agree to a
+Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us
+the rights to use your contribution. For details, visit https://cla.opensource.microsoft.com.
+
+When you submit a pull request, a CLA bot will automatically determine whether you need to provide
+a CLA and decorate the PR appropriately (e.g., status check, comment). Simply follow the instructions
+provided by the bot. You will only need to do this once across all repos using our CLA.
+
+This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
+For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or
+contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 00000000..4c422e46
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,88 @@
+# Contribution Guidelines
+
+Contributions are welcome! Here are a few things to know:
+
+* [Steps to Contributing](#steps-to-contributing)
+* [Coding Guidelines](#coding-guidelines)
+* [Microsoft Contributor License Agreement](#microsoft-contributor-license-agreement)
+* [Code of Conduct](#code-of-conduct)
+
+## Steps to Contributing
+
+**TL;DR for contributing: We use the staging branch to land all new features and fixes. To make a contribution, please create a branch from staging, make your changes, and open a PR against staging.**
+
+Here are the basic steps to get started with your first contribution. Please reach out with any questions.
+1. Use [open issues](https://github.com/Microsoft/DeepSeismic/issues) to discuss the proposed changes. If necessary, create an issue describing the changes to collect feedback. Please also use the provided labels to tag issues so everyone can easily sort issues of interest.
+2. [Fork the repo](https://help.github.com/articles/fork-a-repo/) so you can make and test local changes.
+3. Create a new branch **from the staging branch** for the issue (please do not create a branch from master). We suggest prefixing the branch with your username followed by a descriptive title, e.g. username/update_contributing_docs (see the example below this list).
+4. Create a test that replicates the issue.
+5. Make code changes.
+6. Ensure unit tests pass and code style/formatting is consistent (TODO: add docstring links); the example below this list shows how to run the tests locally.
+7. Create a pull request against **staging** branch.
+
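+A minimal sketch of steps 3 and 6, assuming you have cloned your fork and already created the project's seismic-interpretation conda environment; the branch name is purely illustrative:
+
+```
+# step 3: branch off the latest staging
+git checkout staging
+git pull
+git checkout -b username/update_contributing_docs
+
+# step 6: run the unit tests locally, mirroring the CI unit-test step
+source activate seismic-interpretation
+pytest --durations=0 cv_lib/tests/
+```
+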
+Once the features included in a [milestone](https://github.com/Microsoft/DeepSeismic/milestones) are completed, we will merge contrib into staging. TODO: make a wiki with coding guidelines.
+
+## Coding Guidelines
+
+We strive to maintain high quality code to make the utilities in the repository easy to understand, use, and extend. We also work hard to maintain a friendly and constructive environment. We've found that having clear expectations on the development process and consistent style helps to ensure everyone can contribute and collaborate effectively.
+
+### Code formatting and style checking
+We use git hooks to automate formatting and style checking of the code. In particular, we use `black` as a code formatter, `flake8` for style checking, and the `pre-commit` Python framework, which ensures that both the formatter and the checker are run on the code during commit. If they execute with no issues, the commit is made; otherwise, the commit is denied until the stylistic or formatting issues are fixed.
+
+Please follow these instructions to set up `pre-commit` in your environment.
+
+```
+pip install pre-commit
+pre-commit install
+```
+
+The above installs the `pre-commit` package and sets up the git hooks specified in `.pre-commit-config.yaml` in your local `.git/` directory.
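+
+Once installed, the hooks run automatically on every `git commit`. To run all configured hooks against the entire repository (for example, after editing `.pre-commit-config.yaml`), you can also invoke `pre-commit` manually:
+
+```
+# check and format every file in the repository, not just staged changes
+pre-commit run --all-files
+```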
+
+## Microsoft Contributor License Agreement
+
+Most contributions require you to agree to a Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us the rights to use your contribution. For details, visit https://cla.microsoft.com.
+
+TODO: add CLA-bot
+
+## Code of Conduct
+
+This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
+
+For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
+
+Apart from the official Code of Conduct developed by Microsoft, the Computer Vision team adopts the following behaviors to ensure a great working environment:
+
+#### Do not point fingers
+Let’s be constructive.
+
+
+Some examples:
+
+"This method is missing docstrings" instead of "YOU forgot to put docstrings".
+
+
+
+#### Provide code feedback based on evidence
+
+When reviewing code, try to support your suggestions with evidence (papers, library documentation, Stack Overflow, etc.) rather than personal preference.
+
+
+Some examples:
+
+"When reviewing this code, I saw that the Python implementation the metrics are based on classes, however, [scikit-learn](https://scikit-learn.org/stable/modules/classes.html#sklearn-metrics-metrics) and [tensorflow](https://www.tensorflow.org/api_docs/python/tf/metrics) use functions. We should follow the standard in the industry."
+
+
+
+
+#### Ask questions - do not give answers
+Try to be empathic.
+
+
+Some examples:
+
+* Would it make more sense if ...?
+* Have you considered this ... ?
+
+
+
+
diff --git a/DeepSeismicLogo.jpg b/DeepSeismicLogo.jpg
new file mode 100644
index 00000000..6c68879d
Binary files /dev/null and b/DeepSeismicLogo.jpg differ
diff --git a/LICENSE b/LICENSE
index 3d8b93bc..236805ac 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,21 +1,22 @@
- MIT License
-
- Copyright (c) Microsoft Corporation.
-
- Permission is hereby granted, free of charge, to any person obtaining a copy
- of this software and associated documentation files (the "Software"), to deal
- in the Software without restriction, including without limitation the rights
- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- copies of the Software, and to permit persons to whom the Software is
- furnished to do so, subject to the following conditions:
-
- The above copyright notice and this permission notice shall be included in all
- copies or substantial portions of the Software.
-
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- SOFTWARE
+ MIT License
+
+ Copyright (c) Microsoft Corporation. All rights reserved.
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE
+
\ No newline at end of file
diff --git a/NOTICE.txt b/NOTICE.txt
new file mode 100755
index 00000000..6dc34351
--- /dev/null
+++ b/NOTICE.txt
@@ -0,0 +1,2058 @@
+NOTICES AND INFORMATION
+Do Not Translate or Localize
+
+This software incorporates material from third parties.
+Microsoft makes certain open source code available at https://3rdpartysource.microsoft.com,
+or you may send a check or money order for US $5.00, including the product name,
+the open source component name, and version number, to:
+
+Source Code Compliance Team
+Microsoft Corporation
+One Microsoft Way
+Redmond, WA 98052
+USA
+
+Notwithstanding any other terms, you may reverse engineer this software to the extent
+required to debug changes to any libraries licensed under the GNU Lesser General Public License.
+
+
+-------------------------------------------------------------------
+
+h5py 2.9.0 - BSD-2-Clause
+PyTables Copyright Statement
+Copyright (c) 2009 Darren Dale
+Copyright 2006-2007 by The HDF Group
+Copyright (c) 2006-2008 Alexander Chemeris
+Copyright (c) 2002, 2003, 2004 Francesc Altet
+Copyright 2001-2013 Python Software Foundation
+Copyright (c) 2005, 2006, 2007 Carabos Coop. V.
+Copyright 2014, Andrew Collette and contributors
+Copyright 2008-2013 Andrew Collette and contributors
+Copyright 2008-2018 Andrew Collette and contributors
+Copyright (c) 2008 Andrew Collette http://h5py.alfven.org
+Copyright (c) 2009 Andrew Collette http://h5py.alfven.org
+Copyright (c) 2008-2009 Andrew Collette http://h5py.alfven.org
+Copyright (c) 2000-2007 Marc Alexander Lehmann
+Copyright (c) 2000-2008 Marc Alexander Lehmann
+Copyright (c) 2008 Andrew Collette and contributors http://h5py.alfven.org
+Copyright 1998-2006 by the Board of Trustees of the University of Illinois.
+Copyright (c) 2008-2013 Andrew Collette and contributors http://www.h5py.org
+
+Copyright (c) 2001, 2002 Enthought, Inc.
+All rights reserved.
+
+Copyright (c) 2003-2019 SciPy Developers.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ a. Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+ b. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ c. Neither the name of Enthought nor the names of the SciPy Developers
+ may be used to endorse or promote products derived from this software
+ without specific prior written permission.
+
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
+OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+SciPy bundles a number of libraries that are compatibly licensed. We list
+these here.
+
+Name: Numpydoc
+Files: doc/sphinxext/numpydoc/*
+License: 2-clause BSD
+ For details, see doc/sphinxext/LICENSE.txt
+
+Name: scipy-sphinx-theme
+Files: doc/scipy-sphinx-theme/*
+License: 3-clause BSD, PSF and Apache 2.0
+ For details, see doc/sphinxext/LICENSE.txt
+
+Name: Six
+Files: scipy/_lib/six.py
+License: MIT
+ For details, see the header inside scipy/_lib/six.py
+
+Name: Decorator
+Files: scipy/_lib/decorator.py
+License: 2-clause BSD
+ For details, see the header inside scipy/_lib/decorator.py
+
+Name: ID
+Files: scipy/linalg/src/id_dist/*
+License: 3-clause BSD
+ For details, see scipy/linalg/src/id_dist/doc/doc.tex
+
+Name: L-BFGS-B
+Files: scipy/optimize/lbfgsb/*
+License: BSD license
+ For details, see scipy/optimize/lbfgsb/README
+
+Name: SuperLU
+Files: scipy/sparse/linalg/dsolve/SuperLU/*
+License: 3-clause BSD
+ For details, see scipy/sparse/linalg/dsolve/SuperLU/License.txt
+
+Name: ARPACK
+Files: scipy/sparse/linalg/eigen/arpack/ARPACK/*
+License: 3-clause BSD
+ For details, see scipy/sparse/linalg/eigen/arpack/ARPACK/COPYING
+
+Name: Qhull
+Files: scipy/spatial/qhull/*
+License: Qhull license (BSD-like)
+ For details, see scipy/spatial/qhull/COPYING.txt
+
+Name: Cephes
+Files: scipy/special/cephes/*
+License: 3-clause BSD
+ Distributed under 3-clause BSD license with permission from the author,
+ see https://lists.debian.org/debian-legal/2004/12/msg00295.html
+
+ Cephes Math Library Release 2.8: June, 2000
+ Copyright 1984, 1995, 2000 by Stephen L. Moshier
+
+ This software is derived from the Cephes Math Library and is
+ incorporated herein by permission of the author.
+
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ * Neither the name of the nor the
+ names of its contributors may be used to endorse or promote products
+ derived from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY
+ DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+Name: Faddeeva
+Files: scipy/special/Faddeeva.*
+License: MIT
+ Copyright (c) 2012 Massachusetts Institute of Technology
+
+ Permission is hereby granted, free of charge, to any person obtaining
+ a copy of this software and associated documentation files (the
+ "Software"), to deal in the Software without restriction, including
+ without limitation the rights to use, copy, modify, merge, publish,
+ distribute, sublicense, and/or sell copies of the Software, and to
+ permit persons to whom the Software is furnished to do so, subject to
+ the following conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+ LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+Name: qd
+Files: scipy/special/cephes/dd_*.[ch]
+License: modified BSD license ("BSD-LBNL-License.doc")
+ This work was supported by the Director, Office of Science, Division
+ of Mathematical, Information, and Computational Sciences of the
+ U.S. Department of Energy under contract numbers DE-AC03-76SF00098 and
+ DE-AC02-05CH11231.
+
+ Copyright (c) 2003-2009, The Regents of the University of California,
+ through Lawrence Berkeley National Laboratory (subject to receipt of
+ any required approvals from U.S. Dept. of Energy) All rights reserved.
+
+ 1. Redistribution and use in source and binary forms, with or
+ without modification, are permitted provided that the following
+ conditions are met:
+
+ (1) Redistributions of source code must retain the copyright
+ notice, this list of conditions and the following disclaimer.
+
+ (2) Redistributions in binary form must reproduce the copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+
+ (3) Neither the name of the University of California, Lawrence
+ Berkeley National Laboratory, U.S. Dept. of Energy nor the names
+ of its contributors may be used to endorse or promote products
+ derived from this software without specific prior written
+ permission.
+
+ 2. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ 3. You are under no obligation whatsoever to provide any bug fixes,
+ patches, or upgrades to the features, functionality or performance of
+ the source code ("Enhancements") to anyone; however, if you choose to
+ make your Enhancements available either publicly, or directly to
+ Lawrence Berkeley National Laboratory, without imposing a separate
+ written license agreement for such Enhancements, then you hereby grant
+ the following license: a non-exclusive, royalty-free perpetual license
+ to install, use, modify, prepare derivative works, incorporate into
+ other computer software, distribute, and sublicense such enhancements
+ or derivative works thereof, in binary and source code form.
+
+
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+
+Copyright (C) 2008 Stefan van der Walt , Pauli Virtanen
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ 1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ 2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
+INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
+
+
+Copyright (C) 2003-2005 Peter J. Verveer
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+
+3. The name of the author may not be used to endorse or promote
+ products derived from this software without specific prior
+ written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
+INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
+
+
+Copyright (c) 2002-2005, Jean-Sebastien Roy (js@jeannot.org)
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be included
+in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+ Qhull, Copyright (c) 1993-2015
+
+ C.B. Barber
+ Arlington, MA
+
+ and
+
+ The National Science and Technology Research Center for
+ Computation and Visualization of Geometric Structures
+ (The Geometry Center)
+ University of Minnesota
+
+ email: qhull@qhull.org
+
+This software includes Qhull from C.B. Barber and The Geometry Center.
+Qhull is copyrighted as noted above. Qhull is free software and may
+be obtained via http from www.qhull.org. It may be freely copied, modified,
+and redistributed under the following conditions:
+
+1. All copyright notices must remain intact in all files.
+
+2. A copy of this text file must be distributed along with any copies
+ of Qhull that you redistribute; this includes copies that you have
+ modified, or copies of programs or other software products that
+ include Qhull.
+
+3. If you modify Qhull, you must include a notice giving the
+ name of the person performing the modification, the date of
+ modification, and the reason for such modification.
+
+4. When distributing modified versions of Qhull, or other software
+ products that include Qhull, you must provide notice that the original
+ source code may be obtained as noted above.
+
+5. There is no warranty or other guarantee of fitness for Qhull, it is
+ provided solely "as is". Bug reports or fixes may be sent to
+ qhull_bug@qhull.org; the authors may or may not act on them as
+ they desire.
+
+
+-------------------------------------------------------------------
+
+-------------------------------------------------------------------
+
+scipy 1.3.0 - BSD-2-Clause
+(c) 2011
+(c) 2012
+(c) 2014
+copyright u'2017
+(c) Compute Hessian
+Copyright 2014 PSF.
+(c) .GT. ZERO .AND. ABS
+Copyright Gautam Sewani
+copyrighted by Alan Genz
+Copyright 2006 Johan Rade
+Copyright Paul A. Bristow
+Copyright 2006 John Maddock
+Copyright 2012 Twitter, Inc
+Copyright 2000 by Alan Genz.
+Copyright 2008 Gautam Sewani
+Copyright 2013 Andrea Gavana
+Copyright Gautam Sewani 2008
+Copyright John Maddock 2005.
+Copyright John Maddock 2006.
+Copyright John Maddock 2007.
+Copyright John Maddock 2008.
+Copyright John Maddock 2009.
+Copyright John Maddock 2010.
+Copyright John Maddock 2011.
+Copyright John Maddock 2012.
+Copyright John Maddock 2013.
+Copyright Paul Bristow 2007.
+Copyright Yosef Meller, 2009
+Copyright (c) 2006 Johan Rade
+Copyright (c) 2014 Eric Moore
+Copyright (c) Piers Lawrence.
+Copyright 2002 Pearu Peterson
+Copyright 2014, Eric W. Moore
+Copyright Xiaogang Zhang 2006
+copyright Cephes Math Library
+Copyright (c) 2008 Damian Eads
+Copyright (c) 2012 Google Inc.
+Copyright 1999 Travis Oliphant
+Copyright 2002 Gary Strangman.
+Copyright 2005 Travis Oliphant
+Copyright 2010 Paul A. Bristow
+Copyright 2011 Paul A. Bristow
+Copyright 2012 Paul A. Bristow
+Copyright John Maddock 2006-7.
+Copyright John Maddock 2007-8.
+Qhull, Copyright (c) 1993-2015
+copyrighted by Enthought, Inc.
+Copyright (c) 2006 John Maddock
+Copyright (c) 2007 John Maddock
+Copyright (c) 2011 John Maddock
+Copyright (c) 2016 Adrian Veres
+Copyright (c) Tyler Reddy, 2016
+Copyright Paul A. Bristow 2006.
+Copyright Paul A. Bristow 2007.
+Copyright Paul A. Bristow 2010.
+Copyright Paul A. Bristow 2012.
+Copyright Paul A. Bristow 2013.
+(c) Copyright Hubert Holin 2003.
+(c) Copyright John Maddock 2005.
+(c) Copyright John Maddock 2006.
+(c) Copyright John Maddock 2007.
+(c) Copyright John Maddock 2008.
+(c) Copyright John Maddock 2010.
+Copyright (c) 2007, Damian Eads.
+Copyright (c) 2013 Kenneth L. Ho
+Copyright 1991 Dieter Kraft, FHM
+Copyright Anne M. Archibald 2008
+Copyright Benjamin Sobotta 2012.
+(c) Copyright Bruno Lalande 2008.
+Copyright (c) 2006 Xiaogang Zhang
+Copyright (c) 2009 Pauli Virtanen
+Copyright (c) 2009, Motorola, Inc
+Copyright (c) 2013 Pauli Virtanen
+Copyright 2011 Paul A. Bristow To
+Copyright Paul A. Bristow 2006-7.
+(c) Copyright John Maddock 2006-7.
+(c) Copyright Paul A. Bristow 2011
+Copyright (c) 2002 Travis Oliphant
+Copyright (c) 2011 Paul A. Bristow
+Copyright (c) 2012 Paul A. Bristow
+Copyright John Maddock 2006, 2007.
+Copyright John Maddock 2006, 2011.
+Copyright John Maddock 2006, 2012.
+Copyright John Maddock 2008, 2012.
+Copyright Paul Bristow 2006, 2007.
+Copyright Paul Bristow 2007, 2011.
+Copyright (c) 1988 by Theo Jurriens
+Copyright (c) Benjamin Sobotta 2012
+Copyright (c) Pauli Virtanen, 2010.
+Copyright 2002 H Lohninger, TU Wein
+Copyright 2015 Jon Lund Steffensen.
+Copyright Thijs van den Berg, 2008.
+Copyright (c) 1993-2015 C.B. Barber.
+Copyright (c) 2007 Cybozu Labs, Inc.
+Copyright Paul A. Bristow 2009, 2011
+(c) Copyright Hubert Holin 2003-2005.
+Copyright (c) 2007 - Sebastien Fabbro
+Copyright (c) 2011 Paul A. Bristow To
+Copyright (c) 2014 Mathjax Consortium
+Copyright (c) 2015-2017 Martin Hensel
+Copyright (c) 2016 2017 Felix Lenders
+Copyright (c) Damian Eads, 2007-2008.
+Copyright Christopher Kormanyos 2013.
+Copyright Paul A. Bristow 2007, 2009.
+Copyright Paul A. Bristow 2007, 2010.
+Copyright Paul A. Bristow 2007, 2012.
+Copyright Paul A. Bristow 2008, 2009.
+Copyright (c) 2007, 2008, Damian Eads.
+Copyright (c) 2012, Jaydeep P. Bardhan
+Copyright (c) 2012, Matthew G. Knepley
+Copyright (c) 2014, Janani Padmanabhan
+Copyright 2004-2005 by Enthought, Inc.
+Copyright 2007-2011 by the Sphinx team
+Copyright 2007-2018 by the Sphinx team
+copyright 2008- s, The SciPy community
+(c) Copyright Daryle Walker 2001, 2006.
+Copyright (c) 2010 Thomas P. Robitaille
+Copyright (c) 1989-2004 Johannes Braams.
+Copyright (c) 1994 by Xerox Corporation.
+Copyright (c) 1996-2008 Rice University.
+Copyright (c) 2001, 2002 Enthought, Inc.
+Copyright (c) 2003-2005 Peter J. Verveer
+Copyright 2002-2016 The SciPy Developers
+Copyright (c) 2003-2019 SciPy Developers.
+Copyright (c) 2010-2012 Benjamin Peterson
+Copyright (c) 1990-2004 by Johannes Braams
+Copyright (c) 2005-2015, Michele Simionato
+Copyright (c) 2006-2008 Alexander Chemeris
+Copyright 1984, 1995 by Stephen L. Moshier
+Copyright 1984, 1996 by Stephen L. Moshier
+Copyright 1985 by Stephen L. Moshier Direct
+Copyright (c) 1993-2015 The Geometry Center.
+Copyright (c) 2001-2011 - Scilab Enterprises
+Copyright (c) 2010 - Jordi Gutierrez Hermoso
+Copyright (c) 2009-2017 The MathJax Consortium
+Copyright (c) 2010-2017 The MathJax Consortium
+Copyright (c) 2011-2015 The MathJax Consortium
+Copyright (c) 2011-2017 The MathJax Consortium
+Copyright (c) 2013-2017 The MathJax Consortium
+Copyright (c) 2014-2017 The MathJax Consortium
+Copyright (c) 2015-2017 The MathJax Consortium
+Copyright (c) 2016-2017 The MathJax Consortium
+Copyright J.S. Roy (js@jeannot.org), 2002-2005
+Copyright (c) 2009, Pauli Virtanen
+Copyright (c) 2015, Pauli Virtanen
+Copyright 1984, 1987, 1995 by Stephen L. Moshier
+Copyright 1984, 1987, 2000 by Stephen L. Moshier
+Copyright 1984, 1995, 2000 by Stephen L. Moshier
+Copyright 1985, 1987, 2000 by Stephen L. Moshier
+Copyright 1984, 1987 by Stephen L. Moshier Direct
+Copyright 1984, 1991 by Stephen L. Moshier Direct
+Copyright 1985, 1987 by Stephen L. Moshier Direct
+Copyright Paul A. Bristow 2007, 2009, 2010, 2012.
+Copyright (c) 2010 David Fong and Michael Saunders
+Copyright 2013, Surya Kasturi and Pauli Virtanen
+Copyright (c) 1992-2015 The University of Tennessee
+Copyright (c) 2006, Systems Optimization Laboratory
+Copyright (c) 2007, John Travers
+Copyright (c) 1998-2003 by the University of Florida.
+Copyright 1984, 1987, 1988, 2000 by Stephen L. Moshier
+Copyright 1984, 1987, 1989, 1995 by Stephen L. Moshier
+Copyright 1984, 1987, 1989, 2000 by Stephen L. Moshier
+Copyright 1984, 1987, 1992, 2000 by Stephen L. Moshier
+Copyright 1984, 1987, 1988 by Stephen L. Moshier Direct
+Copyright 1984, 1987, 1989 by Stephen L. Moshier Direct
+Copyright 1984, 1987, 1993 by Stephen L. Moshier Direct
+Copyright 1985, 1987, 1989 by Stephen L. Moshier Direct
+Copyright (c) 2012 Massachusetts Institute of Technology
+Copyright (c) 2006-2007, Robert Hetland
+Copyright (c) 2006-2015 The University of Colorado Denver.
+Copyright (c) 2002-2005, Jean-Sebastien Roy (js@jeannot.org)
+Copyright (c) 2004-2005, Jean-Sebastien Roy (js@jeannot.org)
+Copyright 1984, 1987, 1989, 1992, 2000 by Stephen L. Moshier
+Copyright 1984, 1987, 1988, 1992 by Stephen L. Moshier Direct
+Copyright 1984, 1987, 1989, 1992 by Stephen L. Moshier Direct
+Copyright (c) 2000-2015 The University of California Berkeley.
+Copyright (c) Tyler Reddy, Richard Gowers, and Max Linke, 2016
+Copyright (c) 2004 David M. Cooke
+Copyright Daryle Walker, Hubert Holin, John Maddock 2006 - 2007
+copyrighted 2004 by David M. Cooke
+Copyright (c) 2008 Brian M. Clapper , Gael Varoquaux
+Copyright (c) 2011 Kevin Dunn, Surya K, Pauli Virtanen, the Sphinx team
+Copyright 2014 by P.-G. Martinsson, V. Rokhlin, Y. Shkolnisky, and M. Tygert.
+Copyright (c) 2008 Stefan van der Walt , Pauli Virtanen
+Copyright (c) 2018 Sylvain Gubian , Yang Xiang
+Copyright (c) Tyler Reddy, Ross Hemsley, Edd Edmondson, Nikolai Nowaczyk, Joe Pitt-Francis, 2015.
+Copyright (c) 2003, The Regents of the University of California, through Lawrence Berkeley National Laboratory
+Copyright (c) 2003-2009, The Regents of the University of California, through Lawrence Berkeley National Laboratory
+Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013 Python Software Foundation
+
+Copyright (c) 2001, 2002 Enthought, Inc.
+All rights reserved.
+
+Copyright (c) 2003-2019 SciPy Developers.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ a. Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+ b. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ c. Neither the name of Enthought nor the names of the SciPy Developers
+ may be used to endorse or promote products derived from this software
+ without specific prior written permission.
+
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
+OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+SciPy bundles a number of libraries that are compatibly licensed. We list
+these here.
+
+Name: Numpydoc
+Files: doc/sphinxext/numpydoc/*
+License: 2-clause BSD
+ For details, see doc/sphinxext/LICENSE.txt
+
+Name: scipy-sphinx-theme
+Files: doc/scipy-sphinx-theme/*
+License: 3-clause BSD, PSF and Apache 2.0
+ For details, see doc/sphinxext/LICENSE.txt
+
+Name: Six
+Files: scipy/_lib/six.py
+License: MIT
+ For details, see the header inside scipy/_lib/six.py
+
+Name: Decorator
+Files: scipy/_lib/decorator.py
+License: 2-clause BSD
+ For details, see the header inside scipy/_lib/decorator.py
+
+Name: ID
+Files: scipy/linalg/src/id_dist/*
+License: 3-clause BSD
+ For details, see scipy/linalg/src/id_dist/doc/doc.tex
+
+Name: L-BFGS-B
+Files: scipy/optimize/lbfgsb/*
+License: BSD license
+ For details, see scipy/optimize/lbfgsb/README
+
+Name: SuperLU
+Files: scipy/sparse/linalg/dsolve/SuperLU/*
+License: 3-clause BSD
+ For details, see scipy/sparse/linalg/dsolve/SuperLU/License.txt
+
+Name: ARPACK
+Files: scipy/sparse/linalg/eigen/arpack/ARPACK/*
+License: 3-clause BSD
+ For details, see scipy/sparse/linalg/eigen/arpack/ARPACK/COPYING
+
+Name: Qhull
+Files: scipy/spatial/qhull/*
+License: Qhull license (BSD-like)
+ For details, see scipy/spatial/qhull/COPYING.txt
+
+Name: Cephes
+Files: scipy/special/cephes/*
+License: 3-clause BSD
+ Distributed under 3-clause BSD license with permission from the author,
+ see https://lists.debian.org/debian-legal/2004/12/msg00295.html
+
+ Cephes Math Library Release 2.8: June, 2000
+ Copyright 1984, 1995, 2000 by Stephen L. Moshier
+
+ This software is derived from the Cephes Math Library and is
+ incorporated herein by permission of the author.
+
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ * Neither the name of the nor the
+ names of its contributors may be used to endorse or promote products
+ derived from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY
+ DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+Name: Faddeeva
+Files: scipy/special/Faddeeva.*
+License: MIT
+ Copyright (c) 2012 Massachusetts Institute of Technology
+
+ Permission is hereby granted, free of charge, to any person obtaining
+ a copy of this software and associated documentation files (the
+ "Software"), to deal in the Software without restriction, including
+ without limitation the rights to use, copy, modify, merge, publish,
+ distribute, sublicense, and/or sell copies of the Software, and to
+ permit persons to whom the Software is furnished to do so, subject to
+ the following conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+ LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+Name: qd
+Files: scipy/special/cephes/dd_*.[ch]
+License: modified BSD license ("BSD-LBNL-License.doc")
+ This work was supported by the Director, Office of Science, Division
+ of Mathematical, Information, and Computational Sciences of the
+ U.S. Department of Energy under contract numbers DE-AC03-76SF00098 and
+ DE-AC02-05CH11231.
+
+ Copyright (c) 2003-2009, The Regents of the University of California,
+ through Lawrence Berkeley National Laboratory (subject to receipt of
+ any required approvals from U.S. Dept. of Energy) All rights reserved.
+
+ 1. Redistribution and use in source and binary forms, with or
+ without modification, are permitted provided that the following
+ conditions are met:
+
+ (1) Redistributions of source code must retain the copyright
+ notice, this list of conditions and the following disclaimer.
+
+ (2) Redistributions in binary form must reproduce the copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+
+ (3) Neither the name of the University of California, Lawrence
+ Berkeley National Laboratory, U.S. Dept. of Energy nor the names
+ of its contributors may be used to endorse or promote products
+ derived from this software without specific prior written
+ permission.
+
+ 2. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ 3. You are under no obligation whatsoever to provide any bug fixes,
+ patches, or upgrades to the features, functionality or performance of
+ the source code ("Enhancements") to anyone; however, if you choose to
+ make your Enhancements available either publicly, or directly to
+ Lawrence Berkeley National Laboratory, without imposing a separate
+ written license agreement for such Enhancements, then you hereby grant
+ the following license: a non-exclusive, royalty-free perpetual license
+ to install, use, modify, prepare derivative works, incorporate into
+ other computer software, distribute, and sublicense such enhancements
+ or derivative works thereof, in binary and source code form.
+
+
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+
+Copyright (C) 2008 Stefan van der Walt , Pauli Virtanen
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ 1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ 2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
+INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
+
+
+Copyright (C) 2003-2005 Peter J. Verveer
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+
+3. The name of the author may not be used to endorse or promote
+ products derived from this software without specific prior
+ written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
+INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
+
+
+Copyright (c) 2002-2005, Jean-Sebastien Roy (js@jeannot.org)
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be included
+in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+ Qhull, Copyright (c) 1993-2015
+
+ C.B. Barber
+ Arlington, MA
+
+ and
+
+ The National Science and Technology Research Center for
+ Computation and Visualization of Geometric Structures
+ (The Geometry Center)
+ University of Minnesota
+
+ email: qhull@qhull.org
+
+This software includes Qhull from C.B. Barber and The Geometry Center.
+Qhull is copyrighted as noted above. Qhull is free software and may
+be obtained via http from www.qhull.org. It may be freely copied, modified,
+and redistributed under the following conditions:
+
+1. All copyright notices must remain intact in all files.
+
+2. A copy of this text file must be distributed along with any copies
+ of Qhull that you redistribute; this includes copies that you have
+ modified, or copies of programs or other software products that
+ include Qhull.
+
+3. If you modify Qhull, you must include a notice giving the
+ name of the person performing the modification, the date of
+ modification, and the reason for such modification.
+
+4. When distributing modified versions of Qhull, or other software
+ products that include Qhull, you must provide notice that the original
+ source code may be obtained as noted above.
+
+5. There is no warranty or other guarantee of fitness for Qhull, it is
+ provided solely "as is". Bug reports or fixes may be sent to
+ qhull_bug@qhull.org; the authors may or may not act on them as
+ they desire.
+
+
+-------------------------------------------------------------------
+
+-------------------------------------------------------------------
+
+sympy 1.4 - BSD-2-Clause
+(c) A. B
+(c), cos
+(c), cot
+(c) + cos
+(c) (-1) cos
+(c) Fix Qasm
+(c) Matrix I
+Copyright 2016
+(c) + cos(a) cos
+(c) , sin(a) cos
+(c), -sin(a) cos
+(c) tan(b) + a cos
+(c), sin(a) sin(b) cos
+(c) + sin(b) cos(a) cos
+(c) tan(b) a + sin(b) cos
+(c) G PermutationGroup Permutation
+Copyright (c) 2014 Matthew Rocklin
+(c) cos(b), -sin(b) sin(a) sin(b) cos
+copyright 2019 SymPy Development Team
+Copyright 2007-2013 by the Sphinx team
+copyright 2015, SymPy Development Team
+Copyright (c) 2006-2014 SymPy developers
+(c) + (sin(a) cos(b) + sin(b) cos(a)) cos
+Copyright (c) 2001, 2002 Vasil Yaroshevich
+Copyright 2014 by the SymPy Development Team
+Copyright (c) 2006-2019 SymPy Development Team
+Copyright (c) 2008 The IPython Development Team
+Copyright (c) 2008 Jens Rasch
+CoprimeQ, Distribute, ProductLog, Floor, PolyGamma
+Copyright (c) 2006-2017 SymPy Development Team, 2013-2017 Sergey B Kirpichev
+(c) Copyright 2000-2003 Symbolic Computation Laboratory, University of Western Ontario, London, Canada N6A
+
+Copyright (c) 2001, 2002 Enthought, Inc.
+All rights reserved.
+
+Copyright (c) 2003-2019 SciPy Developers.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ a. Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+ b. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ c. Neither the name of Enthought nor the names of the SciPy Developers
+ may be used to endorse or promote products derived from this software
+ without specific prior written permission.
+
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
+OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+SciPy bundles a number of libraries that are compatibly licensed. We list
+these here.
+
+Name: Numpydoc
+Files: doc/sphinxext/numpydoc/*
+License: 2-clause BSD
+ For details, see doc/sphinxext/LICENSE.txt
+
+Name: scipy-sphinx-theme
+Files: doc/scipy-sphinx-theme/*
+License: 3-clause BSD, PSF and Apache 2.0
+ For details, see doc/sphinxext/LICENSE.txt
+
+Name: Six
+Files: scipy/_lib/six.py
+License: MIT
+ For details, see the header inside scipy/_lib/six.py
+
+Name: Decorator
+Files: scipy/_lib/decorator.py
+License: 2-clause BSD
+ For details, see the header inside scipy/_lib/decorator.py
+
+Name: ID
+Files: scipy/linalg/src/id_dist/*
+License: 3-clause BSD
+ For details, see scipy/linalg/src/id_dist/doc/doc.tex
+
+Name: L-BFGS-B
+Files: scipy/optimize/lbfgsb/*
+License: BSD license
+ For details, see scipy/optimize/lbfgsb/README
+
+Name: SuperLU
+Files: scipy/sparse/linalg/dsolve/SuperLU/*
+License: 3-clause BSD
+ For details, see scipy/sparse/linalg/dsolve/SuperLU/License.txt
+
+Name: ARPACK
+Files: scipy/sparse/linalg/eigen/arpack/ARPACK/*
+License: 3-clause BSD
+ For details, see scipy/sparse/linalg/eigen/arpack/ARPACK/COPYING
+
+Name: Qhull
+Files: scipy/spatial/qhull/*
+License: Qhull license (BSD-like)
+ For details, see scipy/spatial/qhull/COPYING.txt
+
+Name: Cephes
+Files: scipy/special/cephes/*
+License: 3-clause BSD
+ Distributed under 3-clause BSD license with permission from the author,
+ see https://lists.debian.org/debian-legal/2004/12/msg00295.html
+
+ Cephes Math Library Release 2.8: June, 2000
+ Copyright 1984, 1995, 2000 by Stephen L. Moshier
+
+ This software is derived from the Cephes Math Library and is
+ incorporated herein by permission of the author.
+
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ * Neither the name of the nor the
+ names of its contributors may be used to endorse or promote products
+ derived from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY
+ DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+Name: Faddeeva
+Files: scipy/special/Faddeeva.*
+License: MIT
+ Copyright (c) 2012 Massachusetts Institute of Technology
+
+ Permission is hereby granted, free of charge, to any person obtaining
+ a copy of this software and associated documentation files (the
+ "Software"), to deal in the Software without restriction, including
+ without limitation the rights to use, copy, modify, merge, publish,
+ distribute, sublicense, and/or sell copies of the Software, and to
+ permit persons to whom the Software is furnished to do so, subject to
+ the following conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+ LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+Name: qd
+Files: scipy/special/cephes/dd_*.[ch]
+License: modified BSD license ("BSD-LBNL-License.doc")
+ This work was supported by the Director, Office of Science, Division
+ of Mathematical, Information, and Computational Sciences of the
+ U.S. Department of Energy under contract numbers DE-AC03-76SF00098 and
+ DE-AC02-05CH11231.
+
+ Copyright (c) 2003-2009, The Regents of the University of California,
+ through Lawrence Berkeley National Laboratory (subject to receipt of
+ any required approvals from U.S. Dept. of Energy) All rights reserved.
+
+ 1. Redistribution and use in source and binary forms, with or
+ without modification, are permitted provided that the following
+ conditions are met:
+
+ (1) Redistributions of source code must retain the copyright
+ notice, this list of conditions and the following disclaimer.
+
+ (2) Redistributions in binary form must reproduce the copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+
+ (3) Neither the name of the University of California, Lawrence
+ Berkeley National Laboratory, U.S. Dept. of Energy nor the names
+ of its contributors may be used to endorse or promote products
+ derived from this software without specific prior written
+ permission.
+
+ 2. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ 3. You are under no obligation whatsoever to provide any bug fixes,
+ patches, or upgrades to the features, functionality or performance of
+ the source code ("Enhancements") to anyone; however, if you choose to
+ make your Enhancements available either publicly, or directly to
+ Lawrence Berkeley National Laboratory, without imposing a separate
+ written license agreement for such Enhancements, then you hereby grant
+ the following license: a non-exclusive, royalty-free perpetual license
+ to install, use, modify, prepare derivative works, incorporate into
+ other computer software, distribute, and sublicense such enhancements
+ or derivative works thereof, in binary and source code form.
+
+
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+
+Copyright (C) 2008 Stefan van der Walt , Pauli Virtanen
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ 1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ 2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
+INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
+
+
+Copyright (C) 2003-2005 Peter J. Verveer
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+
+3. The name of the author may not be used to endorse or promote
+ products derived from this software without specific prior
+ written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
+INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
+
+
+Copyright (c) 2002-2005, Jean-Sebastien Roy (js@jeannot.org)
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be included
+in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+ Qhull, Copyright (c) 1993-2015
+
+ C.B. Barber
+ Arlington, MA
+
+ and
+
+ The National Science and Technology Research Center for
+ Computation and Visualization of Geometric Structures
+ (The Geometry Center)
+ University of Minnesota
+
+ email: qhull@qhull.org
+
+This software includes Qhull from C.B. Barber and The Geometry Center.
+Qhull is copyrighted as noted above. Qhull is free software and may
+be obtained via http from www.qhull.org. It may be freely copied, modified,
+and redistributed under the following conditions:
+
+1. All copyright notices must remain intact in all files.
+
+2. A copy of this text file must be distributed along with any copies
+ of Qhull that you redistribute; this includes copies that you have
+ modified, or copies of programs or other software products that
+ include Qhull.
+
+3. If you modify Qhull, you must include a notice giving the
+ name of the person performing the modification, the date of
+ modification, and the reason for such modification.
+
+4. When distributing modified versions of Qhull, or other software
+ products that include Qhull, you must provide notice that the original
+ source code may be obtained as noted above.
+
+5. There is no warranty or other guarantee of fitness for Qhull, it is
+ provided solely "as is". Bug reports or fixes may be sent to
+ qhull_bug@qhull.org; the authors may or may not act on them as
+ they desire.
+
+
+-------------------------------------------------------------------
+
+-------------------------------------------------------------------
+
+dask/dask 54019e9c05134585c9c40e4195206aa78e2ea61a - BSD-3-Clause
+Copyright 2002 Gary Strangman.
+Copyright 2002-2016 The SciPy Developers
+Copyright (c) 2005-2015, NumPy Developers.
+copyright u'2014-2018, Anaconda, Inc. and contributors
+Copyright (c) 2014-2018, Anaconda, Inc. and contributors
+
+Copyright (c) . All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+ 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+
+ 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+-------------------------------------------------------------------
+
+-------------------------------------------------------------------
+
+mpmath 1.1.0 - BSD-3-Clause
+
+Copyright (c) . All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+ 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+
+ 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+-------------------------------------------------------------------
+
+-------------------------------------------------------------------
+
+numpy 1.17.0 - BSD-3-Clause
+
+Copyright (c) . All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+ 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+
+ 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+-------------------------------------------------------------------
+
+-------------------------------------------------------------------
+
+pytorch/ignite 38a4f37de759e33bc08441bde99bcb50f3d81f55 - BSD-3-Clause
+copyright 2018, Torch
+Copyright (c) 2018, PyTorch team
+Copyright (c) 2010-2017 Benjamin Peterson
+
+Copyright (c) . All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+ 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+
+ 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+-------------------------------------------------------------------
+
+-------------------------------------------------------------------
+
+hrnet/hrnet-semantic-segmentation 06142dc1c7026e256a7561c3e875b06622b5670f - MIT
+Copyright (c) 2017
+Copyright (c) Microsoft
+Copyright (c) 2019 Microsoft
+
+Copyright (c) 2010-2018 Benjamin Peterson
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+-------------------------------------------------------------------
+
+-------------------------------------------------------------------
+
+olivesgatech/facies_classification_benchmark 12102683a1ae78f8fbc953823c35a43b151194b3 - MIT
+Copyright (c) 2017 Meet Pragnesh Shah
+
+Copyright (c) 2010-2018 Benjamin Peterson
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+-------------------------------------------------------------------
+
+-------------------------------------------------------------------
+
+opesci/devito f6129286d9c0b3a8bfe07e724ac5b00dc762efee - MIT
+copyright u'2016-2019, Devito
+Copyright (c) 2016, Imperial College, London
+
+Copyright (c) 2010-2018 Benjamin Peterson
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+-------------------------------------------------------------------
+
+-------------------------------------------------------------------
+
+six 1.12.0 - MIT
+copyright u'2010-2018, Benjamin Peterson
+Copyright (c) 2010-2018 Benjamin Peterson
+
+Copyright (c) 2010-2018 Benjamin Peterson
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+-------------------------------------------------------------------
+
+-------------------------------------------------------------------
+
+waldeland/cnn-for-asi 6f985cccecf9a811565d0b7cd919412569a22b7b - MIT
+Copyright (c) 2017
+
+Copyright (c) 2010-2018 Benjamin Peterson
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+-------------------------------------------------------------------
diff --git a/README.md b/README.md
index 8cfec9e1..479e8e37 100644
--- a/README.md
+++ b/README.md
@@ -1,69 +1,402 @@
----
-page_type: sample
-languages:
-- csharp
-products:
-- dotnet
-description: "Add 150 character max description"
-urlFragment: "update-this-to-unique-url-stub"
----
-
-# DeepSeismic
-
-![Build Status](https://dev.azure.com/best-practices/deepseismic/_apis/build/status/microsoft.DeepSeismic?branchName=master)
-[![Build Status](https://dev.azure.com/best-practices/deepseismic/_apis/build/status/microsoft.DeepSeismic?branchName=master)](https://dev.azure.com/best-practices/deepseismic/_build/latest?definitionId=108&branchName=master)
-
-# Official Microsoft Sample
-
-
-
-Give a short description for your sample here. What does it do and why is it important?
-
-## Contents
-
-Outline the file contents of the repository. It helps users navigate the codebase, build configuration and any related assets.
-
-| File/folder | Description |
-|-------------------|--------------------------------------------|
-| `src` | Sample source code. |
-| `.gitignore` | Define what to ignore at commit time. |
-| `CHANGELOG.md` | List of changes to the sample. |
-| `CONTRIBUTING.md` | Guidelines for contributing to the sample. |
-| `README.md` | This README file. |
-| `LICENSE` | The license for the sample. |
-
-## Prerequisites
-
-Outline the required components and tools that a user might need to have on their machine in order to run the sample. This can be anything from frameworks, SDKs, OS versions or IDE releases.
-
-## Setup
-
-Explain how to prepare the sample once the user clones or downloads the repository. The section should outline every step necessary to install dependencies and set up any settings (for example, API keys and output folders).
-
-## Runnning the sample
-
-Outline step-by-step instructions to execute the sample and see its output. Include steps for executing the sample from the IDE, starting specific services in the Azure portal or anything related to the overall launch of the code.
-
-## Key concepts
-
-Provide users with more context on the tools and services used in the sample. Explain some of the code that is being used and how services interact with each other.
-
-## Contributing
-
-This project welcomes contributions and suggestions. Most contributions require you to agree to a
-Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us
-the rights to use your contribution. For details, visit https://cla.opensource.microsoft.com.
-
-When you submit a pull request, a CLA bot will automatically determine whether you need to provide
-a CLA and decorate the PR appropriately (e.g., status check, comment). Simply follow the instructions
-provided by the bot. You will only need to do this once across all repos using our CLA.
-
-This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
-For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or
-contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
+# DeepSeismic
+![DeepSeismic](./assets/DeepSeismicLogo.jpg )
+
+This repository shows you how to perform seismic imaging and interpretation on Azure. It empowers geophysicists and data scientists to run seismic experiments using state-of-the-art DSL-based PDE solvers and segmentation algorithms on Azure.
+
+The repository provides sample notebooks, data loaders for seismic data, utilities, and out-of-the box ML pipelines, organized as follows:
+- **sample notebooks**: these can be found in the `examples` folder - they are standard Jupyter notebooks which highlight how to use the codebase by walking the user through a set of pre-made examples
+- **experiments**: the goal is to provide runnable Python scripts which train and test (score) our machine learning models in the `experiments` folder. The models themselves are swappable, meaning a single train script can be used to run a different model on the same dataset by simply swapping out the configuration file which defines the model. Experiments are organized by model types and datasets - for example, "2D segmentation on Dutch F3 dataset", "2D segmentation on Penobscot dataset" and "3D segmentation on Penobscot dataset" are all different experiments. As another example, if one is swapping 2D segmentation models on the Dutch F3 dataset, one would just point the train and test scripts to a different configuration file within the same experiment.
+- **pip installable utilities**: we provide `cv_lib` and `deepseismic_interpretation` utilities (more info below) which are used by both sample notebooks and experiments mentioned above
+
+DeepSeismic currently focuses on Seismic Interpretation (3D segmentation aka facies classification) with experimental code provided around Seismic Imaging.
+
+### Quick Start
+
+There are two ways to get started with the DeepSeismic codebase, which currently focuses on Interpretation:
+- if you'd like to get an idea of how our interpretation (segmentation) models are used, simply review the [HRNet demo notebook](https://github.com/microsoft/DeepSeismic/blob/master/examples/interpretation/notebooks/HRNet_Penobscot_demo_notebook.ipynb)
+- to actually run the code, you'll need to set up a compute environment (which includes setting up a GPU-enabled Linux VM and downloading the appropriate Anaconda Python packages) and download the datasets which you'd like to work with - detailed steps for doing this are provided in the next `Interpretation` section below.
+
+If you run into any problems, chances are your problem has already been solved in the [Troubleshooting](#troubleshooting) section.
+
+### Pre-run notebooks
+
+Notebooks stored in the repository have their output intentionally stripped - you can find full auto-generated versions of the notebooks here:
+- **HRNet Penobscot demo**: [[HTML](https://deepseismicstore.blob.core.windows.net/shared/HRNet_Penobscot_demo_notebook.html)] [[.ipynb](https://deepseismicstore.blob.core.windows.net/shared/HRNet_Penobscot_demo_notebook.ipynb)]
+- **Dutch F3 dataset**: [[HTML](https://deepseismicstore.blob.core.windows.net/shared/F3_block_training_and_evaluation_local.html)] [[.ipynb](https://deepseismicstore.blob.core.windows.net/shared/F3_block_training_and_evaluation_local.ipynb)]
+
+### Azure Machine Learning
+[Azure Machine Learning](https://docs.microsoft.com/en-us/azure/machine-learning/) enables you to train and deploy your machine learning models and pipelines at scale, and leverage open-source Python frameworks such as PyTorch, TensorFlow, and scikit-learn. To get started with the code in this repository on Azure Machine Learning, refer to the [Azure Machine Learning How-to](https://github.com/Azure/MachineLearningNotebooks/tree/master/how-to-use-azureml).
+
+## Interpretation
+For seismic interpretation, the repository provides extensible machine learning pipelines that show how you can leverage state-of-the-art segmentation algorithms (UNet, SEResNET, HRNet) for seismic interpretation, along with benchmarking results from running these algorithms on various seismic datasets (Dutch F3 and Penobscot).
+
+To run the examples available in the repo, please follow the instructions below to:
+1) [Set up the environment](#setting-up-environment)
+2) [Download the data sets](#dataset-download-and-preparation)
+3) [Run example notebooks and scripts](#run-examples)
+
+### Setting up Environment
+
+Follow the instructions below to read about the compute requirements and install the required libraries.
+
+
+#### Compute environment
+
+We recommend using a virtual machine to run the example notebooks and scripts. Specifically, you will need a GPU-powered Linux machine, as this repository is developed and tested on __Linux only__. The easiest way to get started is to use the [Azure Data Science Virtual Machine (DSVM) for Linux (Ubuntu)](https://docs.microsoft.com/en-us/azure/machine-learning/data-science-virtual-machine/dsvm-ubuntu-intro). This VM comes with all the system requirements needed to create the conda environment described below and then run the notebooks in this repository.
+
+For this repo, we recommend selecting a multi-GPU Ubuntu VM of type [Standard_NC12](https://docs.microsoft.com/en-us/azure/virtual-machines/windows/sizes-gpu#nc-series). The machine is powered by NVIDIA Tesla K80 GPUs (or V100 GPUs for the NCv2 series), which are available in most Azure regions.
+
+> NOTE: For users new to Azure, your subscription may not come with a quota for GPUs. You may need to go into the Azure portal to increase your quota for GPU VMs. Learn more about how to do this here: https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits.
+
+
+#### Package Installation
+
+To install the packages contained in this repository, navigate to the directory where you cloned the DeepSeismic repo and run:
+```bash
+conda env create -f environment/anaconda/local/environment.yml
+```
+This will create the appropriate conda environment to run experiments.
+
+Next you will need to install the common package for interpretation:
+```bash
+conda activate seismic-interpretation
+pip install -e interpretation
+```
+
+Then you will also need to install `cv_lib` which contains computer vision related utilities:
+```bash
+pip install -e cv_lib
+```
+
+Both packages are installed in developer mode with the `-e` flag. This means that to update them you can simply go to the corresponding folder and pull the appropriate commit or branch.
+
+During development, in case you need to update the environment due to a conda env file change, you can run
+```
+conda env update --file environment/anaconda/local/environment.yml
+```
+from the root of DeepSeismic repo.
+
+
+### Dataset download and preparation
+
+This repository provides examples on how to run seismic interpretation on two publicly available annotated seismic datasets: [Penobscot](https://zenodo.org/record/1341774) and [F3 Netherlands](https://github.com/olivesgatech/facies_classification_benchmark). Their respective sizes (uncompressed on disk in your folder after downloading and pre-processing) are:
+- **Penobscot**: 7.9 GB
+- **Dutch F3**: 2.2 GB
+
+Please make sure you have enough disk space to download either dataset.
+
+We have experiments and notebooks which use either one dataset or the other. Depending on which experiment/notebook you want to run you'll need to download the corresponding dataset. We suggest you start by looking at [HRNet demo notebook](https://github.com/microsoft/DeepSeismic/blob/master/examples/interpretation/notebooks/HRNet_Penobscot_demo_notebook.ipynb) which requires the Penobscot dataset.
+
+#### Penobscot
+To download the Penobscot dataset run the [download_penobscot.sh](scripts/download_penobscot.sh) script, e.g.
+
+```
+data_dir="$HOME/data/penobscot"
+mkdir -p "$data_dir"
+./scripts/download_penobscot.sh "$data_dir"
+```
+
+Note that the specified download location must have appropriate `write` permissions. On some Linux virtual machines you may want to place the data in the `/mnt` or `/data` folder, in which case you need to make sure you have write access to it.
+
+To make things easier, we suggested above that you use your home directory; note, however, that you might run out of space there. If this happens on an [Azure Data Science Virtual Machine](https://azure.microsoft.com/en-us/services/virtual-machines/data-science-virtual-machines/) you can resize the disk quite easily from the [Azure Portal](https://portal.azure.com) - please see the [Troubleshooting](#troubleshooting) section at the end of this README for [how to do this](#how-to-resize-data-science-virtual-machine-disk).
+
+To prepare the data for the experiments (e.g. split into train/val/test), please run the following script (modifying arguments as desired):
+
+```
+python scripts/prepare_penobscot.py split_inline --data-dir="$HOME/data/penobscot" --val-ratio=.1 --test-ratio=.2
+```
+
+#### F3 Netherlands
+To download the F3 Netherlands dataset for 2D experiments, please follow the data download instructions at
+[this github repository](https://github.com/yalaudah/facies_classification_benchmark) (section Dataset).
+
+Once you've downloaded the dataset, make sure to create an empty `splits` directory under the downloaded `data` directory; you can re-use the same data directory as the one created earlier for the Penobscot dataset. This is where your training/test/validation splits will be saved.
+
+```
+cd data
+mkdir splits
+```
+
+At this point, your `data` directory tree should look like this:
+
+```
+data
+├── splits
+├── test_once
+│ ├── test1_labels.npy
+│ ├── test1_seismic.npy
+│ ├── test2_labels.npy
+│ └── test2_seismic.npy
+└── train
+ ├── train_labels.npy
+ └── train_seismic.npy
+```
+
+To prepare the data for the experiments (e.g. split into train/val/test), please run the following script:
+
+```
+# For section-based experiments
+python scripts/prepare_dutchf3.py split_train_val section --data-dir=/mnt/dutchf3
+
+
+# For patch-based experiments
+python scripts/prepare_dutchf3.py split_train_val patch --data-dir=/mnt/dutchf3 --stride=50 --patch=100
+
+```
+
+Refer to the script itself for more argument options.
+
+### Run Examples
+
+#### Notebooks
+We provide example notebooks under `examples/interpretation/notebooks/` to demonstrate how to train seismic interpretation models and evaluate them on Penobscot and F3 datasets.
+
+Make sure to run the notebooks in the conda environment we previously set up (`seismic-interpretation`). To register the conda environment in Jupyter, please run:
+
+```
+python -m ipykernel install --user --name seismic-interpretation
+```
+
+#### Experiments
+
+We also provide scripts for a number of experiments we conducted using different segmentation approaches. These experiments are available under `experiments/interpretation`, and can be used as examples. Within each experiment, start from the `train.sh` and `test.sh` scripts under the `local/` (single GPU) and `distributed/` (multiple GPUs) directories, which invoke the corresponding Python scripts, `train.py` and `test.py`. Take a look at the experiment configurations (see the Configuration Files section below) for experiment options and modify them if necessary.
+
+Please refer to individual experiment README files for more information.
+- [Penobscot](experiments/interpretation/penobscot/README.md)
+- [F3 Netherlands Patch](experiments/interpretation/dutchf3_patch/README.md)
+- [F3 Netherlands Section](experiments/interpretation/dutchf3_section/README.md)
+
+#### Configuration Files
+We use the [YACS](https://github.com/rbgirshick/yacs) configuration library to manage configuration options for the experiments. There are three ways to pass arguments to the experiment scripts (e.g. `train.py` or `test.py`); a short sketch of how the three sources are merged follows this list:
+
+- __default.py__ - A project config file `default.py` is a one-stop reference point for all configurable options, and provides sensible defaults for all arguments. If no arguments are passed to the `train.py` or `test.py` script (e.g. `python train.py`), the arguments are loaded from `default.py` by default. Please take a look at `default.py` to familiarize yourself with the experiment arguments used by the script you run.
+
+- __yml config files__ - YAML configuration files under `configs/` are typically created one for each experiment. These are meant to be used for repeatable experiment runs and reproducible settings. Each configuration file only overrides the options that are changing in that experiment (e.g. options loaded from `default.py` during an experiment run will be overridden by arguments loaded from the yaml file). As an example, to use a yml configuration file with the training script, run:
+
+ ```
+ python train.py --cfg "configs/hrnet.yaml"
+ ```
+
+- __command line__ - Finally, options can be passed in through the `options` argument, and those will override arguments loaded from the configuration file. We created CLIs for all our scripts (using the Python Fire library), so you can pass these options via command-line arguments, like so:
+
+ ```
+ python train.py DATASET.ROOT "/mnt/dutchf3" TRAIN.END_EPOCH 10
+ ```
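+
+To illustrate how these three sources combine, below is a minimal, self-contained sketch of the YACS merge order. The keys and values here are illustrative examples only; in the real experiments the defaults live in `default.py` and the merging is done by its `update_config` helper.
+
+```python
+from yacs.config import CfgNode as CN
+
+# 1) project defaults (in the real experiments these live in default.py)
+cfg = CN()
+cfg.DATASET = CN()
+cfg.DATASET.ROOT = ""
+cfg.TRAIN = CN()
+cfg.TRAIN.END_EPOCH = 484
+
+# 2) a yml config file overrides only the options it lists
+with open("example_config.yaml", "w") as f:
+    f.write("TRAIN:\n  END_EPOCH: 300\n")
+cfg.merge_from_file("example_config.yaml")
+
+# 3) command-line style options override both of the above
+cfg.merge_from_list(["DATASET.ROOT", "/mnt/dutchf3", "TRAIN.END_EPOCH", "10"])
+
+cfg.freeze()
+print(cfg.TRAIN.END_EPOCH)  # -> 10
+print(cfg.DATASET.ROOT)     # -> /mnt/dutchf3
+```
+
+The last value merged wins, which is why command-line options take precedence over both the yml file and the defaults.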
+
+
+### Pretrained Models
+
+#### HRNet
+
+To achieve the same results as the benchmarks above you will need to download the HRNet model [pretrained](https://github.com/HRNet/HRNet-Image-Classification) on ImageNet. We are specifically using the [HRNet-W48-C](https://1drv.ms/u/s!Aus8VCZ_C_33dKvqI6pBZlifgJk) pre-trained model; other HRNet variants are also available [here](https://github.com/HRNet/HRNet-Image-Classification) - you can navigate to those from the [main HRNet landing page](https://github.com/HRNet/HRNet-Object-Detection) for object detection.
+
+Unfortunately the OneDrive location which is used to host the model uses a temporary authentication token, so there is no way for us to script the model download. There are two ways to upload and use the pre-trained HRNet model on the DS VM:
+- download the model to your local drive using a web browser of your choice and then upload the model to the DS VM using something like `scp`; navigate to the Portal and copy the DS VM's public IP from the Overview panel of your DS VM (you can search for your DS VM by name in the search bar of the Portal), then use `scp local_model_location username@DS_VM_public_IP:./model/save/path` to upload
+- alternatively, you can use the same public IP to open a remote desktop session over SSH to your Linux VM using [X2Go](https://wiki.x2go.org/doku.php/download:start): this way you can open a web browser on the VM itself and download the model directly to the VM's disk
+
+
+### Viewers (optional)
+
+For seismic interpretation (segmentation), if you want to visualize cross-sections of a 3D volume (both the input velocity model and the segmented output) you can use
+[segyviewer](https://github.com/equinor/segyviewer). To install and use segyviewer, please follow the instructions below.
+
+#### segyviewer
+
+To install [segyviewer](https://github.com/equinor/segyviewer) run:
+```bash
+conda create -n segyviewer python=2.7
+conda activate segyviewer
+conda install -c anaconda pyqt=4.11.4
+pip install segyviewer
+```
+
+To visualize cross-sections of a 3D volume, you can run
+[segyviewer](https://github.com/equinor/segyviewer) like so:
+```bash
+segyviewer "${HOME}/data/dutchf3/data.segy"
+```
+
+### Benchmarks
+
+#### Dense Labels
+
+This section contains benchmarks of different algorithms for seismic interpretation on 3D seismic datasets with densely-annotated data.
+
+Below are the results from the models contained in this repo. To reproduce them, follow the instructions in the [Reproduce benchmarks](#reproduce-benchmarks) section below; for running them on your own dataset, refer to the individual experiment README files linked above.
+
+#### Netherlands F3
+
+| Source | Experiment | PA | FW IoU | MCA |
+|------------------|-----------------------------------|-------------|--------------|------------|
+| Alaudah et al.   | Section-based                     | 0.905       | 0.817        | 0.832      |
+|                  | Patch-based                       | 0.852       | 0.743        | 0.689      |
+| DeepSeismic      | Patch-based+fixed                 | 0.869       | 0.761        | 0.775      |
+|                  | SEResNet UNet+section depth       | 0.917       | 0.849        | 0.834      |
+|                  | HRNet(patch)+patch_depth          | 0.908       | 0.843        | 0.837      |
+|                  | HRNet(patch)+section_depth        | 0.928       | 0.871        | 0.871      |
+
+#### Penobscot
+
+Trained and tested on the full dataset. Inlines with artefacts were left in for training, validation and testing.
+The dataset was split 70% training, 10% validation and 20% test. The results below are from the test set.
+
+| Source | Experiment | PA | IoU | MCA |
+|------------------|-------------------------------------|-------------|--------------|------------|
+| DeepSeismic      | SEResNet UNet + section depth       | 1.0         | 0.98         | 0.99       |
+|                  | HRNet(patch) + section depth        | 1.0         | 0.97         | 0.98       |
+
+![Best Penobscot SEResNet](assets/penobscot_seresnet_best.png "Best performing inlines, Mask and Predictions from SEResNet")
+![Worst Penobscot SEResNet](assets/penobscot_seresnet_worst.png "Worst performing inlines Mask and Predictions from SEResNet")
+
+#### Reproduce benchmarks
+In order to reproduce the benchmarks you will need to navigate to the [experiments](experiments) folder, where each of the experiments
+is split into its own folder. To run the Netherlands F3 experiment, navigate to the [dutchf3_patch/local](experiments/dutchf3_patch/local) folder. There you will find a training script ([train.sh](experiments/dutchf3_patch/local/train.sh))
+which will run the training for any configuration you pass in. Once you have run the training you will need to run the [test.sh](experiments/dutchf3_patch/local/test.sh) script. Make sure you specify
+the path to the best performing model from your training run, either by passing it in as an argument or by altering the YACS config file.
+
+To reproduce the benchmarks for the Penobscot dataset, follow the same instructions but navigate to the [penobscot](penobscot) folder.
+
+#### Scripts
+- [parallel_training.sh](scripts/parallel_training.sh): Script to launch multiple jobs in parallel. Used mainly for local hyperparameter tuning. Look at the script for further instructions
+
+- [kill_windows.sh](scripts/kill_windows.sh): Script to kill multiple tmux windows. Used to kill jobs that parallel_training.sh might have started.
+
+
+## Contributing
+
+This project welcomes contributions and suggestions. Most contributions require you to agree to a Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us the rights to use your contribution. For details, visit https://cla.opensource.microsoft.com.
+
+### Submitting a Pull Request
+
+We try to keep the repo in a clean state, which means that we only enable read access to the repo - read access still enables one to submit a PR or an issue. To do so, fork the repo, and submit a PR from a branch in your forked repo into our staging branch.
+
+When you submit a pull request, a CLA bot will automatically determine whether you need to provide a CLA and decorate the PR appropriately (e.g., status check, comment). Simply follow the instructions provided by the bot. You will only need to do this once across all repos using our CLA.
+
+This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
+
+## Build Status
+| Build | Branch | Status |
+| --- | --- | --- |
+| **Legal Compliance** | staging | [![Build Status](https://dev.azure.com/best-practices/deepseismic/_apis/build/status/microsoft.ComponentGovernance%20(seismic-deeplearning)?branchName=staging)](https://dev.azure.com/best-practices/deepseismic/_build/latest?definitionId=124&branchName=staging) |
+| **Legal Compliance** | master | [![Build Status](https://dev.azure.com/best-practices/deepseismic/_apis/build/status/microsoft.ComponentGovernance%20(seismic-deeplearning)?branchName=master)](https://dev.azure.com/best-practices/deepseismic/_build/latest?definitionId=124&branchName=master) |
+| **Tests** | staging | [![Build Status](https://dev.azure.com/best-practices/deepseismic/_apis/build/status/microsoft.Notebooks%20(seismic-deeplearning)?branchName=staging)](https://dev.azure.com/best-practices/deepseismic/_build/latest?definitionId=125&branchName=staging) |
+| **Tests** | master | [![Build Status](https://dev.azure.com/best-practices/deepseismic/_apis/build/status/microsoft.Notebooks%20(seismic-deeplearning)?branchName=master)](https://dev.azure.com/best-practices/deepseismic/_build/latest?definitionId=125&branchName=master) |
+| **Notebook Tests** | staging | [![Build Status](https://dev.azure.com/best-practices/deepseismic/_apis/build/status/microsoft.Tests%20(seismic-deeplearning)?branchName=staging)](https://dev.azure.com/best-practices/deepseismic/_build/latest?definitionId=126&branchName=staging) |
+| **Notebook Tests** | master | [![Build Status](https://dev.azure.com/best-practices/deepseismic/_apis/build/status/microsoft.Tests%20(seismic-deeplearning)?branchName=master)](https://dev.azure.com/best-practices/deepseismic/_build/latest?definitionId=126&branchName=master) |
+
+
+# Troubleshooting
+
+For Data Science Virtual Machine conda package installation issues, make sure you first locate the Anaconda installation on the DSVM, for example by running:
+```bash
+which python
+```
+A typical output will be:
+```bash
+someusername@somevm:/projects/DeepSeismic$ which python
+/anaconda/envs/py35/bin/python
+```
+which indicates that the Anaconda folder is __/anaconda__. We'll refer to this location in the instructions below, but you should update the commands to match your local Anaconda folder.
+
+
+#### Data Science Virtual Machine conda package installation errors
+
+ It could happen that you don't have sufficient permissions to run conda commands or install packages in the Anaconda packages directory. To remedy the situation, please run the following commands:
+ ```bash
+ rm -rf /anaconda/pkgs/*
+ sudo chown -R $(whoami) /anaconda
+ ```
+
+ After these commands complete, try installing the packages again.
+
+
+
+
+#### Data Science Virtual Machine conda package installation warnings
+
+ While creating the conda environment defined in environment/anaconda/local/environment.yml on an Ubuntu DSVM, you may get multiple warnings like so:
+ ```
+ WARNING conda.gateways.disk.delete:unlink_or_rename_to_trash(140): Could not remove or rename /anaconda/pkgs/ipywidgets-7.5.1-py_0/site-packages/ipywidgets-7.5.1.dist-info/LICENSE. Please remove this file manually (you may need to reboot to free file handles)
+ ```
+
+ If this happens, similarly to the instructions above, stop the conda environment creation (type ```Ctrl+C```) and then recursively change the ownership of the /anaconda directory from root to the current user by running this command:
+
+ ```bash
+ sudo chown -R $USER /anaconda
+ ```
+
+ After this command completes, try creating the conda environment defined in __environment/anaconda/local/environment.yml__ again.
+
+
+
+
+#### Model training or scoring is not using GPU
+
+ To see whether the GPU is being used while your model is being trained or used for inference, run
+ ```bash
+ nvidia-smi
+ ```
+ and confirm that you see your Python process using the GPU.
+
+ If not, you may want to try reverting to an older version of CUDA for use with PyTorch (by default we use CUDA 10). After the environment has been set up, activate it with `conda activate seismic-interpretation` and run the following command:
+ ```bash
+ conda install pytorch torchvision cudatoolkit=9.2 -c pytorch
+ ```
+
+ To test whether this setup worked, open `ipython` right afterwards and execute the following code
+ ```python
+ import torch
+ torch.cuda.is_available()
+ ```
+
+ The output should say "True".
+
+ If the output is still "False", you may want to try setting your environment variable to specify the device manually - to test this, start a new `ipython` session and type:
+ ```python
+ import os
+ os.environ['CUDA_VISIBLE_DEVICES']='0'
+ import torch
+ torch.cuda.is_available()
+ ```
+
+ Output should say "True" this time. If it does, you can make the change permanent by adding
+ ```bash
+ export CUDA_VISIBLE_DEVICES=0
+ ```
+ to your `$HOME/.bashrc` file.
+
+
+
+
+#### GPU out of memory errors
+
+ You should be able to see how much GPU memory your process is using by running
+ ```bash
+ nvidia-smi
+ ```
+ and seeing if this amount is close to the physical memory limit specified by the GPU manufacturer.
+
+ If you are getting close to the memory limit, you may want to lower the batch size in the model configuration file - specifically the `TRAIN.BATCH_SIZE_PER_GPU` and `VALIDATION.BATCH_SIZE_PER_GPU` settings (these can also be overridden on the command line, as described in the Configuration Files section above).
+
+
+
+
+#### How to resize Data Science Virtual Machine disk
+
+ 1. Go to the [Azure Portal](https://portal.azure.com) and find your virtual machine by typing its name in the search bar at the very top of the page.
+
+ 2. In the Overview panel on the left hand side, click the Stop button to stop the virtual machine.
+
+ 3. Next, select Disks in the same panel on the left hand side.
+
+ 4. Click the name of the OS Disk - you'll be taken to the Disk view. From this view, select Configuration on the left hand side, increase the Size in GB, and hit the Save button.
+
+ 5. Navigate back to the Virtual Machine view in Step 2 and click the Start button to start the virtual machine.
+
+
+
+
+
+
+
diff --git a/WORKERS b/WORKERS
new file mode 100644
index 00000000..633ed717
--- /dev/null
+++ b/WORKERS
@@ -0,0 +1,51 @@
+AUTO_RESUME: False
+CUDNN:
+ BENCHMARK: True
+ DETERMINISTIC: False
+ ENABLED: True
+DATASET:
+ CLASS_WEIGHTS: [0.7151, 0.8811, 0.5156, 0.9346, 0.9683, 0.9852]
+ NUM_CLASSES: 6
+ ROOT:
+GPUS: (0,)
+LOG_CONFIG: logging.conf
+LOG_DIR:
+MODEL:
+ IN_CHANNELS: 1
+ NAME: patch_deconvnet
+OUTPUT_DIR: output
+PIN_MEMORY: True
+PRINT_FREQ: 20
+SEED: 42
+TEST:
+ CROSSLINE: True
+ INLINE: True
+ MODEL_PATH:
+ SPLIT: Both
+ TEST_STRIDE: 10
+TRAIN:
+ AUGMENTATION: True
+ AUGMENTATIONS:
+ PAD:
+ HEIGHT: 256
+ WIDTH: 256
+ RESIZE:
+ HEIGHT: 200
+ WIDTH: 200
+ BATCH_SIZE_PER_GPU: 32
+ BEGIN_EPOCH: 0
+ DEPTH: no
+ END_EPOCH: 484
+ MAX_LR: 0.01
+ MEAN: 0.0009997
+ MIN_LR: 0.001
+ MODEL_DIR: models
+ MOMENTUM: 0.9
+ PATCH_SIZE: 99
+ SNAPSHOTS: 5
+ STD: 0.20977
+ STRIDE: 50
+ WEIGHT_DECAY: 0.0001
+VALIDATION:
+ BATCH_SIZE_PER_GPU: 32
+WORKERS: 4
diff --git a/assets/DeepSeismicLogo.jpg b/assets/DeepSeismicLogo.jpg
new file mode 100644
index 00000000..6c68879d
Binary files /dev/null and b/assets/DeepSeismicLogo.jpg differ
diff --git a/assets/penobscot_seresnet_best.png b/assets/penobscot_seresnet_best.png
new file mode 100644
index 00000000..4f8f3beb
Binary files /dev/null and b/assets/penobscot_seresnet_best.png differ
diff --git a/assets/penobscot_seresnet_worst.png b/assets/penobscot_seresnet_worst.png
new file mode 100644
index 00000000..369ec4e3
Binary files /dev/null and b/assets/penobscot_seresnet_worst.png differ
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
deleted file mode 100644
index aa912913..00000000
--- a/azure-pipelines.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-# Starter pipeline
-# Start with a minimal pipeline that you can customize to build and deploy your code.
-# Add steps that build, run tests, deploy, and more:
-# https://aka.ms/yaml
-
-trigger:
-- master
-
-pool:
- vmImage: 'ubuntu-latest'
-
-steps:
-- script: echo Hello, world!
- displayName: 'Run a one-line script'
-
-- script: |
- echo Add other tasks to build, test, and deploy your project.
- echo See https://aka.ms/yaml
- displayName: 'Run a multi-line script'
diff --git a/bin/ds b/bin/ds
deleted file mode 100644
index 3bd01081..00000000
--- a/bin/ds
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env python
-
-from deepseismic import cli
-
-if __name__ == "__main__":
- cli.main()
diff --git a/cgmanifest.json b/cgmanifest.json
new file mode 100644
index 00000000..d647c543
--- /dev/null
+++ b/cgmanifest.json
@@ -0,0 +1,64 @@
+{"Registrations":[
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "repositoryUrl": "https://github.com/olivesgatech/facies_classification_benchmark",
+ "commitHash": "12102683a1ae78f8fbc953823c35a43b151194b3"
+ }
+ },
+ "license": "MIT"
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "repositoryUrl": "https://github.com/waldeland/CNN-for-ASI",
+ "commitHash": "6f985cccecf9a811565d0b7cd919412569a22b7b"
+ }
+ },
+ "license": "MIT"
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "repositoryUrl": "https://github.com/opesci/devito",
+ "commitHash": "f6129286d9c0b3a8bfe07e724ac5b00dc762efee"
+ }
+ },
+ "license": "MIT"
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "repositoryUrl": "https://github.com/pytorch/ignite",
+ "commitHash": "38a4f37de759e33bc08441bde99bcb50f3d81f55"
+ }
+ },
+ "license": "BSD-3-Clause"
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "repositoryUrl": "https://github.com/HRNet/HRNet-Semantic-Segmentation",
+ "commitHash": "06142dc1c7026e256a7561c3e875b06622b5670f"
+ }
+ },
+ "license": "MIT"
+ },
+ {
+ "component": {
+ "type": "git",
+ "git": {
+ "repositoryUrl": "https://github.com/dask/dask",
+ "commitHash": "54019e9c05134585c9c40e4195206aa78e2ea61a"
+ }
+ },
+ "license": "IPL-1.0"
+ }
+ ],
+ "Version": 1
+}
\ No newline at end of file
diff --git a/contrib/README.md b/contrib/README.md
new file mode 100644
index 00000000..a286b0f3
--- /dev/null
+++ b/contrib/README.md
@@ -0,0 +1,8 @@
+### Contrib folder
+
+Code in this folder has not been tested and is meant for exploratory work only.
+
+We encourage submissions to the contrib folder; once they are well tested, do submit a pull request and work with the repository owners to graduate them to the main DeepSeismic repository.
+
+Thank you.
+
diff --git a/contrib/benchmarks/README.md b/contrib/benchmarks/README.md
new file mode 100644
index 00000000..14a86937
--- /dev/null
+++ b/contrib/benchmarks/README.md
@@ -0,0 +1,6 @@
+# Benchmarks
+
+In this folder we show benchmarks using different algorithms. To facilitate the benchmark computation, we provide a set of wrapper functions that can be found in the file [benchmark_utils.py](benchmark_utils.py).
+
+TODO
+
diff --git a/contrib/benchmarks/benchmark_utils.py b/contrib/benchmarks/benchmark_utils.py
new file mode 100644
index 00000000..e69de29b
diff --git a/contrib/experiments/interpretation/dutchf3_voxel/README.md b/contrib/experiments/interpretation/dutchf3_voxel/README.md
new file mode 100644
index 00000000..f794fb37
--- /dev/null
+++ b/contrib/experiments/interpretation/dutchf3_voxel/README.md
@@ -0,0 +1,17 @@
+First, make sure that `${HOME}/data/dutch_f3` folder exists and you have write access.
+
+Next, to get the main input dataset which is the [Dutch F3 dataset](https://terranubis.com/datainfo/Netherlands-Offshore-F3-Block-Complete),
+navigate to [MalenoV](https://github.com/bolgebrygg/MalenoV) project website and follow the links (which will lead to
+[this](https://drive.google.com/drive/folders/0B7brcf-eGK8CbGhBdmZoUnhiTWs) download). Save this file as
+`${HOME}/data/dutch_f3/data.segy`
+
+To download the train and validation masks, from the root of the repo, run
+```bash
+./contrib/scripts/get_F3_voxel.sh ${HOME}/data/dutch_f3
+```
+
+This will also download train and validation masks to the same location as data.segy.
+
+That's it!
+
+To run the training script, run `python train.py --cfg=configs/texture_net.yaml`. Additional options defined in `default.py` can be appended on the command line (for example, `TRAIN.END_EPOCH 100`) and will override the values loaded from the yaml file.
diff --git a/contrib/experiments/interpretation/dutchf3_voxel/configs/texture_net.yaml b/contrib/experiments/interpretation/dutchf3_voxel/configs/texture_net.yaml
new file mode 100644
index 00000000..aeeffb86
--- /dev/null
+++ b/contrib/experiments/interpretation/dutchf3_voxel/configs/texture_net.yaml
@@ -0,0 +1,41 @@
+# TextureNet configuration
+
+CUDNN:
+ BENCHMARK: true
+ DETERMINISTIC: false
+ ENABLED: true
+GPUS: (0,)
+OUTPUT_DIR: 'output'
+LOG_DIR: 'log'
+WORKERS: 4
+PRINT_FREQ: 10
+LOG_CONFIG: logging.conf
+SEED: 2019
+WINDOW_SIZE: 65
+
+DATASET:
+ NUM_CLASSES: 2
+ ROOT: /home/maxkaz/data/dutchf3
+ FILENAME: data.segy
+
+MODEL:
+ NAME: texture_net
+ IN_CHANNELS: 1
+ NUM_FILTERS: 50
+
+TRAIN:
+ BATCH_SIZE_PER_GPU: 32
+ END_EPOCH: 5000
+ LR: 0.02
+ MOMENTUM: 0.9
+ WEIGHT_DECAY: 0.0001
+ DEPTH: "voxel" # Options are No, Patch, Section and Voxel
+ MODEL_DIR: "models"
+
+VALIDATION:
+ BATCH_SIZE_PER_GPU: 32
+
+TEST:
+ MODEL_PATH: ""
+ SPLIT: 'Both' # Can be Both, Test1, Test2
+
diff --git a/contrib/experiments/interpretation/dutchf3_voxel/default.py b/contrib/experiments/interpretation/dutchf3_voxel/default.py
new file mode 100644
index 00000000..100da598
--- /dev/null
+++ b/contrib/experiments/interpretation/dutchf3_voxel/default.py
@@ -0,0 +1,82 @@
+# ------------------------------------------------------------------------------
+# Copyright (c) Microsoft
+# Licensed under the MIT License.
+# ------------------------------------------------------------------------------
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from yacs.config import CfgNode as CN
+
+_C = CN()
+
+# Cudnn related params
+_C.CUDNN = CN()
+_C.CUDNN.BENCHMARK = True
+_C.CUDNN.DETERMINISTIC = False
+_C.CUDNN.ENABLED = True
+
+_C.GPUS = (0,)
+_C.OUTPUT_DIR = "output" # This will be the base directory for all output, such as logs and saved models
+_C.LOG_DIR = "" # This will be a subdirectory inside OUTPUT_DIR
+_C.WORKERS = 4
+_C.PRINT_FREQ = 20
+_C.LOG_CONFIG = "logging.conf"
+_C.SEED = 42
+# size of voxel cube: WINDOW_SIZE x WINDOW_SIZE x WINDOW_SIZE; used for 3D models only
+_C.WINDOW_SIZE = 65
+
+# DATASET related params
+_C.DATASET = CN()
+_C.DATASET.NUM_CLASSES = 2
+_C.DATASET.ROOT = ""
+_C.DATASET.FILENAME = "data.segy"
+
+# common params for NETWORK
+_C.MODEL = CN()
+_C.MODEL.NAME = "texture_net"
+_C.MODEL.IN_CHANNELS = 1
+_C.MODEL.NUM_FILTERS = 50
+_C.MODEL.EXTRA = CN(new_allowed=True)
+
+# training
+_C.TRAIN = CN()
+_C.TRAIN.BATCH_SIZE_PER_GPU = 32
+# number of batches per epoch
+_C.TRAIN.BATCH_PER_EPOCH = 10
+# total number of epochs
+_C.TRAIN.END_EPOCH = 200
+_C.TRAIN.LR = 0.01
+_C.TRAIN.MOMENTUM = 0.9
+_C.TRAIN.WEIGHT_DECAY = 0.0001
+_C.TRAIN.DEPTH = "voxel" # Options are None, Patch and Section
+_C.TRAIN.MODEL_DIR = "models" # This will be a subdirectory inside OUTPUT_DIR
+
+# validation
+_C.VALIDATION = CN()
+_C.VALIDATION.BATCH_SIZE_PER_GPU = 32
+
+# TEST
+_C.TEST = CN()
+_C.TEST.MODEL_PATH = ""
+_C.TEST.SPLIT = "Both" # Can be Both, Test1, Test2
+
+
+def update_config(cfg, options=None, config_file=None):
+ cfg.defrost()
+
+ if config_file:
+ cfg.merge_from_file(config_file)
+
+ if options:
+ cfg.merge_from_list(options)
+
+ cfg.freeze()
+
+
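+# When executed as a script, write the default configuration to the file given as the first command-line argument.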
+if __name__ == "__main__":
+ import sys
+
+ with open(sys.argv[1], "w") as f:
+ print(_C, file=f)
diff --git a/contrib/experiments/interpretation/dutchf3_voxel/logging.conf b/contrib/experiments/interpretation/dutchf3_voxel/logging.conf
new file mode 100644
index 00000000..56334fc4
--- /dev/null
+++ b/contrib/experiments/interpretation/dutchf3_voxel/logging.conf
@@ -0,0 +1,34 @@
+[loggers]
+keys=root,__main__,event_handlers
+
+[handlers]
+keys=consoleHandler
+
+[formatters]
+keys=simpleFormatter
+
+[logger_root]
+level=INFO
+handlers=consoleHandler
+
+[logger___main__]
+level=INFO
+handlers=consoleHandler
+qualname=__main__
+propagate=0
+
+[logger_event_handlers]
+level=INFO
+handlers=consoleHandler
+qualname=event_handlers
+propagate=0
+
+[handler_consoleHandler]
+class=StreamHandler
+level=INFO
+formatter=simpleFormatter
+args=(sys.stdout,)
+
+[formatter_simpleFormatter]
+format=%(asctime)s - %(name)s - %(levelname)s - %(message)s
+
diff --git a/contrib/experiments/interpretation/dutchf3_voxel/train.py b/contrib/experiments/interpretation/dutchf3_voxel/train.py
new file mode 100644
index 00000000..bd8cdf4b
--- /dev/null
+++ b/contrib/experiments/interpretation/dutchf3_voxel/train.py
@@ -0,0 +1,230 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+# /* spell-checker: disable */
+
+import logging
+import logging.config
+from os import path
+
+import fire
+import numpy as np
+import torch
+from torch.utils import data
+from ignite.engine import Events
+from ignite.handlers import ModelCheckpoint
+from ignite.metrics import Loss
+from ignite.utils import convert_tensor
+from tqdm import tqdm
+
+from deepseismic_interpretation.dutchf3.data import get_voxel_loader
+from deepseismic_interpretation.models.texture_net import TextureNet
+
+from cv_lib.utils import load_log_configuration
+from cv_lib.event_handlers import (
+ SnapshotHandler,
+ logging_handlers,
+ tensorboard_handlers,
+)
+from cv_lib.event_handlers.logging_handlers import Evaluator
+from cv_lib.event_handlers.tensorboard_handlers import create_summary_writer
+
+from cv_lib.segmentation.metrics import (
+ pixelwise_accuracy,
+ class_accuracy,
+ mean_class_accuracy,
+ class_iou,
+ mean_iou,
+)
+from cv_lib.segmentation import extract_metric_from
+
+# from cv_lib.segmentation.dutchf3.engine import (
+# create_supervised_evaluator,
+# create_supervised_trainer,
+# )
+# Use ignite generic versions for now
+from ignite.engine import create_supervised_trainer, create_supervised_evaluator
+
+from default import _C as config
+from default import update_config
+
+
+def _prepare_batch(batch, device=None, non_blocking=False, t_type=torch.FloatTensor):
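+    # Split the batch into inputs and labels, drop the singleton channel dim from the inputs,
+    # add trailing singleton dims to the labels, then cast both and move them to the requested
+    # device (GPU if device == "cuda").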
+ x, y = batch
+ new_x = convert_tensor(torch.squeeze(x, 1), device=device, non_blocking=non_blocking)
+ new_y = convert_tensor(torch.unsqueeze(y, 2), device=device, non_blocking=non_blocking)
+ if device == "cuda":
+ return (
+ new_x.type(t_type).cuda(),
+ torch.unsqueeze(new_y, 3).type(torch.LongTensor).cuda(),
+ )
+ else:
+ return new_x.type(t_type), torch.unsqueeze(new_y, 3).type(torch.LongTensor)
+
+
+def run(*options, cfg=None):
+ """Run training and validation of model
+
+ Notes:
+ Options can be passed in via the options argument and loaded from the cfg file
+ Options from default.py will be overridden by options loaded from cfg file
+        Options passed in via the options argument will override options loaded from the cfg file
+
+ Args:
+        *options (str, int, optional): Options used to override what is loaded from the
+ config. To see what options are available consult
+ default.py
+ cfg (str, optional): Location of config file to load. Defaults to None.
+ """
+
+ update_config(config, options=options, config_file=cfg)
+
+ # Start logging
+ load_log_configuration(config.LOG_CONFIG)
+ logger = logging.getLogger(__name__)
+ logger.debug(config.WORKERS)
+ torch.backends.cudnn.benchmark = config.CUDNN.BENCHMARK
+
+ torch.manual_seed(config.SEED)
+ if torch.cuda.is_available():
+ torch.cuda.manual_seed_all(config.SEED)
+ np.random.seed(seed=config.SEED)
+
+ # load the data
+ TrainVoxelLoader = get_voxel_loader(config)
+
+ train_set = TrainVoxelLoader(
+ config.DATASET.ROOT,
+ config.DATASET.FILENAME,
+ split="train",
+ window_size=config.WINDOW_SIZE,
+ len=config.TRAIN.BATCH_SIZE_PER_GPU * config.TRAIN.BATCH_PER_EPOCH,
+ batch_size=config.TRAIN.BATCH_SIZE_PER_GPU,
+ )
+ val_set = TrainVoxelLoader(
+ config.DATASET.ROOT,
+ config.DATASET.FILENAME,
+ split="val",
+ window_size=config.WINDOW_SIZE,
+ len=config.TRAIN.BATCH_SIZE_PER_GPU * config.TRAIN.BATCH_PER_EPOCH,
+ batch_size=config.TRAIN.BATCH_SIZE_PER_GPU,
+ )
+
+ n_classes = train_set.n_classes
+
+ # set dataset length to batch size to be consistent with 5000 iterations
+ # each of size 32 in the original Waldeland implementation
+ train_loader = data.DataLoader(
+ train_set, batch_size=config.TRAIN.BATCH_SIZE_PER_GPU, num_workers=config.WORKERS, shuffle=False,
+ )
+ val_loader = data.DataLoader(
+ val_set, batch_size=config.VALIDATION.BATCH_SIZE_PER_GPU, num_workers=config.WORKERS, shuffle=False,
+ )
+
+ # this is how we import model for CV - here we're importing a seismic
+ # segmentation model
+ model = TextureNet(n_classes=config.DATASET.NUM_CLASSES)
+
+ optimizer = torch.optim.Adam(
+ model.parameters(),
+ lr=config.TRAIN.LR,
+ # momentum=config.TRAIN.MOMENTUM,
+ weight_decay=config.TRAIN.WEIGHT_DECAY,
+ )
+
+ device = "cpu"
+
+ if torch.cuda.is_available():
+ device = "cuda"
+ model = model.cuda()
+
+ loss = torch.nn.CrossEntropyLoss()
+
+ trainer = create_supervised_trainer(model, optimizer, loss, prepare_batch=_prepare_batch, device=device)
+
+ desc = "ITERATION - loss: {:.2f}"
+ pbar = tqdm(initial=0, leave=False, total=len(train_loader), desc=desc.format(0))
+
+ # add model checkpointing
+ output_dir = path.join(config.OUTPUT_DIR, config.TRAIN.MODEL_DIR)
+ checkpoint_handler = ModelCheckpoint(
+ output_dir, "model", save_interval=1, n_saved=3, create_dir=True, require_empty=False,
+ )
+
+ criterion = torch.nn.CrossEntropyLoss(reduction="mean")
+
+ # save model at each epoch
+ trainer.add_event_handler(Events.EPOCH_COMPLETED, checkpoint_handler, {config.MODEL.NAME: model})
+
+ def _select_pred_and_mask(model_out):
+ # receive a tuple of (x, y_pred), y
+ # so actually in line 51 of
+ # cv_lib/cv_lib/segmentation/dutch_f3/metrics/__init__.py
+ # we do the following line, so here we just select the model
+ # _, y_pred = torch.max(model_out[0].squeeze(), 1, keepdim=True)
+ y_pred = model_out[0].squeeze()
+ y = model_out[1].squeeze()
+ return (y_pred.squeeze(), y)
+
+ evaluator = create_supervised_evaluator(
+ model,
+ metrics={
+ "nll": Loss(criterion, device=device),
+ "pixa": pixelwise_accuracy(n_classes, output_transform=_select_pred_and_mask, device=device),
+ "cacc": class_accuracy(n_classes, output_transform=_select_pred_and_mask, device=device),
+ "mca": mean_class_accuracy(n_classes, output_transform=_select_pred_and_mask, device=device),
+ "ciou": class_iou(n_classes, output_transform=_select_pred_and_mask, device=device),
+ "mIoU": mean_iou(n_classes, output_transform=_select_pred_and_mask, device=device),
+ },
+ device=device,
+ prepare_batch=_prepare_batch,
+ )
+
+ # Set the validation run to start on the epoch completion of the training run
+ trainer.add_event_handler(Events.EPOCH_COMPLETED, Evaluator(evaluator, val_loader))
+
+ summary_writer = create_summary_writer(log_dir=path.join(output_dir, config.LOG_DIR))
+
+ evaluator.add_event_handler(
+ Events.EPOCH_COMPLETED,
+ logging_handlers.log_metrics(
+ "Validation results",
+ metrics_dict={
+ "mIoU": "Avg IoU :",
+ "nll": "Avg loss :",
+ "pixa": "Pixelwise Accuracy :",
+ "mca": "Mean Class Accuracy :",
+ },
+ ),
+ )
+ evaluator.add_event_handler(
+ Events.EPOCH_COMPLETED,
+ tensorboard_handlers.log_metrics(
+ summary_writer,
+ trainer,
+ "epoch",
+ metrics_dict={"mIoU": "Validation/IoU", "nll": "Validation/Loss", "mca": "Validation/MCA",},
+ ),
+ )
+
+ summary_writer = create_summary_writer(log_dir=path.join(output_dir, config.LOG_DIR))
+
+ snapshot_duration = 1
+
+ def snapshot_function():
+ return (trainer.state.iteration % snapshot_duration) == 0
+
+ checkpoint_handler = SnapshotHandler(
+ path.join(output_dir, config.TRAIN.MODEL_DIR),
+ config.MODEL.NAME,
+ extract_metric_from("mIoU"),
+ snapshot_function,
+ )
+ evaluator.add_event_handler(Events.EPOCH_COMPLETED, checkpoint_handler, {"model": model})
+
+ logger.info("Starting training")
+ trainer.run(train_loader, max_epochs=config.TRAIN.END_EPOCH // config.TRAIN.BATCH_PER_EPOCH)
+ pbar.close()
+
+
+if __name__ == "__main__":
+ fire.Fire(run)
diff --git a/contrib/experiments/interpretation/voxel2pixel/README.md b/contrib/experiments/interpretation/voxel2pixel/README.md
new file mode 100644
index 00000000..8e1f743c
--- /dev/null
+++ b/contrib/experiments/interpretation/voxel2pixel/README.md
@@ -0,0 +1,54 @@
+# Voxel to Pixel approach to Seismic Interpretation
+
+The code used in this approach is described in detail in the paper
+
+**Convolutional Neural Networks for Automated Seismic Interpretation**,
+A. U. Waldeland, A. C. Jensen, L. Gelius and A. H. S. Solberg
+[*The Leading Edge, July 2018*](https://library.seg.org/doi/abs/10.1190/tle37070529.1)
+
+There is also an
+EAGE E-lecture which you can watch: [*Seismic interpretation with deep learning*](https://www.youtube.com/watch?v=lm85Ap4OstM) (YouTube)
+
+### Setup to get started
+- make sure you follow the `README.md` file in the root of the repo to install all the proper dependencies.
+- downgrade TensorFlow and PyTorch's CUDA:
+  - downgrade TensorFlow by running `pip install tensorflow-gpu==1.14`
+  - make sure PyTorch uses the downgraded CUDA `pip install torch==1.3.1+cu92 torchvision==0.4.2+cu92 -f https://download.pytorch.org/whl/torch_stable.html`
+- download the data by running `contrib/scripts/get_F3_voxel.sh` from the `contrib` folder of this repo.
+This will download the training and validation labels/masks.
+- to get the main input dataset which is the [Dutch F3 dataset](https://terranubis.com/datainfo/Netherlands-Offshore-F3-Block-Complete),
+navigate to [MalenoV](https://github.com/bolgebrygg/MalenoV) project website and follow the links (which will lead to
+[this](https://drive.google.com/drive/folders/0B7brcf-eGK8CbGhBdmZoUnhiTWs) download). Save this file as
+`interpretation/voxel2pixel/F3/data.segy`
+
+If you want to revert downgraded packages, just run `conda env update -f environment/anaconda/local/environment.yml` from the root folder of the repo.
+
+### Monitoring progress with TensorBoard
+- from the `voxel2pixel` directory, run `tensorboard --logdir='log'` (all runtime logging information is
+written to the `log` folder)
+- open a web browser and go to localhost:6006
+
+More information can be found [here](https://www.tensorflow.org/get_started/summaries_and_tensorboard#launching_tensorboard).
+
+### Usage
+- `python train.py` will train the CNN and produce a model after a few hours on a decent gaming GPU
+with at least 6GB of onboard memory.
+- `python test_parallel.py` - example of how the trained CNN can be applied to predict salt in a slice or
+the full cube in distributed fashion on a single multi-GPU machine (single-GPU mode is also supported).
+In addition, it shows how learned attributes can be extracted.
+
+### Files
+In addition, it may be useful to have a look at these files:
+- `texture_net.py` - this is where the network is defined
+- `batch.py` - provides functionality to generate training batches with random augmentation
+- `data.py` - loads/saves data sets in SEGY format and labeled slices as images
+- `tb_logger.py` - connects to the TensorBoard functionality
+- `utils.py` - some helper functions
+- `test_parallel.py` - multi-GPU prediction script for scoring
+
+### Using a different data set and custom training labels
+If you want to use a different data set, do the following:
+- Make a new folder where you place the segy-file
+- Make a folder for the training labels
+- Save images of the slices you want to train on as 'SLICETYPE_SLICENO.png' (or jpg), where SLICETYPE is either 'inline', 'crossline', or 'timeslice' and SLICENO is the slice number.
+- Draw the classes on top of the seismic data, using a simple image editing program with the class colors. Currently up to six classes are supported, indicated by the colors: red, blue, green, cyan, magenta and yellow.
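+
+As a rough illustration of what such a label image encodes, below is a standalone sketch that maps the six supported colors to class indices. This is an assumption-laden example for demonstration only and not the actual parsing logic in `data.py` (`read_labels`); the exact color values and class ordering used there may differ.
+
+```python
+# Illustrative only: map the six supported label colors to class indices.
+# The real repository logic lives in data.py (read_labels) and may differ.
+import numpy as np
+from PIL import Image
+
+COLOR_TO_CLASS = {
+    (255, 0, 0): 0,    # red
+    (0, 0, 255): 1,    # blue
+    (0, 255, 0): 2,    # green
+    (0, 255, 255): 3,  # cyan
+    (255, 0, 255): 4,  # magenta
+    (255, 255, 0): 5,  # yellow
+}
+
+
+def label_image_to_coordinates(path):
+    """Return a dict mapping class index -> (row, col) pixel coordinates of that class."""
+    rgb = np.array(Image.open(path).convert("RGB"))
+    coordinates = {}
+    for color, cls in COLOR_TO_CLASS.items():
+        mask = np.all(rgb == np.array(color), axis=-1)
+        if mask.any():
+            coordinates[cls] = np.argwhere(mask)
+    return coordinates
+```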
+
diff --git a/contrib/experiments/interpretation/voxel2pixel/batch.py b/contrib/experiments/interpretation/voxel2pixel/batch.py
new file mode 100644
index 00000000..d53f79e8
--- /dev/null
+++ b/contrib/experiments/interpretation/voxel2pixel/batch.py
@@ -0,0 +1,351 @@
+# Copyright (c) Microsoft. All rights reserved.
+# Licensed under the MIT license.
+
+# code modified from https://github.com/waldeland/CNN-for-ASI
+
+import numpy as np
+
+
+def get_random_batch(
+ data_cube,
+ label_coordinates,
+ im_size,
+ num_batch_size,
+ random_flip=False,
+ random_stretch=None,
+ random_rot_xy=None,
+ random_rot_z=None,
+):
+ """
+ Returns a batch of augmented samples with center pixels randomly drawn from label_coordinates
+
+ Args:
+ data_cube: 3D numpy array with floating point velocity values
+ label_coordinates: 3D coordinates of the labeled training slice
+ im_size: size of the 3D voxel which we're cutting out around each label_coordinate
+ num_batch_size: size of the batch
+        random_flip: bool; if True, each voxel is randomly flipped along the non-depth axes
+        random_stretch: maximum random stretch magnitude; the applied stretch is drawn uniformly from [-random_stretch, random_stretch] (disabled if None)
+        random_rot_xy: maximum random rotation in degrees in the horizontal plane (disabled if None)
+        random_rot_z: maximum random tilt in degrees (disabled if None)
+
+    Returns:
+        a tuple of a batch numpy array of data with dimension
+        (batch, 1, im_size[0], im_size[1], im_size[2]) and the associated labels as an array
+        of size (batch).
+ """
+
+ # Make 3 im_size elements
+ if isinstance(im_size, int):
+ im_size = [im_size, im_size, im_size]
+
+ # Output arrays
+ batch = np.zeros([num_batch_size, 1, im_size[0], im_size[1], im_size[2]])
+ ret_labels = np.zeros([num_batch_size])
+
+ class_keys = list(label_coordinates)
+ n_classes = len(class_keys)
+
+ # Loop through batch
+ n_for_class = 0
+ class_ind = 0
+ for i in range(num_batch_size):
+
+ # Start by getting a grid centered around (0,0,0)
+ grid = get_grid(im_size)
+
+ # Apply random flip
+ if random_flip:
+ grid = augment_flip(grid)
+
+ # Apply random rotations
+ if random_rot_xy:
+ grid = augment_rot_xy(grid, random_rot_xy)
+ if random_rot_z:
+ grid = augment_rot_z(grid, random_rot_z)
+
+ # Apply random stretch
+ if random_stretch:
+ grid = augment_stretch(grid, random_stretch)
+
+ # Pick random location from the label_coordinates for this class:
+ coords_for_class = label_coordinates[class_keys[class_ind]]
+ random_index = rand_int(0, coords_for_class.shape[1])
+ coord = coords_for_class[:, random_index : random_index + 1]
+
+ # Move grid to be centered around this location
+ grid += coord
+
+ # Interpolate samples at grid from the data:
+ sample = trilinear_interpolation(data_cube, grid)
+
+ # Insert in output arrays
+ ret_labels[i] = class_ind
+ batch[i, 0, :, :, :] = np.reshape(sample, (im_size[0], im_size[1], im_size[2]))
+
+ # We seek to have a balanced batch with equally many samples from each class.
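+        # Once roughly num_batch_size / n_classes samples have been drawn for the current
+        # class, move on to the next class (the last class absorbs any remainder).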
+ n_for_class += 1
+ if n_for_class + 1 > int(0.5 + num_batch_size / float(n_classes)):
+ if class_ind < n_classes - 1:
+ class_ind += 1
+ n_for_class = 0
+
+ return batch, ret_labels
+
+
+def get_grid(im_size):
+ """
+    get_grid returns z, x, y coordinates centered around (0,0,0)
+
+ Args:
+ im_size: size of window
+
+ Returns
+        numpy array with size: 3 x (im_size[0] * im_size[1] * im_size[2])
+ """
+ win0 = np.linspace(-im_size[0] // 2, im_size[0] // 2, im_size[0])
+ win1 = np.linspace(-im_size[1] // 2, im_size[1] // 2, im_size[1])
+ win2 = np.linspace(-im_size[2] // 2, im_size[2] // 2, im_size[2])
+
+ x0, x1, x2 = np.meshgrid(win0, win1, win2, indexing="ij")
+
+ ex0 = np.expand_dims(x0.ravel(), 0)
+ ex1 = np.expand_dims(x1.ravel(), 0)
+ ex2 = np.expand_dims(x2.ravel(), 0)
+
+ grid = np.concatenate((ex0, ex1, ex2), axis=0)
+
+ return grid
+
+
+def augment_flip(grid):
+ """
+ Random flip of non-depth axes.
+
+ Args:
+ grid: 3D coordinates of the voxel
+
+ Returns:
+ flipped grid coordinates
+ """
+
+ # Flip x axis
+ if rand_bool():
+ grid[1, :] = -grid[1, :]
+
+ # Flip y axis
+ if rand_bool():
+ grid[2, :] = -grid[2, :]
+
+ return grid
+
+
+def augment_stretch(grid, stretch_factor):
+ """
+ Random stretch/scale
+
+ Args:
+ grid: 3D coordinate grid of the voxel
+        stretch_factor: maximum stretch magnitude; the applied stretch is drawn
+            uniformly from [-stretch_factor, stretch_factor]
+            TODO: change this to just call the function and not do -1,1 in rand_float
+
+ Returns:
+ stretched grid coordinates
+ """
+ stretch = rand_float(-stretch_factor, stretch_factor)
+ grid *= 1 + stretch
+ return grid
+
+
+def augment_rot_xy(grid, random_rot_xy):
+ """
+ Random rotation
+
+ Args:
+ grid: coordinate grid list of 3D points
+        random_rot_xy: maximum rotation in degrees; the applied rotation is drawn
+            uniformly from [-random_rot_xy, random_rot_xy]
+            TODO: change this to just call the function and not do -1,1 in rand_float
+
+ Returns:
+ randomly rotated grid
+ """
+ theta = np.deg2rad(rand_float(-random_rot_xy, random_rot_xy))
+ x = grid[2, :] * np.cos(theta) - grid[1, :] * np.sin(theta)
+ y = grid[2, :] * np.sin(theta) + grid[1, :] * np.cos(theta)
+ grid[1, :] = x
+ grid[2, :] = y
+ return grid
+
+
+def augment_rot_z(grid, random_rot_z):
+ """
+ Random tilt around z-axis (dim-2)
+
+ Args:
+ grid: coordinate grid list of 3D points
+        random_rot_z: maximum tilt in degrees; the applied tilt is drawn
+            uniformly from [-random_rot_z, random_rot_z]
+            TODO: change this to just call the function and not do -1,1 in rand_float
+
+ Returns:
+ randomly tilted coordinate grid
+ """
+ theta = np.deg2rad(rand_float(-random_rot_z, random_rot_z))
+ z = grid[0, :] * np.cos(theta) - grid[1, :] * np.sin(theta)
+ x = grid[0, :] * np.sin(theta) + grid[1, :] * np.cos(theta)
+ grid[0, :] = z
+ grid[1, :] = x
+ return grid
+
+
+def trilinear_interpolation(input_array, indices):
+ """
+    Trilinear interpolation
+ code taken from
+ http://stackoverflow.com/questions/6427276/3d-interpolation-of-numpy-arrays-without-scipy
+
+ Args:
+ input_array: 3D data array
+ indices: 3D grid coordinates
+
+ Returns:
+ interpolated input array
+ """
+
+ x_indices, y_indices, z_indices = indices[0:3]
+
+ n0, n1, n2 = input_array.shape
+
+ x0 = x_indices.astype(np.integer)
+ y0 = y_indices.astype(np.integer)
+ z0 = z_indices.astype(np.integer)
+ x1 = x0 + 1
+ y1 = y0 + 1
+ z1 = z0 + 1
+
+ # put all samples outside datacube to 0
+ inds_out_of_range = (
+ (x0 < 0)
+ | (x1 < 0)
+ | (y0 < 0)
+ | (y1 < 0)
+ | (z0 < 0)
+ | (z1 < 0)
+ | (x0 >= n0)
+ | (x1 >= n0)
+ | (y0 >= n1)
+ | (y1 >= n1)
+ | (z0 >= n2)
+ | (z1 >= n2)
+ )
+
+ x0[inds_out_of_range] = 0
+ y0[inds_out_of_range] = 0
+ z0[inds_out_of_range] = 0
+ x1[inds_out_of_range] = 0
+ y1[inds_out_of_range] = 0
+ z1[inds_out_of_range] = 0
+
+ x = x_indices - x0
+ y = y_indices - y0
+ z = z_indices - z0
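+    # Standard trilinear interpolation: weight each of the 8 neighbouring voxels by the
+    # product of the fractional distances (x, y, z) along each axis.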
+ output = (
+ input_array[x0, y0, z0] * (1 - x) * (1 - y) * (1 - z)
+ + input_array[x1, y0, z0] * x * (1 - y) * (1 - z)
+ + input_array[x0, y1, z0] * (1 - x) * y * (1 - z)
+ + input_array[x0, y0, z1] * (1 - x) * (1 - y) * z
+ + input_array[x1, y0, z1] * x * (1 - y) * z
+ + input_array[x0, y1, z1] * (1 - x) * y * z
+ + input_array[x1, y1, z0] * x * y * (1 - z)
+ + input_array[x1, y1, z1] * x * y * z
+ )
+
+ output[inds_out_of_range] = 0
+ return output
+
+
+def rand_float(low, high):
+ """
+ Generate random floating point number between two limits
+
+ Args:
+ low: low limit
+ high: high limit
+
+ Returns:
+ single random floating point number
+ """
+ return (high - low) * np.random.random_sample() + low
+
+
+def rand_int(low, high):
+ """
+ Generate random integer between two limits
+
+ Args:
+ low: low limit
+ high: high limit
+
+ Returns:
+ random integer between two limits
+ """
+ return np.random.randint(low, high)
+
+
+def rand_bool():
+ """
+ Generate random boolean.
+
+ Returns:
+ Random boolean
+ """
+ return bool(np.random.randint(0, 2))
+
+
+"""
+TODO: the following is not needed and should be added as tests later.
+
+# Test the batch-functions
+if __name__ == "__main__":
+ from data import read_segy, read_labels, get_slice
+ import tb_logger
+ import numpy as np
+ import os
+
+ data, data_info = read_segy(os.path.join("F3", "data.segy"))
+
+ train_coordinates = {"1": np.expand_dims(np.array([50, 50, 50]), 1)}
+
+ logger = tb_logger.TBLogger("log", "batch test")
+
+ [batch, labels] = get_random_batch(data, train_coordinates, 65, 32)
+ logger.log_images("normal", batch)
+
+ [batch, labels] = get_random_batch(
+ data, train_coordinates, 65, 32, random_flip=True
+ )
+ logger.log_images("flipping", batch)
+
+ [batch, labels] = get_random_batch(
+ data, train_coordinates, 65, 32, random_stretch=0.50
+ )
+ logger.log_images("stretching", batch)
+
+ [batch, labels] = get_random_batch(
+ data, train_coordinates, 65, 32, random_rot_xy=180
+ )
+ logger.log_images("rot", batch)
+
+ [batch, labels] = get_random_batch(
+ data, train_coordinates, 65, 32, random_rot_z=15
+ )
+ logger.log_images("dip", batch)
+
+ train_cls_imgs, train_coordinates = read_labels(
+ os.path.join("F3", "train"), data_info
+ )
+ [batch, labels] = get_random_batch(data, train_coordinates, 65, 32)
+ logger.log_images("salt", batch[:16, :, :, :, :])
+ logger.log_images("not salt", batch[16:, :, :, :, :])
+
+ logger.log_images("data", data[:, :, 50])
+"""
diff --git a/contrib/experiments/interpretation/voxel2pixel/data.py b/contrib/experiments/interpretation/voxel2pixel/data.py
new file mode 100644
index 00000000..bdcad76a
--- /dev/null
+++ b/contrib/experiments/interpretation/voxel2pixel/data.py
@@ -0,0 +1,326 @@
+# Copyright (c) Microsoft. All rights reserved.
+# Licensed under the MIT license.
+
+# code modified from https://github.com/waldeland/CNN-for-ASI
+
+from __future__ import print_function
+from os.path import isfile, join
+
+import segyio
+from os import listdir
+import numpy as np
+import scipy.misc
+
+
+def read_segy(filename):
+ """
+ Read in a SEGY-format file given a filename
+
+ Args:
+ filename: input filename
+
+ Returns:
+ numpy data array and its info as a dictionary (tuple)
+
+ """
+ print("Loading data cube from", filename, "with:")
+
+ # Read full data cube
+ data = segyio.tools.cube(filename)
+
+ # Put temporal axis first
+ data = np.moveaxis(data, -1, 0)
+
+ # Make data cube fast to access
+ data = np.ascontiguousarray(data, "float32")
+
+ # Read meta data
+ segyfile = segyio.open(filename, "r")
+ print(" Crosslines: ", segyfile.xlines[0], ":", segyfile.xlines[-1])
+ print(" Inlines: ", segyfile.ilines[0], ":", segyfile.ilines[-1])
+ print(" Timeslices: ", "1", ":", data.shape[0])
+
+ # Make dict with cube-info
+ # TODO: read this from segy
+    # Read dt and other params needed to create a new SEGY file later on
+ data_info = {
+ "crossline_start": segyfile.xlines[0],
+ "inline_start": segyfile.ilines[0],
+ "timeslice_start": 1,
+ "shape": data.shape,
+ }
+
+ return data, data_info
+
+
+def write_segy(out_filename, in_filename, out_cube):
+ """
+ Writes out_cube to a segy-file (out_filename) with same header/size as in_filename
+
+ Args:
+        out_filename: output SEG-Y file name
+        in_filename: input SEG-Y file used as a template for headers and size
+        out_cube: data cube to write (if a list is given, the last cube is written)
+
+ Returns:
+
+ """
+ # Select last channel
+ if type(out_cube) is list:
+ out_cube = out_cube[-1]
+
+ print("Writing interpretation to " + out_filename)
+ # Copy segy file
+ from shutil import copyfile
+
+ copyfile(in_filename, out_filename)
+
+ # Moving temporal axis back again
+ out_cube = np.moveaxis(out_cube, 0, -1)
+
+ # Open out-file
+ with segyio.open(out_filename, "r+") as src:
+ iline_start = src.ilines[0]
+ dtype = src.iline[iline_start].dtype
+ # loop through inlines and insert output
+ for i in src.ilines:
+ iline = out_cube[i - iline_start, :, :]
+ src.iline[i] = np.ascontiguousarray(iline.astype(dtype))
+
+ # TODO: rewrite this whole function
+    # Moving temporal axis first again - just in case the user wants to keep working on it
+ out_cube = np.moveaxis(out_cube, -1, 0)
+
+ print("Writing interpretation - Finished")
+ return
+
+
+# Alternative writings for slice-type
+inline_alias = ["inline", "in-line", "iline", "y"]
+crossline_alias = ["crossline", "cross-line", "xline", "x"]
+timeslice_alias = ["timeslice", "time-slice", "t", "z", "depthslice", "depth"]
+
+
+def read_labels(fname, data_info):
+ """
+ Read labels from an image.
+
+ Args:
+ fname: filename of labelling mask (image)
+ data_info: dictionary describing the data
+
+ Returns:
+ list of labels and list of coordinates
+ """
+
+ label_imgs = []
+ label_coordinates = {}
+
+    # Deduce the slice type and slice number from the file name
+
+ tmp = fname.split("/")[-1].split("_")
+ slice_type = tmp[0].lower()
+ tmp = tmp[1].split(".")
+ slice_no = int(tmp[0])
+
+ if slice_type not in inline_alias + crossline_alias + timeslice_alias:
+ print(
+ "File:", fname, "could not be loaded.", "Unknown slice type",
+ )
+ return None
+
+ if slice_type in inline_alias:
+ slice_type = "inline"
+ if slice_type in crossline_alias:
+ slice_type = "crossline"
+ if slice_type in timeslice_alias:
+ slice_type = "timeslice"
+
+ # Read file
+ print("Loading labels for", slice_type, slice_no, "with")
+ img = scipy.misc.imread(fname)
+ img = interpolate_to_fit_data(img, slice_type, slice_no, data_info)
+ label_img = parse_labels_in_image(img)
+
+ # Get coordinates for slice
+ coords = get_coordinates_for_slice(slice_type, slice_no, data_info)
+
+ # Loop through labels in label_img and append to label_coordinates
+ for cls in np.unique(label_img):
+ if cls > -1:
+ if str(cls) not in label_coordinates.keys():
+ label_coordinates[str(cls)] = np.array(np.zeros([3, 0]))
+ inds_with_cls = label_img == cls
+ cords_with_cls = coords[:, inds_with_cls.ravel()]
+ label_coordinates[str(cls)] = np.concatenate((label_coordinates[str(cls)], cords_with_cls), 1)
+ print(
+ " ", str(np.sum(inds_with_cls)), "labels for class", str(cls),
+ )
+ if len(np.unique(label_img)) == 1:
+ print(" ", 0, "labels", str(cls))
+
+ # Add label_img to output
+ label_imgs.append([label_img, slice_type, slice_no])
+
+ return label_imgs, label_coordinates
+
+
+# Add colors to this table to make it possible to have more classes
+class_color_coding = [
+ [0, 0, 255], # blue
+ [0, 255, 0], # green
+ [0, 255, 255], # cyan
+ [255, 0, 0], # red
+    [255, 0, 255],  # magenta
+ [255, 255, 0], # yellow
+]
+
+
+def parse_labels_in_image(img):
+ """
+    Convert an RGB label image into a 2D array of class labels.
+
+ Args:
+ img: 3-channel image array
+
+ Returns:
+        2D array of class labels (-1 where no class is assigned)
+ """
+ label_img = np.int16(img[:, :, 0]) * 0 - 1 # -1 = no class
+
+    # decompose color channels (the alpha channel, if present, is handled below)
+ r = img[:, :, 0]
+ g = img[:, :, 1]
+ b = img[:, :, 2]
+
+    # Alpha channel: mask out non-opaque pixels so they cannot match any class color
+    if img.shape[2] == 4:
+        a = img[:, :, 3] // 255
+ r = r * a
+ g = g * a
+ b = b * a
+
+ tolerance = 1
+ # Go through classes and find pixels with this class
+ cls = 0
+ for color in class_color_coding:
+ # Find pixels with these labels
+ inds = (
+ (np.abs(r - color[0]) < tolerance) & (np.abs(g - color[1]) < tolerance) & (np.abs(b - color[2]) < tolerance)
+ )
+ label_img[inds] = cls
+ cls += 1
+
+ return label_img
+
+
+def interpolate_to_fit_data(img, slice_type, slice_no, data_info):
+ """
+ Function to resize image if needed
+
+ Args:
+ img: image array
+ slice_type: inline, crossline or timeslice slice type
+ slice_no: slice number
+        data_info: data info dictionary extracted from the SEGY file
+
+ Returns:
+ resized image array
+
+ """
+
+ # Get wanted output size
+ if slice_type == "inline":
+ n0 = data_info["shape"][0]
+ n1 = data_info["shape"][2]
+ elif slice_type == "crossline":
+ n0 = data_info["shape"][0]
+ n1 = data_info["shape"][1]
+ elif slice_type == "timeslice":
+ n0 = data_info["shape"][1]
+ n1 = data_info["shape"][2]
+ return scipy.misc.imresize(img, (n0, n1), interp="nearest")
+
+
+def get_coordinates_for_slice(slice_type, slice_no, data_info):
+ """
+
+ Get coordinates for slice in the full cube
+
+ Args:
+ slice_type: type of slice, e.g. inline, crossline, etc
+ slice_no: slice number
+        data_info: data info dictionary
+
+    Returns:
+        index coordinates of every voxel in the slice, as a 3xN array
+
+ """
+ ds = data_info["shape"]
+
+ # Coordinates for cube
+ x0, x1, x2 = np.meshgrid(
+ np.linspace(0, ds[0] - 1, ds[0]),
+ np.linspace(0, ds[1] - 1, ds[1]),
+ np.linspace(0, ds[2] - 1, ds[2]),
+ indexing="ij",
+ )
+ if slice_type == "inline":
+ start = data_info["inline_start"]
+ slice_no = slice_no - start
+
+ x0 = x0[:, slice_no, :]
+ x1 = x1[:, slice_no, :]
+ x2 = x2[:, slice_no, :]
+ elif slice_type == "crossline":
+ start = data_info["crossline_start"]
+ slice_no = slice_no - start
+ x0 = x0[:, :, slice_no]
+ x1 = x1[:, :, slice_no]
+ x2 = x2[:, :, slice_no]
+
+ elif slice_type == "timeslice":
+ start = data_info["timeslice_start"]
+ slice_no = slice_no - start
+ x0 = x0[slice_no, :, :]
+ x1 = x1[slice_no, :, :]
+ x2 = x2[slice_no, :, :]
+
+ # Collect indexes
+ x0 = np.expand_dims(x0.ravel(), 0)
+ x1 = np.expand_dims(x1.ravel(), 0)
+ x2 = np.expand_dims(x2.ravel(), 0)
+ coords = np.concatenate((x0, x1, x2), axis=0)
+
+ return coords
+
+
+def get_slice(data, data_info, slice_type, slice_no, window=0):
+ """
+ Return data-slice
+
+ Args:
+ data: input 3D voxel numpy array
+ data_info: data info dictionary
+ slice_type: type of slice, like inline, crossline, etc
+ slice_no: slice number
+ window: window size around center pixel
+
+ Returns:
+ 2D slice of the voxel as a numpy array
+
+ """
+
+ if slice_type == "inline":
+ start = data_info["inline_start"]
+
+ elif slice_type == "crossline":
+ start = data_info["crossline_start"]
+
+ elif slice_type == "timeslice":
+ start = data_info["timeslice_start"]
+
+ slice_no = slice_no - start
+ slice = data[:, slice_no - window : slice_no + window + 1, :]
+
+ return np.squeeze(slice)
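+
+
+# A minimal usage sketch (hypothetical paths - an F3 cube and a label mask named after its slice,
+# e.g. "inline_339.png" as in train.py, are assumed to exist locally):
+#
+#   data, data_info = read_segy(join("F3", "data.segy"))
+#   label_imgs, label_coords = read_labels(join("F3", "inline_339.png"), data_info)
+#   img = get_slice(data, data_info, "inline", 339)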
diff --git a/contrib/experiments/interpretation/voxel2pixel/tb_logger.py b/contrib/experiments/interpretation/voxel2pixel/tb_logger.py
new file mode 100644
index 00000000..c6a894dc
--- /dev/null
+++ b/contrib/experiments/interpretation/voxel2pixel/tb_logger.py
@@ -0,0 +1,181 @@
+# Copyright (c) Microsoft. All rights reserved.
+# Licensed under the MIT license.
+
+# code modified from https://github.com/waldeland/CNN-for-ASI
+
+from __future__ import print_function
+from os.path import join
+
+# TODO: make this nicer and remove the bare except for PEP8 compliance
+try:
+ import tensorflow as tf
+except:
+ print("Tensorflow could not be imported, therefore tensorboard cannot be used.")
+
+from io import BytesIO
+import matplotlib.pyplot as plt
+import numpy as np
+import torch
+import datetime
+
+# TODO: it looks like the majority of the methods of this class are static and as such they should be in utils
+class TBLogger(object):
+ """
+ TensorBoard logger class
+ """
+
+ def __init__(self, log_dir, folder_name=""):
+
+ self.log_dir = join(log_dir, folder_name + " " + datetime.datetime.now().strftime("%I%M%p, %B %d, %Y"),)
+ self.log_dir = self.log_dir.replace("//", "/")
+ self.writer = tf.summary.FileWriter(self.log_dir)
+
+ def log_scalar(self, tag, value, step=0):
+ """
+ Add scalar
+
+ Args:
+ tag: tag
+ value: simple_value
+ step: step
+
+ """
+ summary = tf.Summary(value=[tf.Summary.Value(tag=tag, simple_value=value)])
+ self.writer.add_summary(summary, step)
+
+ # TODO: this should probably be a static method - take care of this when re-writing the whole thing
+ def make_list_of_2d_array(self, im):
+ """
+        Convert an image array (2D, 3D or 4D) into a list of 2D arrays
+
+ Args:
+ im: image
+
+ Returns:
+            list of 2D image arrays
+
+ """
+ if isinstance(im, list):
+ return im
+ ims = []
+ if len(im.shape) == 2:
+ ims.append(im)
+ elif len(im.shape) == 3:
+ for i in range(im.shape[0]):
+ ims.append(np.squeeze(im[i, :, :]))
+
+ elif len(im.shape) == 4:
+ for i in range(im.shape[0]):
+ ims.append(np.squeeze(im[i, 0, :, :]))
+ return ims
+
+ def log_images(self, tag, images, step=0, dim=2, max_imgs=50, cm="jet"):
+ """
+ Log images to TensorBoard
+
+ Args:
+ tag: image tag
+ images: list of images
+ step: training step
+            dim: dimensionality of the images (3 for voxel data)
+ max_imgs: max number of images
+ cm: colormap
+
+ """
+
+        # Make sure images are in numpy format in case the input is a Torch-variable
+ images = self.convert_to_numpy(images)
+
+ if len(images.shape) > 2:
+ dim = 3
+
+ # Make list of images
+ if dim == 2:
+ images = self.make_list_of_2d_array(images)
+
+ # If 3D we make one list for each slice-type
+ if dim == 3:
+ new_images_ts, new_images_il, new_images_cl = self.get_slices_from_3d(images)
+ self.log_images(tag + "_timeslice", new_images_ts, step, 2, max_imgs)
+ self.log_images(tag + "_inline", new_images_il, step, 2, max_imgs)
+ self.log_images(tag + "_crossline", new_images_cl, step, 2, max_imgs)
+ return
+
+ im_summaries = []
+
+ for nr, img in enumerate(images):
+
+ # Grayscale
+ if cm == "gray" or cm == "grey":
+ img = img.astype("float")
+ img = np.repeat(np.expand_dims(img, 2), 3, 2)
+ img -= img.min()
+ img /= img.max()
+ img *= 255
+ img = img.astype("uint8")
+
+ # Write the image to a string
+ s = BytesIO()
+ plt.imsave(s, img, format="png")
+
+ # Create an Image object
+ img_sum = tf.Summary.Image(encoded_image_string=s.getvalue(), height=img.shape[0], width=img.shape[1],)
+ # Create a Summary value
+ im_summaries.append(tf.Summary.Value(tag="%s/%d" % (tag, nr), image=img_sum))
+
+ # if nr == max_imgs-1:
+ # break
+
+ # Create and write Summary
+ summary = tf.Summary(value=im_summaries)
+ self.writer.add_summary(summary, step)
+
+ # TODO: probably another static method
+ def get_slices_from_3d(self, img):
+ """
+        Cuts out the middle timeslice, inline and crossline from an image cube
+
+        Args:
+            img: image array (3D, 4D or 5D)
+
+        Returns:
+            three lists of 2D slices (timeslice, inline, crossline)
+
+        """
+
+ new_images_ts = []
+ new_images_il = []
+ new_images_cl = []
+
+ if len(img.shape) == 3:
+            new_images_ts.append(np.squeeze(img[img.shape[0] // 2, :, :]))
+            new_images_il.append(np.squeeze(img[:, img.shape[1] // 2, :]))
+            new_images_cl.append(np.squeeze(img[:, :, img.shape[2] // 2]))
+
+        elif len(img.shape) == 4:
+            for i in range(img.shape[0]):
+                new_images_ts.append(np.squeeze(img[i, img.shape[1] // 2, :, :]))
+                new_images_il.append(np.squeeze(img[i, :, img.shape[2] // 2, :]))
+                new_images_cl.append(np.squeeze(img[i, :, :, img.shape[3] // 2]))
+
+        elif len(img.shape) == 5:
+            for i in range(img.shape[0]):
+                new_images_ts.append(np.squeeze(img[i, 0, img.shape[2] // 2, :, :]))
+                new_images_il.append(np.squeeze(img[i, 0, :, img.shape[3] // 2, :]))
+                new_images_cl.append(np.squeeze(img[i, 0, :, :, img.shape[4] // 2]))
+
+ return new_images_ts, new_images_il, new_images_cl
+
+ # TODO: another static method most likely
+ def convert_to_numpy(self, im):
+ """
+        Convert a torch Variable to a numpy array (anything else is returned unchanged)
+
+        Args:
+            im: image array or torch Variable
+
+        Returns:
+            numpy array
+
+        """
+
+ if type(im) == torch.autograd.Variable:
+ # Put on CPU
+ im = im.cpu()
+ # Get np-data
+ im = im.data.numpy()
+ return im
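+
+
+# A minimal usage sketch (hypothetical, and assuming a TensorFlow 1.x install since
+# tf.summary.FileWriter and tf.Summary used above are TF1-era APIs):
+#
+#   logger = TBLogger("log", "example")
+#   logger.log_scalar("loss", 0.5, step=1)
+#   logger.log_images("slice", np.random.rand(65, 65), step=1)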
diff --git a/contrib/experiments/interpretation/voxel2pixel/test_parallel.py b/contrib/experiments/interpretation/voxel2pixel/test_parallel.py
new file mode 100644
index 00000000..4e095afc
--- /dev/null
+++ b/contrib/experiments/interpretation/voxel2pixel/test_parallel.py
@@ -0,0 +1,426 @@
+# Copyright (c) Microsoft. All rights reserved.
+# Licensed under the MIT license.
+
+# code modified from https://github.com/waldeland/CNN-for-ASI
+from __future__ import print_function
+
+import os
+
+# set default number of GPUs which are discoverable
+N_GPU = 4
+DEVICE_IDS = list(range(N_GPU))
+os.environ["CUDA_VISIBLE_DEVICES"] = ",".join([str(x) for x in DEVICE_IDS])
+
+# static parameters
+RESOLUTION = 1
+# these match how the model is trained
+N_CLASSES = 2
+IM_SIZE = 65
+
+import random
+import argparse
+import json
+
+import torch
+import torch.nn as nn
+import torch.backends.cudnn as cudnn
+from torch.utils.data import Dataset, DataLoader
+import torch.distributed as dist
+
+if torch.cuda.is_available():
+ device_str = os.environ["CUDA_VISIBLE_DEVICES"]
+ device = torch.device("cuda:" + device_str)
+else:
+ raise Exception("No GPU detected for parallel scoring!")
+
+# ability to perform multiprocessing
+import multiprocessing
+
+from os.path import join
+from data import read_segy, get_slice
+from texture_net import TextureNet
+import itertools
+import numpy as np
+import tb_logger
+from data import write_segy
+
+# graphical progress bar
+from tqdm import tqdm
+
+
+class ModelWrapper(nn.Module):
+ """
+ Wrap TextureNet for (Distributed)DataParallel to invoke classify method
+ """
+
+ def __init__(self, texture_model):
+ super(ModelWrapper, self).__init__()
+ self.texture_model = texture_model
+
+ def forward(self, input_net):
+ return self.texture_model.classify(input_net)
+
+
+class MyDataset(Dataset):
+ def __init__(self, data, window, coord_list):
+
+ # main array
+ self.data = data
+ self.coord_list = coord_list
+ self.window = window
+ self.len = len(coord_list)
+
+ def __getitem__(self, index):
+
+ # TODO: can we specify a pixel mathematically by index?
+ pixel = self.coord_list[index]
+ x, y, z = pixel
+ # TODO: current bottleneck - can we slice out voxels any faster
+ small_cube = self.data[
+ x - self.window : x + self.window + 1,
+ y - self.window : y + self.window + 1,
+ z - self.window : z + self.window + 1,
+ ]
+
+ return small_cube[np.newaxis, :, :, :], pixel
+
+ def __len__(self):
+ return self.len
+
+
+def main_worker(gpu, ngpus_per_node, args):
+ """
+    Main worker function. Given the gpu parameter and the number of GPUs per node,
+    it can figure out its rank.
+
+    :param gpu: rank of this worker process; the GPU it runs on is gpu % ngpus_per_node, so this value can exceed ngpus_per_node when multiple processes run per GPU.
+ :param ngpus_per_node: total number of GPU available on this node.
+ :param args: various arguments for the code in the worker.
+ :return: nothing
+ """
+
+ print("I got GPU", gpu)
+
+ args.rank = gpu
+
+ # loop around in round-robin fashion if we want to run multiple processes per GPU
+ args.gpu = gpu % ngpus_per_node
+
+ # initialize the distributed process and join the group
+ print(
+ "setting rank", args.rank, "world size", args.world_size, args.dist_backend, args.dist_url,
+ )
+ dist.init_process_group(
+ backend=args.dist_backend, init_method=args.dist_url, world_size=args.world_size, rank=args.rank,
+ )
+
+ # set default GPU device for this worker
+ torch.cuda.set_device(args.gpu)
+ # set up device for the rest of the code
+ local_device = torch.device("cuda:" + str(args.gpu))
+
+    # Load trained model (run train.py first to create the trained model)
+ network = TextureNet(n_classes=N_CLASSES)
+ model_state_dict = torch.load(join(args.data, "saved_model.pt"), map_location=local_device)
+ network.load_state_dict(model_state_dict)
+ network.eval()
+ network.cuda(args.gpu)
+
+ # set the scoring wrapper also to eval mode
+ model = ModelWrapper(network)
+ model.eval()
+ model.cuda(args.gpu)
+
+ # When using a single GPU per process and per
+ # DistributedDataParallel, we need to divide the batch size
+ # ourselves based on the total number of GPUs we have.
+ # Min batch size is 1
+ args.batch_size = max(int(args.batch_size / ngpus_per_node), 1)
+ # obsolete: number of data loading workers - this is only used when reading from disk, which we're not
+ # args.workers = int((args.workers + ngpus_per_node - 1) / ngpus_per_node)
+
+ # wrap the model for distributed use - for scoring this is not needed
+ # model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu])
+
+ # set to benchmark mode because we're running the same workload multiple times
+ cudnn.benchmark = True
+
+ # Read 3D cube
+ # NOTE: we cannot pass this data manually as serialization of data into each python process is costly,
+ # so each worker has to load the data on its own.
+ data, data_info = read_segy(join(args.data, "data.segy"))
+
+ # Get half window size
+ window = IM_SIZE // 2
+
+ # reduce data size for debugging
+ if args.debug:
+ data = data[0 : 3 * window]
+
+ # generate full list of coordinates
+    # memory footprint of this isn't large yet, so no need to wrap it in a generator
+ nx, ny, nz = data.shape
+ x_list = range(window, nx - window)
+ y_list = range(window, ny - window)
+ z_list = range(window, nz - window)
+
+ print("-- generating coord list --")
+ # TODO: is there any way to use a generator with pyTorch data loader?
+ coord_list = list(itertools.product(x_list, y_list, z_list))
+
+ # we need to map the data manually to each rank - DistributedDataParallel doesn't do this at score time
+ print("take a subset of coord_list by chunk")
+ coord_list = list(np.array_split(np.array(coord_list), args.world_size)[args.rank])
+ coord_list = [tuple(x) for x in coord_list]
+
+ # we only score first batch in debug mode
+ if args.debug:
+ coord_list = coord_list[0 : args.batch_size]
+
+ # prepare the data
+ print("setup dataset")
+ # TODO: RuntimeError: cannot pin 'torch.cuda.FloatTensor' only dense CPU tensors can be pinned
+ data_torch = torch.cuda.FloatTensor(data).cuda(args.gpu, non_blocking=True)
+ dataset = MyDataset(data_torch, window, coord_list)
+
+ # not sampling like in training
+ # datasampler = DistributedSampler(dataset)
+ # just set some default epoch
+ # datasampler.set_epoch(1)
+
+ # we use 0 workers because we're reading from memory
+ print("setting up loader")
+ my_loader = DataLoader(
+ dataset=dataset,
+ batch_size=args.batch_size,
+ shuffle=False,
+ num_workers=0,
+ pin_memory=False,
+ sampler=None
+ # sampler=datasampler
+ )
+
+ print("running loop")
+
+ pixels_x = []
+ pixels_y = []
+ pixels_z = []
+ predictions = []
+
+ # Loop through center pixels in output cube
+ with torch.no_grad():
+ print("no grad")
+ for (chunk, pixel) in tqdm(my_loader):
+ data_input = chunk.cuda(args.gpu, non_blocking=True)
+ output = model(data_input)
+ # save and deal with it later on CPU
+ # we want to make sure order is preserved
+ pixels_x += pixel[0].tolist()
+ pixels_y += pixel[1].tolist()
+ pixels_z += pixel[2].tolist()
+ predictions += output.tolist()
+ # just score a single batch in debug mode
+ if args.debug:
+ break
+
+ # TODO: legacy Queue Manager code from multiprocessing which we left here for illustration purposes
+ # result_queue.append([deepcopy(coord_list), deepcopy(predictions)])
+ # result_queue.append([coord_list, predictions])
+ # transform pixels into x, y, z list format
+ with open("results_{}.json".format(args.rank), "w") as f:
+ json.dump(
+ {
+ "pixels_x": pixels_x,
+ "pixels_y": pixels_y,
+ "pixels_z": pixels_z,
+ "preds": [int(x[0][0][0][0]) for x in predictions],
+ },
+ f,
+ )
+
+ # TODO: we cannot use pickle to dump from multiprocess - processes lock up
+ # with open("result_predictions_{}.pkl".format(args.rank), "wb") as f:
+ # print ("dumping predictions pickle file")
+ # pickle.dump(predictions, f)
+
+
+parser = argparse.ArgumentParser(description="Seismic Distributed Scoring")
+parser.add_argument("-d", "--data", default="/home/maxkaz/data/dutchf3", type=str, help="default dataset folder name")
+parser.add_argument(
+ "-s",
+ "--slice",
+ default="inline",
+ type=str,
+ choices=["inline", "crossline", "timeslice", "full"],
+ help="slice type which we want to score on",
+)
+parser.add_argument(
+ "-n", "--slice-num", default=339, type=int, help="slice number which we want to score",
+)
+parser.add_argument(
+ "-b", "--batch-size", default=2 ** 11, type=int, help="batch size which we use for scoring",
+)
+parser.add_argument(
+ "-p", "--n-proc-per-gpu", default=1, type=int, help="number of multiple processes to run per each GPU",
+)
+parser.add_argument(
+ "--dist-url", default="tcp://127.0.0.1:12345", type=str, help="url used to set up distributed training",
+)
+parser.add_argument("--dist-backend", default="nccl", type=str, help="distributed backend")
+parser.add_argument("--seed", default=0, type=int, help="default random number seed")
+parser.add_argument(
+ "--debug", action="store_true", help="debug flag - if on we will only process one batch",
+)
+
+
+def main():
+
+ # use distributed scoring+
+ if RESOLUTION != 1:
+ raise Exception("Currently we only support pixel-level scoring")
+
+ args = parser.parse_args()
+
+ args.gpu = None
+ args.rank = 0
+
+ # world size is the total number of processes we want to run across all nodes and GPUs
+ args.world_size = N_GPU * args.n_proc_per_gpu
+
+ if args.debug:
+ args.batch_size = 4
+
+ # fix away any kind of randomness - although for scoring it should not matter
+ random.seed(args.seed)
+ torch.manual_seed(args.seed)
+ cudnn.deterministic = True
+
+ print("RESOLUTION {}".format(RESOLUTION))
+
+ ##########################################################################
+ print("-- scoring on GPU --")
+
+ ngpus_per_node = torch.cuda.device_count()
+ print("nGPUs per node", ngpus_per_node)
+
+ """
+ First, read this: https://thelaziestprogrammer.com/python/a-multiprocessing-pool-pickle
+
+ OK, so there are a few ways in which we can spawn a running process with pyTorch:
+ 1) Default mp.spawn should work just fine but won't let us access internals
+ 2) So we copied out the code from mp.spawn below to control how processes get created
+ 3) One could spawn their own processes but that would not be thread-safe with CUDA, line
+ "mp = multiprocessing.get_context('spawn')" guarantees we use the proper pyTorch context
+
+    Input data serialization is too costly; in general, so is output data serialization, as noted here:
+ https://docs.python.org/3/library/multiprocessing.html
+
+ Feeding data into each process is too costly, so each process loads its own data.
+
+ For deserialization we could try and fail using:
+ 1) Multiprocessing queue manager
+ manager = Manager()
+ return_dict = manager.dict()
+ OR
+ result_queue = multiprocessing.Queue()
+ CALLING
+ with Manager() as manager:
+ results_list = manager.list()
+ mp.spawn(main_worker, nprocs=args.world_size, args=(ngpus_per_node, results_list/dict/queue, args))
+ results = deepcopy(results_list)
+ 2) pickling results to disc.
+
+ Turns out that for the reasons mentioned in the first article both approaches are too costly.
+
+ The only reasonable way to deserialize data from a Python process is to write it to text, in which case
+ writing to JSON is a saner approach: https://www.datacamp.com/community/tutorials/pickle-python-tutorial
+ """
+
+ # invoke processes manually suppressing error queue
+ mp = multiprocessing.get_context("spawn")
+ # error_queues = []
+ processes = []
+ for i in range(args.world_size):
+ # error_queue = mp.SimpleQueue()
+ process = mp.Process(target=main_worker, args=(i, ngpus_per_node, args), daemon=False)
+ process.start()
+ # error_queues.append(error_queue)
+ processes.append(process)
+
+ # block on wait
+ for process in processes:
+ process.join()
+
+ print("-- aggregating results --")
+
+ # Read 3D cube
+ data, data_info = read_segy(join(args.data, "data.segy"))
+
+ # Log to tensorboard - input slice
+ logger = tb_logger.TBLogger("log", "Test")
+ logger.log_images(
+ args.slice + "_" + str(args.slice_num), get_slice(data, data_info, args.slice, args.slice_num), cm="gray",
+ )
+
+ x_coords = []
+ y_coords = []
+ z_coords = []
+ predictions = []
+ for i in range(args.world_size):
+ with open("results_{}.json".format(i), "r") as f:
+ results_dict = json.load(f)
+
+ x_coords += results_dict["pixels_x"]
+ y_coords += results_dict["pixels_y"]
+ z_coords += results_dict["pixels_z"]
+ predictions += results_dict["preds"]
+
+ """
+    Because of Python's GIL, having multiple workers write to the same array is not efficient: the only way to
+    share memory here is threading, but the GIL lets only one thread execute at a time, so we would pay the
+    overhead of managing multiple threads while the writes still happen sequentially.
+
+ A much faster alternative is to just invoke underlying compiled code (C) through the use of array indexing.
+
+ So basically instead of the following:
+
+ NUM_CORES = multiprocessing.cpu_count()
+ print("Post-processing will run on {} CPU cores on your machine.".format(NUM_CORES))
+
+ def worker(classified_cube, coord):
+ x, y, z = coord
+ ind = new_coord_list.index(coord)
+ # print (coord, ind)
+ pred_class = predictions[ind]
+ classified_cube[x, y, z] = pred_class
+
+ # launch workers in parallel with memory sharing ("threading" backend)
+ _ = Parallel(n_jobs=4*NUM_CORES, backend="threading")(
+ delayed(worker)(classified_cube, coord) for coord in tqdm(pixels)
+ )
+
+ We do this:
+ """
+
+ # placeholder for results
+ classified_cube = np.zeros(data.shape)
+ # store final results
+ classified_cube[x_coords, y_coords, z_coords] = predictions
+
+ print("-- writing segy --")
+ in_file = join(args.data, "data.segy".format(RESOLUTION))
+ out_file = join(args.data, "salt_{}.segy".format(RESOLUTION))
+ write_segy(out_file, in_file, classified_cube)
+
+ print("-- logging prediction --")
+ # log prediction to tensorboard
+ logger = tb_logger.TBLogger("log", "Test_scored")
+ logger.log_images(
+ args.slice + "_" + str(args.slice_num),
+ get_slice(classified_cube, data_info, args.slice, args.slice_num),
+ cm="binary",
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/contrib/experiments/interpretation/voxel2pixel/texture_net.py b/contrib/experiments/interpretation/voxel2pixel/texture_net.py
new file mode 100644
index 00000000..f19fda96
--- /dev/null
+++ b/contrib/experiments/interpretation/voxel2pixel/texture_net.py
@@ -0,0 +1,157 @@
+# Copyright (c) Microsoft. All rights reserved.
+# Licensed under the MIT license.
+
+# code modified from https://github.com/waldeland/CNN-for-ASI
+
+import torch
+from torch import nn
+
+from utils import gpu_no_of_var
+
+
+class TextureNet(nn.Module):
+ def __init__(self, n_classes=2, n_filters=50):
+ super(TextureNet, self).__init__()
+
+ # Network definition
+ # Parameters #in_channels, #out_channels, filter_size, stride (downsampling factor)
+ self.net = nn.Sequential(
+ nn.Conv3d(1, n_filters, 5, 4, padding=2),
+ nn.BatchNorm3d(n_filters),
+            # nn.Dropout3d() # Dropout can be added like this ...
+ nn.ReLU(),
+ nn.Conv3d(n_filters, n_filters, 3, 2, padding=1, bias=False),
+ nn.BatchNorm3d(n_filters),
+ nn.ReLU(),
+ nn.Conv3d(n_filters, n_filters, 3, 2, padding=1, bias=False),
+ nn.BatchNorm3d(n_filters),
+ nn.ReLU(),
+ nn.Conv3d(n_filters, n_filters, 3, 2, padding=1, bias=False),
+ nn.BatchNorm3d(n_filters),
+ nn.ReLU(),
+ nn.Conv3d(n_filters, n_filters, 3, 3, padding=1, bias=False),
+ nn.BatchNorm3d(n_filters),
+ nn.ReLU(),
+ nn.Conv3d(
+ n_filters, n_classes, 1, 1
+ ), # This is the equivalent of a fully connected layer since input has width/height/depth = 1
+ nn.ReLU(),
+ )
+        # The filter weights are randomly initialized by default
+
+ def forward(self, x):
+ """
+ Is called to compute network output
+
+ Args:
+ x: network input - torch tensor
+
+ Returns:
+ output from the neural network
+
+ """
+ return self.net(x)
+
+ def classify(self, x):
+ """
+ Classification wrapper
+
+ Args:
+ x: input tensor for classification
+
+ Returns:
+ classification result
+
+ """
+ x = self.net(x)
+ _, class_no = torch.max(x, 1, keepdim=True)
+ return class_no
+
+ # Functions to get output from intermediate feature layers
+ def f1(self, x):
+ """
+ Wrapper to obtain a particular network layer
+
+ Args:
+ x: input tensor for classification
+
+ Returns:
+ requested layer
+
+ """
+ return self.getFeatures(x, 0)
+
+ def f2(self, x):
+ """
+ Wrapper to obtain a particular network layer
+
+ Args:
+ x: input tensor for classification
+
+ Returns:
+ requested layer
+
+ """
+ return self.getFeatures(x, 1)
+
+ def f3(self, x):
+ """
+ Wrapper to obtain a particular network layer
+
+ Args:
+ x: input tensor for classification
+
+ Returns:
+ requested layer
+
+ """
+ return self.getFeatures(x, 2)
+
+ def f4(self, x):
+ """
+ Wrapper to obtain a particular network layer
+
+ Args:
+ x: input tensor for classification
+
+ Returns:
+ requested layer
+
+ """
+ return self.getFeatures(x, 3)
+
+ def f5(self, x):
+ """
+ Wrapper to obtain a particular network layer
+
+ Args:
+ x: input tensor for classification
+
+ Returns:
+ requested layer
+
+ """
+ return self.getFeatures(x, 4)
+
+ def getFeatures(self, x, layer_no):
+ """
+ Main call method to call the wrapped layers
+
+ Args:
+ x: input tensor for classification
+ layer_no: number of hidden layer we want to extract
+
+ Returns:
+ requested layer
+
+ """
+ layer_indexes = [0, 3, 6, 9, 12]
+
+ # Make new network that has the layers up to the requested output
+ tmp_net = nn.Sequential()
+ layers = list(self.net.children())[0 : layer_indexes[layer_no] + 1]
+ for i in range(len(layers)):
+ tmp_net.add_module(str(i), layers[i])
+ if type(gpu_no_of_var(self)) == int:
+ tmp_net.cuda(gpu_no_of_var(self))
+ return tmp_net(x)
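+
+
+# A minimal shape-check sketch (hypothetical usage, assuming the 65x65x65 patches used in train.py):
+#
+#   net = TextureNet(n_classes=2)
+#   x = torch.zeros(1, 1, 65, 65, 65)
+#   net(x).shape            # -> torch.Size([1, 2, 1, 1, 1]) - per-class scores for the centre pixel
+#   net.classify(x).shape   # -> torch.Size([1, 1, 1, 1, 1]) - predicted class index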
diff --git a/contrib/experiments/interpretation/voxel2pixel/train.py b/contrib/experiments/interpretation/voxel2pixel/train.py
new file mode 100644
index 00000000..74f664ca
--- /dev/null
+++ b/contrib/experiments/interpretation/voxel2pixel/train.py
@@ -0,0 +1,136 @@
+# Copyright (c) Microsoft. All rights reserved.
+# Licensed under the MIT license.
+
+# code modified from https://github.com/waldeland/CNN-for-ASI
+
+from __future__ import print_function
+from os.path import join
+import torch
+from torch import nn
+from data import read_segy, read_labels, get_slice
+from batch import get_random_batch
+from torch.autograd import Variable
+from texture_net import TextureNet
+import tb_logger
+import utils
+
+# Parameters
+ROOT_PATH = "/home/maxkaz/data/dutchf3"
+INPUT_VOXEL = "data.segy"
+TRAIN_MASK = "inline_339.png"
+VAL_MASK = "inline_405.png"
+IM_SIZE = 65
+# If you have a GPU with little memory, try reducing this to 16 (may degrade results)
+BATCH_SIZE = 32
+# Switch to toggle the use of GPU or not
+USE_GPU = True
+# Log progress on tensor board
+LOG_TENSORBOARD = True
+
+# the rest of the code
+if LOG_TENSORBOARD:
+ logger = tb_logger.TBLogger("log", "Train")
+
+# This is the network definition proposed in the paper
+network = TextureNet(n_classes=2)
+
+# Loss function - Softmax function is included
+cross_entropy = nn.CrossEntropyLoss()
+
+# Optimizer to control step size in gradient descent
+optimizer = torch.optim.Adam(network.parameters())
+
+# Transfer model to gpu
+if USE_GPU and torch.cuda.is_available():
+ network = network.cuda()
+
+# Load the data cube and labels
+data, data_info = read_segy(join(ROOT_PATH, INPUT_VOXEL))
+train_class_imgs, train_coordinates = read_labels(join(ROOT_PATH, TRAIN_MASK), data_info)
+val_class_imgs, _ = read_labels(join(ROOT_PATH, VAL_MASK), data_info)
+
+# Plot training/validation data with labels
+if LOG_TENSORBOARD:
+ for class_img in train_class_imgs + val_class_imgs:
+ logger.log_images(
+ class_img[1] + "_" + str(class_img[2]), get_slice(data, data_info, class_img[1], class_img[2]), cm="gray",
+ )
+ logger.log_images(
+ class_img[1] + "_" + str(class_img[2]) + "_true_class", class_img[0],
+ )
+
+# Training loop
+for i in range(5000):
+
+ # Get random training batch with augmentation
+    # This is the bottleneck for training and could be done more efficiently on the GPU...
+ [batch, labels] = get_random_batch(
+ data,
+ train_coordinates,
+ IM_SIZE,
+ BATCH_SIZE,
+ random_flip=True,
+ random_stretch=0.2,
+ random_rot_xy=180,
+ random_rot_z=15,
+ )
+
+ # Format data to torch-variable
+ batch = Variable(torch.Tensor(batch).float())
+ labels = Variable(torch.Tensor(labels).long())
+
+ # Transfer data to gpu
+ if USE_GPU and torch.cuda.is_available():
+ batch = batch.cuda()
+ labels = labels.cuda()
+
+ # Set network to training phase
+ network.train()
+
+ # Run the samples through the network
+ output = network(batch)
+
+ # Compute loss
+ loss = cross_entropy(torch.squeeze(output), labels)
+
+    # Reset gradients accumulated in the previous iteration
+    optimizer.zero_grad()
+
+    # Do back-propagation to get gradients of weights w.r.t. loss
+    loss.backward()
+
+ # Ask the optimizer to adjust the parameters in the direction of lower loss
+ optimizer.step()
+
+ # Every 10th iteration - print training loss
+ if i % 10 == 0:
+ network.eval()
+
+        # Log training loss/acc
+ print("Iteration:", i, "Training loss:", utils.var_to_np(loss))
+ if LOG_TENSORBOARD:
+ logger.log_scalar("training_loss", utils.var_to_np(loss), i)
+ for k, v in utils.compute_accuracy(torch.argmax(output, 1), labels).items():
+ if LOG_TENSORBOARD:
+ logger.log_scalar("training_" + k, v, i)
+ print(" -", k, v, "%")
+
+ # every 100th iteration
+ if i % 100 == 0 and LOG_TENSORBOARD:
+ network.eval()
+
+ # Output predicted train/validation class/probability images
+ for class_img in train_class_imgs + val_class_imgs:
+
+ slice = class_img[1]
+ slice_no = class_img[2]
+
+ class_img = utils.interpret(
+ network.classify, data, data_info, slice, slice_no, IM_SIZE, 16, return_full_size=True, use_gpu=USE_GPU,
+ )
+ logger.log_images(slice + "_" + str(slice_no) + "_pred_class", class_img[0], step=i)
+
+ class_img = utils.interpret(
+ network, data, data_info, slice, slice_no, IM_SIZE, 16, return_full_size=True, use_gpu=USE_GPU,
+ )
+ logger.log_images(slice + "_" + str(slice_no) + "_pred_prob", class_img[0], i)
+
+ # Store trained network
+ torch.save(network.state_dict(), join(ROOT_PATH, "saved_model.pt"))
diff --git a/contrib/experiments/interpretation/voxel2pixel/utils.py b/contrib/experiments/interpretation/voxel2pixel/utils.py
new file mode 100644
index 00000000..31db6b55
--- /dev/null
+++ b/contrib/experiments/interpretation/voxel2pixel/utils.py
@@ -0,0 +1,337 @@
+# Copyright (c) Microsoft. All rights reserved.
+# Licensed under the MIT license.
+
+# code modified from https://github.com/waldeland/CNN-for-ASI
+
+from __future__ import print_function
+
+import torch
+import numpy as np
+from torch.autograd import Variable
+from scipy.interpolate import interpn
+import sys
+import time
+
+# global parameters
+ST = 0
+LAST_UPDATE = 0
+
+
+def interpret(
+ network, data, data_info, slice, slice_no, im_size, subsampl, return_full_size=True, use_gpu=True,
+):
+ """
+    Classifies a slice (or the full cube) at a reduced resolution and optionally upsamples the result back to
+    full size. Given a 3D voxel cube classified at a particular stride (we classify every subsampl-th pixel, as
+    given by the subsampl argument below), we take a particular slice from the cube and optionally blow it up to
+    full resolution as if every single pixel had been classified.
+
+ Args:
+ network: pytorch model definition
+ data: input voxel
+ data_info: input voxel information
+ slice: slice type which we want to interpret
+ slice_no: slice number
+ im_size: size of the voxel
+        subsampl: subsampling stride, i.e. we classify every subsampl-th pixel along each axis
+ return_full_size: boolean flag, enable if you want to return full size without downsampling
+ use_gpu: boolean flag to use the GPU
+
+ Returns:
+ upsampled slice
+
+ """
+
+ # Wrap np.linspace in compact function call
+ ls = lambda N: np.linspace(0, N - 1, N, dtype="int")
+
+ # Size of cube
+ N0, N1, N2 = data.shape
+
+ # Coords for full cube
+ x0_range = ls(N0)
+ x1_range = ls(N1)
+ x2_range = ls(N2)
+
+ # Coords for subsampled cube
+ pred_points = (x0_range[::subsampl], x1_range[::subsampl], x2_range[::subsampl])
+
+ # Select slice
+ if slice == "full":
+ class_cube = data[::subsampl, ::subsampl, ::subsampl] * 0
+
+ elif slice == "inline":
+ slice_no = slice_no - data_info["inline_start"]
+ class_cube = data[::subsampl, 0:1, ::subsampl] * 0
+ x1_range = np.array([slice_no])
+ pred_points = (pred_points[0], pred_points[2])
+
+ elif slice == "crossline":
+ slice_no = slice_no - data_info["crossline_start"]
+ class_cube = data[::subsampl, ::subsampl, 0:1,] * 0
+ x2_range = np.array([slice_no])
+ pred_points = (pred_points[0], pred_points[1])
+
+ elif slice == "timeslice":
+ slice_no = slice_no - data_info["timeslice_start"]
+ class_cube = data[0:1, ::subsampl, ::subsampl] * 0
+ x0_range = np.array([slice_no])
+ pred_points = (pred_points[1], pred_points[2])
+
+ # Grid for small class slice/cube
+ n0, n1, n2 = class_cube.shape
+ x0_grid, x1_grid, x2_grid = np.meshgrid(ls(n0,), ls(n1), ls(n2), indexing="ij")
+
+ # Grid for full slice/cube
+ X0_grid, X1_grid, X2_grid = np.meshgrid(x0_range, x1_range, x2_range, indexing="ij")
+
+ # Indexes for large cube at small cube pixels
+ X0_grid_sub = X0_grid[::subsampl, ::subsampl, ::subsampl]
+ X1_grid_sub = X1_grid[::subsampl, ::subsampl, ::subsampl]
+ X2_grid_sub = X2_grid[::subsampl, ::subsampl, ::subsampl]
+
+ # Get half window size
+ w = im_size // 2
+
+ # Loop through center pixels in output cube
+ for i in range(X0_grid_sub.size):
+
+ # Get coordinates in small and large cube
+ x0 = x0_grid.ravel()[i]
+ x1 = x1_grid.ravel()[i]
+ x2 = x2_grid.ravel()[i]
+
+ X0 = X0_grid_sub.ravel()[i]
+ X1 = X1_grid_sub.ravel()[i]
+ X2 = X2_grid_sub.ravel()[i]
+
+ # Only compute when a full 65x65x65 cube can be extracted around center pixel
+ if X0 > w and X1 > w and X2 > w and X0 < N0 - w + 1 and X1 < N1 - w + 1 and X2 < N2 - w + 1:
+
+ # Get mini-cube around center pixel
+ mini_cube = data[X0 - w : X0 + w + 1, X1 - w : X1 + w + 1, X2 - w : X2 + w + 1]
+
+ # Get predicted "probabilities"
+ mini_cube = Variable(torch.FloatTensor(mini_cube[np.newaxis, np.newaxis, :, :, :]))
+ if use_gpu:
+ mini_cube = mini_cube.cuda()
+ out = network(mini_cube)
+ out = out.data.cpu().numpy()
+
+ out = out[:, :, out.shape[2] // 2, out.shape[3] // 2, out.shape[4] // 2]
+ out = np.squeeze(out)
+
+            # Make one output per output channel
+ if not isinstance(class_cube, list):
+ class_cube = np.split(np.repeat(class_cube[:, :, :, np.newaxis], out.size, 3), out.size, axis=3,)
+
+ # Insert into output
+ if out.size == 1:
+ class_cube[0][x0, x1, x2] = out
+ else:
+                    for channel in range(out.size):
+                        class_cube[channel][x0, x1, x2] = out[channel]
+
+ # Keep user informed about progress
+ if slice == "full":
+                print_progress_bar(i, x0_grid.size)
+
+ # Resize to input size
+ if return_full_size:
+ if slice == "full":
+ print("Interpolating down sampled results to fit input cube")
+
+ N = X0_grid.size
+
+ # Output grid
+ if slice == "full":
+ grid_output_cube = np.concatenate(
+ [X0_grid.reshape([N, 1]), X1_grid.reshape([N, 1]), X2_grid.reshape([N, 1]),], 1,
+ )
+ elif slice == "inline":
+ grid_output_cube = np.concatenate([X0_grid.reshape([N, 1]), X2_grid.reshape([N, 1])], 1)
+ elif slice == "crossline":
+ grid_output_cube = np.concatenate([X0_grid.reshape([N, 1]), X1_grid.reshape([N, 1])], 1)
+ elif slice == "timeslice":
+ grid_output_cube = np.concatenate([X1_grid.reshape([N, 1]), X2_grid.reshape([N, 1])], 1)
+
+ # Interpolation
+ for i in range(len(class_cube)):
+ is_int = (
+ np.sum(
+ np.unique(class_cube[i]).astype("float") - np.unique(class_cube[i]).astype("int32").astype("float")
+ )
+ == 0
+ )
+ class_cube[i] = interpn(
+ pred_points,
+ class_cube[i].astype("float").squeeze(),
+ grid_output_cube,
+ method="linear",
+ fill_value=0,
+ bounds_error=False,
+ )
+ class_cube[i] = class_cube[i].reshape([x0_range.size, x1_range.size, x2_range.size])
+
+            # If output is class labels we convert the interpolated array to ints
+ if is_int:
+ class_cube[i] = class_cube[i].astype("int32")
+
+ if slice == "full":
+ print("Finished interpolating")
+
+ # Squeeze outputs
+ for i in range(len(class_cube)):
+ class_cube[i] = class_cube[i].squeeze()
+
+ return class_cube
+
+
+# TODO: this should probably be replaced with TQDM
+def print_progress_bar(iteration, total, prefix="", suffix="", decimals=1, length=100, fill="="):
+ """
+    Provides a progress bar implementation.
+
+ Adapted from https://stackoverflow.com/questions/3173320/text-progress-bar-in-the-console/14879561#14879561
+
+ Args:
+ iteration: iteration number
+ total: total number of iterations
+ prefix: comment prefix in display
+ suffix: comment suffix in display
+ decimals: how many decimals to display
+ length: character length of progress bar
+ fill: character to display as progress bar
+
+ """
+ global ST, LAST_UPDATE
+
+    # Expect iteration to go from 0 to N-1
+ iteration = iteration + 1
+
+    # Only update every 5 seconds
+ if time.time() - LAST_UPDATE < 5:
+ if iteration == total:
+ time.sleep(1)
+ else:
+ return
+
+ if iteration <= 1:
+        ST = time.time()
+ exp_h = ""
+ exp_m = ""
+ exp_s = ""
+ elif iteration == total:
+ exp_time = time.time() - ST
+ exp_h = int(exp_time / 3600)
+ exp_m = int(exp_time / 60 - exp_h * 60.0)
+ exp_s = int(exp_time - exp_m * 60.0 - exp_h * 3600.0)
+ else:
+ exp_time = (time.time() - ST) / (iteration - 1) * total - (time.time() - ST)
+ exp_h = int(exp_time / 3600)
+ exp_m = int(exp_time / 60 - exp_h * 60.0)
+ exp_s = int(exp_time - exp_m * 60.0 - exp_h * 3600.0)
+
+ percent = ("{0:." + str(decimals) + "f}").format(100 * (iteration / float(total)))
+ filled_length = int(length * iteration // total)
+ bar = fill * filled_length + "-" * (length - filled_length)
+ if iteration != total:
+ print("\r%s |%s| %s%% %s - %sh %smin %ss left" % (prefix, bar, percent, suffix, exp_h, exp_m, exp_s))
+ else:
+ print("\r%s |%s| %s%% %s - %sh %smin %ss " % (prefix, bar, percent, suffix, exp_h, exp_m, exp_s))
+ sys.stdout.write("\033[F")
+ # Print New Line on Complete
+ if iteration == total:
+ print("")
+ # last_update = time.time()
+
+
+# TODO: rewrite this whole function to get rid of excepts
+# TODO: also not sure what this function is for - it's almost as if it's not needed - try to remove it.
+def gpu_no_of_var(var):
+ """
+ Function that returns the GPU number or whether the tensor is on GPU or not
+
+ Args:
+ var: torch tensor
+
+ Returns:
+ The CUDA device that the torch tensor is on, or whether the tensor is on GPU
+
+ """
+
+ try:
+ is_cuda = next(var.parameters()).is_cuda
+ except:
+ is_cuda = var.is_cuda
+
+ if is_cuda:
+ try:
+ return next(var.parameters()).get_device()
+ except:
+ return var.get_device()
+ else:
+ return False
+
+
+# TODO: remove all the try except statements
+def var_to_np(var):
+ """
+ Take a pyTorch tensor and convert it to numpy array of the same shape, as the name suggests.
+
+ Args:
+ var: input variable
+
+ Returns:
+ numpy array of the tensor
+
+ """
+    if isinstance(var, np.ndarray):
+ return var
+
+ # If input is list we do this for all elements
+    if isinstance(var, list):
+ out = []
+ for v in var:
+ out.append(var_to_np(v))
+ return out
+
+    try:
+        var = var.cpu()
+    except:
+        pass
+    try:
+        var = var.data
+    except:
+        pass
+    try:
+        var = var.numpy()
+    except:
+        pass
+
+ if type(var) == tuple:
+ var = var[0]
+ return var
+
+
+def compute_accuracy(predicted_class, labels):
+ """
+    Compute per-class accuracy and the average accuracy across classes
+
+ Args:
+ predicted_class: pyTorch tensor with predictions
+ labels: pyTorch tensor with ground truth labels
+
+ Returns:
+ Accuracy calculation as a dictionary per class and average class accuracy across classes
+
+ """
+ labels = var_to_np(labels)
+ predicted_class = var_to_np(predicted_class)
+
+ accuracies = {}
+ for cls in np.unique(labels):
+ if cls >= 0:
+ accuracies["accuracy_class_" + str(cls)] = int(np.mean(predicted_class[labels == cls] == cls) * 100)
+ accuracies["average_class_accuracy"] = np.mean([acc for acc in accuracies.values()])
+ return accuracies
diff --git a/contrib/fwi/azureml_devito/README.md b/contrib/fwi/azureml_devito/README.md
new file mode 100755
index 00000000..80b11e85
--- /dev/null
+++ b/contrib/fwi/azureml_devito/README.md
@@ -0,0 +1,60 @@
+# DeepSeismic
+
+## Imaging
+
+This tutorial shows how to run [devito](https://www.devitoproject.org/) tutorial [notebooks](https://github.com/opesci/devito/tree/master/examples/seismic/tutorials) in Azure Machine Learning ([Azure ML](https://docs.microsoft.com/en-us/azure/machine-learning/)) using [Azure Machine Learning Python SDK](https://docs.microsoft.com/en-us/azure/machine-learning/service/tutorial-1st-experiment-sdk-setup).
+
+For best experience use a Linux (Ubuntu) Azure [DSVM](https://docs.microsoft.com/en-us/azure/machine-learning/data-science-virtual-machine/dsvm-ubuntu-intro) and Jupyter Notebook with AzureML Python SDK and [Azure CLI](https://docs.microsoft.com/en-us/cli/azure/install-azure-cli?view=azure-cli-latest) to run the notebooks (see __Setting up Environment__ section below).
+
+Devito is a domain-specific language (DSL) and code generation framework for the design of highly optimized finite difference kernels via symbolic computation for use in inversion methods. Here we show how ```devito``` can be openly used in the cloud by leveraging the AzureML experimentation framework as a transparent and scalable platform for generic computation workloads. We focus on full waveform inversion (__FWI__) problems, where non-linear data-fitting procedures are applied to compute estimates of subsurface properties from seismic data.
+
+
+### Setting up Environment
+
+The [conda environment](https://docs.conda.io/projects/conda/en/latest/user-guide/concepts/environments.html) that encapsulates all the dependencies needed to run the notebooks described below can be created using the fwi_dev_conda_environment.yml file. See [here](https://github.com/Azure/MachineLearningNotebooks/blob/master/NBSETUP.md) for generic instructions on how to install and run the AzureML Python SDK in Jupyter Notebooks.
+
+To create the conda environment, run:
+```
+conda env create -f fwi_dev_conda_environment.yml
+
+```
+
+Then, one can check that the created environment appears in the list of available environments and export it as a .yml file:
+```
+conda env list
+conda env export --name fwi_dev_conda_environment -f ./contrib/fwi/azureml_devito/fwi_dev_conda_environment_exported.yml
+
+```
+The created conda environment needs to be activated, followed by the installation of its corresponding IPython kernel:
+```
+conda activate fwi_dev_conda_environment
+python -m ipykernel install --user --name fwi_dev_conda_environment --display-name "fwi_dev_conda_environment Python"
+```
+
+Finally, start Jupyter notebook from within the activated environment:
+```
+jupyter notebook
+```
+One can then choose the __fwi_dev_conda_environment Python__ kernel defined above either when a notebook is opened for the first time, or by using the "Kernel/Change kernel" notebook menu.
+
+
+
+[Azure CLI](https://docs.microsoft.com/en-us/cli/azure/install-azure-cli?view=azure-cli-latest) is also used to create an ACR in notebook 000_Setup_GeophysicsTutorial_FWI_Azure_devito, and then push and pull docker images. One can also create the ACR via Azure [portal](https://azure.microsoft.com/).
+
+### Run devito in Azure
+The devito fwi examples are run in AzureML using 4 notebooks:
+ - ```000_Setup_GeophysicsTutorial_FWI_Azure_devito.ipynb```: sets up Azure resources (like resource groups, AzureML [workspace](https://docs.microsoft.com/en-us/azure/machine-learning/service/how-to-manage-workspace), Azure (docker) [container registry](https://azure.microsoft.com/en-us/services/container-registry/)).
+ - ```010_CreateExperimentationDockerImage_GeophysicsTutorial_FWI_Azure_devito.ipynb```: Creates a custom docker file and the associated image that contains ```devito``` [github repository](https://github.com/opesci/devito.git) (including devito fwi tutorial [notebooks](https://github.com/opesci/devito/tree/master/examples/seismic/tutorials)) and runs the official devito install [tests](https://github.com/opesci/devito/tree/master/tests).
+ - ```020_UseAzureMLEstimatorForExperimentation_GeophysicsTutorial_FWI_Azure_devito.ipynb```: shows how the devito fwi tutorial [notebooks](https://github.com/opesci/devito/tree/master/examples/seismic/tutorials) can be run in AzureML using Azure Machine Learning [generic](https://docs.microsoft.com/en-us/python/api/azureml-train-core/azureml.train.estimator?view=azure-ml-py) [estimators](https://docs.microsoft.com/en-us/azure/machine-learning/service/how-to-train-ml-models) with custom docker images. FWI computation takes place on a managed AzureML [remote compute cluster](https://docs.microsoft.com/en-us/azure/machine-learning/service/how-to-set-up-training-targets).
+
+ ```Devito``` fwi computation artifacts (images and notebooks with data processing output results) are tracked under the AzureML workspace, and can be later downloaded and visualized.
+
+ Two ways of running devito code are shown:
+
+ (1) using __custom code__ (slightly modified graphing functions that save images to files). The AzureML experimentation job is defined by the devito code packaged as a .py file. The experimentation job (defined by the [azureml.core.experiment.Experiment](https://docs.microsoft.com/en-us/python/api/azureml-core/azureml.core.experiment.experiment?view=azure-ml-py) class) can be used to track metrics or other artifacts (images) that are available in the Azure portal.
+
+ (2) using [__papermill__](https://github.com/nteract/papermill) invoked via its Python API to run unedited devito demo notebooks (including the [dask](https://dask.org/) local cluster [example](https://github.com/opesci/devito/blob/master/examples/seismic/tutorials/04_dask.ipynb)) on the remote compute target, with the results saved as notebooks that are available in the Azure portal. A minimal submission sketch covering both approaches is shown after this list.
+
+ - ```030_ScaleJobsUsingAzuremL_GeophysicsTutorial_FWI_Azure_devito.ipynb```: shows how the devito fwi tutorial notebooks can be run in parallel on the elastically allocated AzureML [remote compute cluster](https://docs.microsoft.com/en-us/azure/machine-learning/service/how-to-set-up-training-targets) created before. By submitting multiple jobs via azureml.core.Experiment's submit(azureml.train.estimator.Estimator) call, one can use the [portal](https://portal.azure.com) to visualize the elastic allocation of AzureML [remote compute cluster](https://docs.microsoft.com/en-us/azure/machine-learning/service/how-to-set-up-training-targets) nodes.
+
+
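+The snippet below is a minimal, hypothetical sketch of the two submission paths described above. The workspace config, experiment name, compute target, docker image and entry script names are placeholders - the notebooks listed above contain the full, working versions.
+
+```
+from azureml.core import Workspace, Experiment
+from azureml.train.estimator import Estimator
+import papermill as pm
+
+ws = Workspace.from_config()  # assumes a previously saved AzureML workspace config
+exp = Experiment(workspace=ws, name="fwi-devito-tutorial")  # placeholder experiment name
+
+# (1) custom-code path: submit a script that runs a devito FWI example on the remote cluster
+est = Estimator(
+    source_directory="src",                  # placeholder folder containing the script
+    entry_script="fwi_custom.py",            # placeholder script name
+    compute_target="gpucluster",             # placeholder AzureML compute cluster name
+    custom_docker_image="fwi01acr.azurecr.io/fwi01_azureml:sdk.v1.0.76",  # placeholder ACR image
+    user_managed=True,
+)
+run = exp.submit(est)
+run.wait_for_completion(show_output=True)
+
+# (2) papermill path: run an unedited devito tutorial notebook and keep the executed copy
+# (a call like this would typically live inside the entry script submitted above)
+pm.execute_notebook(
+    "examples/seismic/tutorials/03_fwi.ipynb",
+    "outputs/03_fwi_executed.ipynb",
+)
+```
+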
diff --git a/contrib/fwi/azureml_devito/fwi_dev_conda_environment.yml b/contrib/fwi/azureml_devito/fwi_dev_conda_environment.yml
new file mode 100755
index 00000000..d3757663
--- /dev/null
+++ b/contrib/fwi/azureml_devito/fwi_dev_conda_environment.yml
@@ -0,0 +1,17 @@
+
+name: fwi_dev_conda_environment
+
+channels:
+ - anaconda
+dependencies:
+ - python=3.7
+ - numpy
+ - notebook
+ - ipykernel #nb_conda
+ - scikit-learn
+ - pip
+ - pip:
+ - python-dotenv
+ - papermill[azure]
+ - azureml-sdk[notebooks,automl,explain]==1.0.76
+ - docker
diff --git a/contrib/fwi/azureml_devito/fwi_dev_conda_environment_exported.yml b/contrib/fwi/azureml_devito/fwi_dev_conda_environment_exported.yml
new file mode 100644
index 00000000..ac0566d3
--- /dev/null
+++ b/contrib/fwi/azureml_devito/fwi_dev_conda_environment_exported.yml
@@ -0,0 +1,211 @@
+name: fwi_dev_conda_environment
+channels:
+ - anaconda
+ - defaults
+dependencies:
+ - attrs=19.3.0=py_0
+ - backcall=0.1.0=py37_0
+ - blas=1.0=mkl
+ - bleach=3.1.0=py37_0
+ - ca-certificates=2019.11.27=0
+ - certifi=2019.11.28=py37_0
+ - decorator=4.4.1=py_0
+ - defusedxml=0.6.0=py_0
+ - entrypoints=0.3=py37_0
+ - gmp=6.1.2=hb3b607b_0
+ - importlib_metadata=1.1.0=py37_0
+ - intel-openmp=2019.5=281
+ - ipykernel=5.1.3=py37h39e3cac_0
+ - ipython=7.10.1=py37h39e3cac_0
+ - ipython_genutils=0.2.0=py37_0
+ - jedi=0.15.1=py37_0
+ - jinja2=2.10.3=py_0
+ - joblib=0.14.0=py_0
+ - jsonschema=3.2.0=py37_0
+ - jupyter_client=5.3.4=py37_0
+ - jupyter_core=4.6.1=py37_0
+ - libedit=3.1.20181209=hc058e9b_0
+ - libffi=3.2.1=h4deb6c0_3
+ - libgcc-ng=9.1.0=hdf63c60_0
+ - libgfortran-ng=7.3.0=hdf63c60_0
+ - libsodium=1.0.16=h1bed415_0
+ - libstdcxx-ng=9.1.0=hdf63c60_0
+ - markupsafe=1.1.1=py37h7b6447c_0
+ - mistune=0.8.4=py37h7b6447c_0
+ - mkl=2019.5=281
+ - mkl-service=2.3.0=py37he904b0f_0
+ - mkl_fft=1.0.15=py37ha843d7b_0
+ - mkl_random=1.1.0=py37hd6b4f25_0
+ - more-itertools=7.2.0=py37_0
+ - nbconvert=5.6.1=py37_0
+ - nbformat=4.4.0=py37_0
+ - ncurses=6.1=he6710b0_1
+ - notebook=6.0.2=py37_0
+ - openssl=1.1.1=h7b6447c_0
+ - pandoc=2.2.3.2=0
+ - pandocfilters=1.4.2=py37_1
+ - parso=0.5.1=py_0
+ - pexpect=4.7.0=py37_0
+ - pickleshare=0.7.5=py37_0
+ - pip=19.3.1=py37_0
+ - prometheus_client=0.7.1=py_0
+ - prompt_toolkit=3.0.2=py_0
+ - ptyprocess=0.6.0=py37_0
+ - pygments=2.5.2=py_0
+ - pyrsistent=0.15.6=py37h7b6447c_0
+ - python=3.7.5=h0371630_0
+ - python-dateutil=2.8.1=py_0
+ - pyzmq=18.1.0=py37he6710b0_0
+ - readline=7.0=h7b6447c_5
+ - send2trash=1.5.0=py37_0
+ - setuptools=42.0.2=py37_0
+ - six=1.13.0=py37_0
+ - sqlite=3.30.1=h7b6447c_0
+ - terminado=0.8.3=py37_0
+ - testpath=0.4.4=py_0
+ - tk=8.6.8=hbc83047_0
+ - tornado=6.0.3=py37h7b6447c_0
+ - traitlets=4.3.3=py37_0
+ - wcwidth=0.1.7=py37_0
+ - webencodings=0.5.1=py37_1
+ - xz=5.2.4=h14c3975_4
+ - zeromq=4.3.1=he6710b0_3
+ - zipp=0.6.0=py_0
+ - zlib=1.2.11=h7b6447c_3
+ - pip:
+ - adal==1.2.2
+ - ansiwrap==0.8.4
+ - applicationinsights==0.11.9
+ - azure-common==1.1.23
+ - azure-core==1.1.1
+ - azure-datalake-store==0.0.48
+ - azure-graphrbac==0.61.1
+ - azure-mgmt-authorization==0.60.0
+ - azure-mgmt-containerregistry==2.8.0
+ - azure-mgmt-keyvault==2.0.0
+ - azure-mgmt-resource==7.0.0
+ - azure-mgmt-storage==7.0.0
+ - azure-storage-blob==12.1.0
+ - azureml-automl-core==1.0.76
+ - azureml-automl-runtime==1.0.76.1
+ - azureml-contrib-notebook==1.0.76
+ - azureml-core==1.0.76
+ - azureml-dataprep==1.1.33
+ - azureml-dataprep-native==13.1.0
+ - azureml-defaults==1.0.76
+ - azureml-explain-model==1.0.76
+ - azureml-interpret==1.0.76
+ - azureml-model-management-sdk==1.0.1b6.post1
+ - azureml-pipeline==1.0.76
+ - azureml-pipeline-core==1.0.76
+ - azureml-pipeline-steps==1.0.76
+ - azureml-sdk==1.0.76
+ - azureml-telemetry==1.0.76
+ - azureml-train==1.0.76
+ - azureml-train-automl==1.0.76
+ - azureml-train-automl-client==1.0.76
+ - azureml-train-automl-runtime==1.0.76.1
+ - azureml-train-core==1.0.76
+ - azureml-train-restclients-hyperdrive==1.0.76
+ - azureml-widgets==1.0.76
+ - backports-tempfile==1.0
+ - backports-weakref==1.0.post1
+ - boto==2.49.0
+ - boto3==1.10.37
+ - botocore==1.13.37
+ - cffi==1.13.2
+ - chardet==3.0.4
+ - click==7.0
+ - cloudpickle==1.2.2
+ - configparser==3.7.4
+ - contextlib2==0.6.0.post1
+ - cryptography==2.8
+ - cycler==0.10.0
+ - cython==0.29.14
+ - dill==0.3.1.1
+ - distro==1.4.0
+ - docker==4.1.0
+ - docutils==0.15.2
+ - dotnetcore2==2.1.11
+ - fire==0.2.1
+ - flake8==3.7.9
+ - flask==1.0.3
+ - fusepy==3.0.1
+ - future==0.18.2
+ - gensim==3.8.1
+ - gunicorn==19.9.0
+ - idna==2.8
+ - imageio==2.6.1
+ - interpret-community==0.2.3
+ - interpret-core==0.1.19
+ - ipywidgets==7.5.1
+ - isodate==0.6.0
+ - itsdangerous==1.1.0
+ - jeepney==0.4.1
+ - jmespath==0.9.4
+ - json-logging-py==0.2
+ - jsonform==0.0.2
+ - jsonpickle==1.2
+ - jsonsir==0.0.2
+ - keras2onnx==1.6.0
+ - kiwisolver==1.1.0
+ - liac-arff==2.4.0
+ - lightgbm==2.3.0
+ - matplotlib==3.1.2
+ - mccabe==0.6.1
+ - msrest==0.6.10
+ - msrestazure==0.6.2
+ - ndg-httpsclient==0.5.1
+ - networkx==2.4
+ - nimbusml==1.6.1
+ - numpy==1.16.2
+ - oauthlib==3.1.0
+ - onnx==1.6.0
+ - onnxconverter-common==1.6.0
+ - onnxmltools==1.4.1
+ - packaging==19.2
+ - pandas==0.23.4
+ - papermill==1.2.1
+ - pathspec==0.6.0
+ - patsy==0.5.1
+ - pillow==6.2.1
+ - pmdarima==1.1.1
+ - protobuf==3.11.1
+ - psutil==5.6.7
+ - pyasn1==0.4.8
+ - pycodestyle==2.5.0
+ - pycparser==2.19
+ - pyflakes==2.1.1
+ - pyjwt==1.7.1
+ - pyopenssl==19.1.0
+ - pyparsing==2.4.5
+ - python-dotenv==0.10.3
+ - python-easyconfig==0.1.7
+ - pytz==2019.3
+ - pywavelets==1.1.1
+ - pyyaml==5.2
+ - requests==2.22.0
+ - requests-oauthlib==1.3.0
+ - resource==0.2.1
+ - ruamel-yaml==0.15.89
+ - s3transfer==0.2.1
+ - scikit-image==0.16.2
+ - scikit-learn==0.20.3
+ - scipy==1.1.0
+ - secretstorage==3.1.1
+ - shap==0.29.3
+ - skl2onnx==1.4.9
+ - sklearn-pandas==1.7.0
+ - smart-open==1.9.0
+ - statsmodels==0.10.2
+ - tenacity==6.0.0
+ - termcolor==1.1.0
+ - textwrap3==0.9.2
+ - tqdm==4.40.2
+ - typing-extensions==3.7.4.1
+ - urllib3==1.25.7
+ - websocket-client==0.56.0
+ - werkzeug==0.16.0
+ - wheel==0.30.0
+ - widgetsnbextension==3.5.1
+prefix: /data/anaconda/envs/fwi_dev_conda_environment
diff --git a/contrib/fwi/azureml_devito/notebooks/000_Setup_GeophysicsTutorial_FWI_Azure_devito.ipynb b/contrib/fwi/azureml_devito/notebooks/000_Setup_GeophysicsTutorial_FWI_Azure_devito.ipynb
new file mode 100755
index 00000000..c9a17f1e
--- /dev/null
+++ b/contrib/fwi/azureml_devito/notebooks/000_Setup_GeophysicsTutorial_FWI_Azure_devito.ipynb
@@ -0,0 +1,923 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Copyright (c) Microsoft Corporation. \n",
+ "Licensed under the MIT License."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# FWI in Azure project\n",
+ "\n",
+ "## Set-up AzureML resources\n",
+ "\n",
+ "This project ports devito (https://github.com/opesci/devito) into Azure and runs tutorial notebooks at:\n",
+ "https://nbviewer.jupyter.org/github/opesci/devito/blob/master/examples/seismic/tutorials/\n",
+ "\n",
+ "\n",
+ "\n",
+ "In this notebook we setup AzureML resources. This notebook should be run once and will enable all subsequent notebooks.\n",
+ "\n",
+ "\n",
+ "User input requiring steps:\n",
+ " - [Fill in and save sensitive information](#dot_env_description)\n",
+ " - [Azure login](#Azure_login) (may be required first time the notebook is run) \n",
+ " - [Set __create_ACR_FLAG__ to true to trigger ACR creation and to save of ACR login info](#set_create_ACR_flag)\n",
+ " - [Azure CLI login ](#Azure_cli_login) (may be required once to create an [ACR](https://azure.microsoft.com/en-us/services/container-registry/)) \n",
+ "\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Allow multiple displays per cell\n",
+ "from IPython.core.interactiveshell import InteractiveShell\n",
+ "InteractiveShell.ast_node_interactivity = \"all\" "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Azure Machine Learning and Pipeline SDK-specific imports"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import sys, os\n",
+ "import shutil\n",
+ "import urllib\n",
+ "import azureml.core\n",
+ "from azureml.core import Workspace, Experiment\n",
+ "from azureml.core.compute import ComputeTarget, AmlCompute\n",
+ "from azureml.core.compute_target import ComputeTargetException\n",
+ "import platform, dotenv\n",
+ "import pathlib"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Azure ML SDK Version: 1.0.76\n"
+ ]
+ },
+ {
+ "data": {
+ "text/plain": [
+ "'Linux-4.15.0-1064-azure-x86_64-with-debian-stretch-sid'"
+ ]
+ },
+ "execution_count": 3,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "text/plain": [
+ "'/datadrive01/prj/DeepSeismic/contrib/fwi/azureml_devito/notebooks'"
+ ]
+ },
+ "execution_count": 3,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "print(\"Azure ML SDK Version: \", azureml.core.VERSION)\n",
+ "platform.platform()\n",
+ "os.getcwd()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "#### 1. Create utilities file\n",
+ "\n",
+ "##### 1.1 Define utilities file (project_utils.py) path\n",
+ "Utilities file created here has code for Azure resources access authorization, project configuration settings like directories and file names in __project_consts__ class."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "utils_file_name = 'project_utils'\n",
+ "auxiliary_files_dir = os.path.join(*(['.', 'src']))\n",
+ "\n",
+ "\n",
+ "utils_path_name = os.path.join(os.getcwd(), auxiliary_files_dir)\n",
+ "utils_full_name = os.path.join(utils_path_name, os.path.join(*([utils_file_name+'.py'])))\n",
+ "os.makedirs(utils_path_name, exist_ok=True)\n",
+ " \n",
+ "def ls_l(a_dir):\n",
+ " return ([f for f in os.listdir(a_dir) if os.path.isfile(os.path.join(a_dir, f))]) "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "##### 1.2. Edit/create project_utils.py file"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Overwriting /datadrive01/prj/DeepSeismic/contrib/fwi/azureml_devito/notebooks/./src/project_utils.py\n"
+ ]
+ }
+ ],
+ "source": [
+ "%%writefile $utils_full_name\n",
+ "\n",
+ "from azureml.core.authentication import ServicePrincipalAuthentication\n",
+ "from azureml.core.authentication import AzureCliAuthentication\n",
+ "from azureml.core.authentication import InteractiveLoginAuthentication\n",
+ "from azureml.core.authentication import AuthenticationException\n",
+ "import dotenv, logging, pathlib, os\n",
+ "\n",
+ "\n",
+ "# credit Mathew Salvaris\n",
+ "def get_auth(env_path):\n",
+ " \"\"\"Tries to get authorization info by first trying to get Service Principal info, then CLI, then interactive. \n",
+ " \"\"\"\n",
+ " logger = logging.getLogger(__name__)\n",
+ " crt_sp_pwd = os.environ.get(\"SP_PASSWORD\", None)\n",
+ " if crt_sp_pwd:\n",
+ " logger.debug(\"Trying to create Workspace with Service Principal\")\n",
+ " aml_sp_password = crt_sp_pwd\n",
+ " aml_sp_tennant_id = dotenv.get_key(env_path, 'SP_TENANT_ID')\n",
+ " aml_sp_username = dotenv.get_key(env_path, 'SP_APPLICATION_ID')\n",
+ " auth = ServicePrincipalAuthentication(\n",
+ " tenant_id=aml_sp_tennant_id,\n",
+ " username=aml_sp_username,\n",
+ " password=aml_sp_password,\n",
+ " )\n",
+ " else:\n",
+ " logger.debug(\"Trying to create Workspace with CLI Authentication\")\n",
+ " try:\n",
+ " auth = AzureCliAuthentication()\n",
+ " auth.get_authentication_header()\n",
+ " except AuthenticationException:\n",
+ " logger.debug(\"Trying to create Workspace with Interactive login\")\n",
+ " auth = InteractiveLoginAuthentication()\n",
+ "\n",
+ " return auth \n",
+ "\n",
+ "\n",
+ "def set_dotenv_info(dotenv_file_path, env_dict):\n",
+ " \"\"\"Use dict loop to set multiple keys in dotenv file.\n",
+ " Minimal file error management.\n",
+ " \"\"\"\n",
+ " logger = logging.getLogger(__name__)\n",
+ " if bool(env_dict):\n",
+ " dotenv_file = pathlib.Path(dotenv_file_path)\n",
+ " if not dotenv_file.is_file():\n",
+ " logger.debug('dotenv file not found, will create \"{}\" using the sensitive info you provided.'.format(dotenv_file_path))\n",
+ " dotenv_file.touch()\n",
+ " else:\n",
+ " logger.debug('dotenv file \"{}\" found, will (over)write it with current sensitive info you provided.'.format(dotenv_file_path))\n",
+ " \n",
+ " for crt_key, crt_val in env_dict.items():\n",
+ " dotenv.set_key(dotenv_file_path, crt_key, crt_val)\n",
+ "\n",
+ " else:\n",
+ " logger.debug(\\\n",
+ " 'Trying to save empty env_dict variable into {}, please set your sensitive info in a dictionary.'\\\n",
+ " .format(dotenv_file_path)) \n",
+ " \n",
+ "\n",
+ "class project_consts(object):\n",
+ " \"\"\"Keep project's file names and directory structure in one place.\n",
+ " Minimal setattr error management.\n",
+ " \"\"\"\n",
+ " \n",
+ " AML_WORKSPACE_CONFIG_DIR = ['.', '..', 'not_shared']\n",
+ " AML_EXPERIMENT_DIR = ['.', '..', 'temp']\n",
+ " AML_WORKSPACE_CONFIG_FILE_NAME = 'aml_ws_config.json'\n",
+ " DOTENV_FILE_PATH = AML_WORKSPACE_CONFIG_DIR + ['general.env'] \n",
+ " DOCKER_DOTENV_FILE_PATH = AML_WORKSPACE_CONFIG_DIR + ['dockerhub.env'] \n",
+ "\n",
+ " def __setattr__(self, *_):\n",
+ " raise TypeError\n",
+ "\n",
+ " \n",
+ "if __name__==\"__main__\":\n",
+ " \"\"\"Basic function/class tests.\n",
+ " \"\"\"\n",
+ " import sys, os\n",
+ " prj_consts = project_consts()\n",
+ " logger = logging.getLogger(__name__)\n",
+ " logging.basicConfig(level=logging.DEBUG) # Logging Levels: DEBUG\t10, NOTSET\t0\n",
+ " logger.debug('AML ws file = {}'.format(os.path.join(*([os.path.join(*(prj_consts.AML_WORKSPACE_CONFIG_DIR)),\n",
+ " prj_consts.AML_WORKSPACE_CONFIG_FILE_NAME]))))\n",
+ "\n",
+ " crt_dotenv_file_path = os.path.join(*(prj_consts.DOTENV_FILE_PATH))\n",
+ " set_dotenv_info(crt_dotenv_file_path, {})\n",
+ " "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "##### 1.3. Import utilities functions defined above"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "[None]"
+ ]
+ },
+ "execution_count": 6,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "def add_path_to_sys_path(path_to_append):\n",
+ " if not (any(path_to_append in paths for paths in sys.path)):\n",
+ " sys.path.append(path_to_append)\n",
+ " \n",
+ "paths_to_append = [os.path.join(os.getcwd(), auxiliary_files_dir)]\n",
+ "[add_path_to_sys_path(crt_path) for crt_path in paths_to_append]\n",
+ "\n",
+ "\n",
+ "import project_utils\n",
+ "prj_consts = project_utils.project_consts()\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "#### 2. Set-up the AML SDK infrastructure\n",
+ "\n",
+ "* Create Azure resource group (rsg), workspaces, \n",
+ "* save sensitive info using [python-dotenv](https://github.com/theskumar/python-dotenv) \n",
+ " \n",
+ "Notebook repeateability notes:\n",
+ "* The notebook tries to find and use an existing Azure resource group (rsg) defined by __crt_resource_group__. It creates a new one if needed. "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "\n",
+ "\n",
+ "##### Create [ACR]() first time this notebook is run. \n",
+ "Either docker hub or ACR can be used to store the experimentation image. To create the ACR, set: \n",
+ "```\n",
+ "create_ACR_FLAG=True \n",
+ "```\n",
+ "It will create an ACR by running severral steps described below in section 2.7. __Create an [ACR]__ \n",
+ " \n",
+ " \n",
+ "[Back](#user_input_requiring_steps) to summary of user input requiring steps."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "create_ACR_FLAG = False #True False"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "sensitive_info = {}"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "\n",
+ "##### 2.1. Input here sensitive and configuration information\n",
+ "[dotenv](https://github.com/theskumar/python-dotenv) is used to hide sensitive info, like Azure subscription name/ID. The serialized info needs to be manually input once. \n",
+ " \n",
+ "* REQUIRED ACTION for the 2 cells below: uncomment them, add the required info in first cell below, run both cells one. \n",
+ " The sensitive information will be packed in __sensitive_info__ dictionary variable, which that will then be saved in a following cell in an .env file (__dotenv_file_path__) that should likely be git ignored. \n",
+ "\n",
+ "* OPTIONAL STEP: After running once the two cells below to save __sensitive_info__ dictionary variable with your custom info, you can comment them and leave the __sensitive_info__ variable defined above as an empty python dictionary. \n",
+ " \n",
+ " \n",
+ "__Notes__:\n",
+ "* An empty __sensitive_info__ dictionary is ignored by the __set_dotenv_info__ function defined above in project_utils.py . \n",
+ "* The saved .env file will be used thereafter in each cell that starts with %dotenv. \n",
+ "* The saved .env file contains user specific information and it shoulld __not__ be version-controlled in git.\n",
+ "* If you would like to [use service principal authentication](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/manage-azureml-service/authentication-in-azureml/authentication-in-azure-ml.ipynb) make sure you provide the optional values as well (see get_auth function definition in project_utils.py file created above for details).\n",
+ "\n",
+ "[Back](#user_input_requiring_steps) to summary of user input requiring steps."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# subscription_id = \"\"\n",
+ "# resource_group = \"ghiordanfwirsg01\"\n",
+ "# workspace_name = \"ghiordanfwiws\"\n",
+ "# workspace_region = \"eastus2\"\n",
+ "# gpu_cluster_name = \"gpuclstfwi02\"\n",
+ "# gpucluster_admin_user_name = \"\"\n",
+ "# gpucluster_admin_user_password = \"\"\n",
+ "\n",
+ "# experimentation_docker_image_name = \"fwi01_azureml\"\n",
+ "# experimentation_docker_image_tag = \"sdk.v1.0.60\"\n",
+ "# docker_container_mount_point = os.getcwd() # use project directory or a subdirectory\n",
+ "\n",
+ "# docker_login = \"georgedockeraccount\"\n",
+ "# docker_pwd = \"\"\n",
+ "\n",
+ "# acr_name=\"fwi01acr\""
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# sensitive_info = {\n",
+ "# 'SUBSCRIPTION_ID':subscription_id,\n",
+ "# 'RESOURCE_GROUP':resource_group, \n",
+ "# 'WORKSPACE_NAME':workspace_name, \n",
+ "# 'WORKSPACE_REGION':workspace_region,\n",
+ "# 'GPU_CLUSTER_NAME':gpu_cluster_name,\n",
+ "# 'GPU_CLUSTER_ADMIN_USER_NAME':gpucluster_admin_user_name,\n",
+ "# 'GPU_CLUSTER_ADMIN_USER_PASSWORD':gpucluster_admin_user_password,\n",
+ "# 'EXPERIMENTATION_DOCKER_IMAGE_NAME':experimentation_docker_image_name,\n",
+ "# 'EXPERIMENTATION_DOCKER_IMAGE_TAG':experimentation_docker_image_tag,\n",
+ "# 'DOCKER_CONTAINER_MOUNT_POINT':docker_container_mount_point,\n",
+ "# 'DOCKER_LOGIN':docker_login,\n",
+ "# 'DOCKER_PWD':docker_pwd,\n",
+ "# 'ACR_NAME':acr_name\n",
+ "# }"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "##### 2.2. Save sensitive info\n",
+ "An empty __sensitive_info__ variable will be ingored. \n",
+ "A non-empty __sensitive_info__ variable will overwrite info in an existing .env file."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 11,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'./../not_shared/general.env'"
+ ]
+ },
+ "execution_count": 11,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "%load_ext dotenv\n",
+ "dotenv_file_path = os.path.join(*(prj_consts.DOTENV_FILE_PATH)) \n",
+ "os.makedirs(os.path.join(*(prj_consts.DOTENV_FILE_PATH[:-1])), exist_ok=True)\n",
+ "pathlib.Path(dotenv_file_path).touch()\n",
+ "\n",
+ "# # show .env file path\n",
+ "# !pwd\n",
+ "dotenv_file_path\n",
+ "\n",
+ "#save your sensitive info\n",
+ "project_utils.set_dotenv_info(dotenv_file_path, sensitive_info)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "##### 2.3. Use (load) saved sensitive info\n",
+ "THis is how sensitive info will be retrieved in other notebooks"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 12,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "%dotenv $dotenv_file_path\n",
+ "\n",
+ "subscription_id = os.getenv('SUBSCRIPTION_ID')\n",
+ "# # print a bit of subscription ID, to show dotenv file was found and loaded \n",
+ "# subscription_id[:2]\n",
+ "\n",
+ "crt_resource_group = os.getenv('RESOURCE_GROUP')\n",
+ "crt_workspace_name = os.getenv('WORKSPACE_NAME')\n",
+ "crt_workspace_region = os.getenv('WORKSPACE_REGION') "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "##### 2.4. Access your workspace\n",
+ "\n",
+ "* In AML SDK we can get a ws in two ways: \n",
+ " - via Workspace(subscription_id = ...) \n",
+ " - via Workspace.from_config(path=some_file_path). \n",
+ " \n",
+ "For demo purposes, both ways are shown in this notebook.\n",
+ "\n",
+ "* At first notebook run:\n",
+ " - the AML workspace ws is typically not found, so a new ws object is created and persisted on disk.\n",
+ " - If the ws has been created other ways (e.g. via Azure portal), it may be persisted on disk by calling ws1.write_config(...)."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 13,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "workspace_config_dir = os.path.join(*(prj_consts.AML_WORKSPACE_CONFIG_DIR))\n",
+ "workspace_config_file = prj_consts.AML_WORKSPACE_CONFIG_FILE_NAME\n",
+ "\n",
+ "# # print debug info if needed \n",
+ "# workspace_config_dir \n",
+ "# ls_l(os.path.join(os.getcwd(), os.path.join(*([workspace_config_dir]))))"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "\n",
+ "###### Login into Azure may be required here\n",
+ "[Back](#user_input_requiring_steps) to summary of user input requiring steps."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 14,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "WARNING - Warning: Falling back to use azure cli login credentials.\n",
+ "If you run your code in unattended mode, i.e., where you can't give a user input, then we recommend to use ServicePrincipalAuthentication or MsiAuthentication.\n",
+ "Please refer to aka.ms/aml-notebook-auth for different authentication mechanisms in azureml-sdk.\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Workspace configuration loading succeeded. \n"
+ ]
+ }
+ ],
+ "source": [
+ "try:\n",
+ " ws1 = Workspace(\n",
+ " subscription_id = subscription_id, \n",
+ " resource_group = crt_resource_group, \n",
+ " workspace_name = crt_workspace_name,\n",
+ " auth=project_utils.get_auth(dotenv_file_path))\n",
+ " print(\"Workspace configuration loading succeeded. \")\n",
+ "# ws1.write_config(path=os.path.join(os.getcwd(), os.path.join(*([workspace_config_dir]))),\n",
+ "# file_name=workspace_config_file)\n",
+ " del ws1 # ws will be (re)created later using from_config() function\n",
+ "except Exception as e :\n",
+ " print('Exception msg: {}'.format(str(e )))\n",
+ " print(\"Workspace not accessible. Will create a new workspace below\")\n",
+ " \n",
+ " workspace_region = crt_workspace_region\n",
+ "\n",
+ " # Create the workspace using the specified parameters\n",
+ " ws2 = Workspace.create(name = crt_workspace_name,\n",
+ " subscription_id = subscription_id,\n",
+ " resource_group = crt_resource_group, \n",
+ " location = workspace_region,\n",
+ " create_resource_group = True,\n",
+ " exist_ok = False)\n",
+ " ws2.get_details()\n",
+ "\n",
+ " # persist the subscription id, resource group name, and workspace name in aml_config/config.json.\n",
+ " ws2.write_config(path=os.path.join(os.getcwd(), os.path.join(*([workspace_config_dir]))),\n",
+ " file_name=workspace_config_file)\n",
+ " \n",
+ " #Delete ws2 and use ws = Workspace.from_config() as shwon below to recover the ws, rather than rely on what we get from one time creation\n",
+ " del ws2"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "##### 2.5. Demo access to created workspace\n",
+ "\n",
+ "From now on, even in other notebooks, the provisioned AML workspace will be accesible using Workspace.from_config() as shown below:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 15,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# path arg is:\n",
+ "# - a file path which explictly lists aml_config subdir for function from_config() \n",
+ "# - a dir path with a silently added <> subdir for function write_config(). \n",
+ "ws = Workspace.from_config(path=os.path.join(os.getcwd(), \n",
+ " os.path.join(*([workspace_config_dir, '.azureml', workspace_config_file]))))\n",
+ "# # print debug info if needed\n",
+ "# print(ws.name, ws.resource_group, ws.location, ws.subscription_id[0], sep = '\\n')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "##### 2.6. Create compute cluster used in following notebooks"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 16,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'gpuclstfwi02'"
+ ]
+ },
+ "execution_count": 16,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "gpu_cluster_name = os.getenv('GPU_CLUSTER_NAME')\n",
+ "gpu_cluster_name"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 17,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Found existing gpu cluster\n"
+ ]
+ }
+ ],
+ "source": [
+ "max_nodes_value = 3\n",
+ "\n",
+ "try:\n",
+ " gpu_cluster = ComputeTarget(workspace=ws, name=gpu_cluster_name)\n",
+ " print(\"Found existing gpu cluster\")\n",
+ "except ComputeTargetException:\n",
+ " print(\"Could not find gpu cluster, please create one\")\n",
+ " \n",
+ "# # Specify the configuration for the new cluster, add admin_user_ssh_key='ssh-rsa ... ghiordan@microsoft.com' if needed\n",
+ "# compute_config = AmlCompute.provisioning_configuration(vm_size=\"Standard_NC12\",\n",
+ "# min_nodes=0,\n",
+ "# max_nodes=max_nodes_value,\n",
+ "# admin_username=os.getenv('GPU_CLUSTER_ADMIN_USER_NAME'), \n",
+ "# admin_user_password=os.getenv('GPU_CLUSTER_ADMIN_USER_NAME'))\n",
+ "# # Create the cluster with the specified name and configuration\n",
+ "# gpu_cluster = ComputeTarget.create(ws, gpu_cluster_name, compute_config)\n",
+ "\n",
+ "# # Wait for the cluster to complete, show the output log\n",
+ "# gpu_cluster.wait_for_completion(show_output=True)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "##### 2.7. Create an [ACR](https://docs.microsoft.com/en-us/azure/container-registry/) if you have not done so using the [portal](https://docs.microsoft.com/en-us/azure/container-registry/container-registry-get-started-portal) \n",
+ " - Follow the 4 ACR steps described below. \n",
+ " - Uncomment cells' lines as needed to login and see commands responses while you set the right subscription and then create the ACR. \n",
+ " - You need [Azure CLI](https://docs.microsoft.com/en-us/cli/azure/install-azure-cli) to run the commands below. \n",
+ "\n",
+ "\n",
+ "##### ACR Step 1. Select ACR subscription (az cli login into Azure may be required here)\n",
+ "[Back](#user_input_requiring_steps) to summary of user input requiring steps."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 18,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "azure-cli 2.0.58 *\r\n",
+ "\r\n",
+ "acr 2.2.0 *\r\n",
+ "acs 2.3.17 *\r\n",
+ "advisor 2.0.0 *\r\n",
+ "ams 0.4.1 *\r\n",
+ "appservice 0.2.13 *\r\n",
+ "backup 1.2.1 *\r\n",
+ "batch 3.4.1 *\r\n",
+ "batchai 0.4.7 *\r\n",
+ "billing 0.2.0 *\r\n",
+ "botservice 0.1.6 *\r\n",
+ "cdn 0.2.0 *\r\n",
+ "cloud 2.1.0 *\r\n",
+ "cognitiveservices 0.2.4 *\r\n",
+ "command-modules-nspkg 2.0.2 *\r\n",
+ "configure 2.0.20 *\r\n",
+ "consumption 0.4.2 *\r\n",
+ "container 0.3.13 *\r\n",
+ "core 2.0.58 *\r\n",
+ "cosmosdb 0.2.7 *\r\n",
+ "dla 0.2.4 *\r\n",
+ "dls 0.1.8 *\r\n",
+ "dms 0.1.2 *\r\n",
+ "eventgrid 0.2.1 *\r\n",
+ "eventhubs 0.3.3 *\r\n",
+ "extension 0.2.3 *\r\n",
+ "feedback 2.1.4 *\r\n",
+ "find 0.2.13 *\r\n",
+ "hdinsight 0.3.0 *\r\n",
+ "interactive 0.4.1 *\r\n",
+ "iot 0.3.6 *\r\n",
+ "iotcentral 0.1.6 *\r\n",
+ "keyvault 2.2.11 *\r\n",
+ "kusto 0.1.0 *\r\n",
+ "lab 0.1.5 *\r\n",
+ "maps 0.3.3 *\r\n",
+ "monitor 0.2.10 *\r\n",
+ "network 2.3.2 *\r\n",
+ "nspkg 3.0.3 *\r\n",
+ "policyinsights 0.1.1 *\r\n",
+ "profile 2.1.3 *\r\n",
+ "rdbms 0.3.7 *\r\n",
+ "redis 0.4.0 *\r\n",
+ "relay 0.1.3 *\r\n",
+ "reservations 0.4.1 *\r\n",
+ "resource 2.1.10 *\r\n",
+ "role 2.4.0 *\r\n",
+ "search 0.1.1 *\r\n",
+ "security 0.1.0 *\r\n",
+ "servicebus 0.3.3 *\r\n",
+ "servicefabric 0.1.12 *\r\n",
+ "signalr 1.0.0 *\r\n",
+ "sql 2.1.9 *\r\n",
+ "sqlvm 0.1.0 *\r\n",
+ "storage 2.3.1 *\r\n",
+ "telemetry 1.0.1 *\r\n",
+ "vm 2.2.15 *\r\n",
+ "\r\n",
+ "Extensions:\r\n",
+ "azure-ml-admin-cli 0.0.1\r\n",
+ "azure-cli-ml Unknown\r\n",
+ "\r\n",
+ "Python location '/opt/az/bin/python3'\r\n",
+ "Extensions directory '/opt/az/extensions'\r\n",
+ "\r\n",
+ "Python (Linux) 3.6.5 (default, Feb 12 2019, 02:10:43) \r\n",
+ "[GCC 5.4.0 20160609]\r\n",
+ "\r\n",
+ "Legal docs and information: aka.ms/AzureCliLegal\r\n",
+ "\r\n",
+ "\r\n",
+ "\u001b[33mYou have 57 updates available. Consider updating your CLI installation.\u001b[0m\r\n"
+ ]
+ }
+ ],
+ "source": [
+ "!az --version\n",
+ "if create_ACR_FLAG:\n",
+ " !az login\n",
+ " response01 = ! az account list --all --refresh -o table\n",
+ " response02 = ! az account set --subscription $subscription_id\n",
+ " response03 = ! az account list -o table\n",
+ " response04 = ! $cli_command\n",
+ "\n",
+ " response01\n",
+ " response02\n",
+ " response03\n",
+ " response04"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "##### ACR Step 2. Create the ACR"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 19,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'az acr create --resource-group ghiordanfwirsg01 --name fwi01acr --sku Basic'"
+ ]
+ },
+ "execution_count": 19,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "text/plain": [
+ "[' \"loginServer\": \"fwi01acr.azurecr.io\",',\n",
+ " ' \"name\": \"fwi01acr\",',\n",
+ " ' \"networkRuleSet\": null,',\n",
+ " ' \"provisioningState\": \"Succeeded\",',\n",
+ " ' \"resourceGroup\": \"ghiordanfwirsg01\",',\n",
+ " ' \"sku\": {',\n",
+ " ' \"name\": \"Basic\",',\n",
+ " ' \"tier\": \"Basic\"',\n",
+ " ' },',\n",
+ " ' \"status\": null,',\n",
+ " ' \"storageAccount\": null,',\n",
+ " ' \"tags\": {},',\n",
+ " ' \"type\": \"Microsoft.ContainerRegistry/registries\"',\n",
+ " '}']"
+ ]
+ },
+ "execution_count": 19,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "%dotenv $dotenv_file_path\n",
+ "acr_name = os.getenv('ACR_NAME')\n",
+ "\n",
+ "cli_command='az acr create --resource-group '+ crt_resource_group +' --name ' + acr_name + ' --sku Basic'\n",
+ "cli_command\n",
+ "\n",
+ "response = !$cli_command\n",
+ "response[-14:]"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "##### ACR Step 3. Also enable password and login via __ [--admin-enabled true](https://docs.microsoft.com/en-us/azure/container-registry/container-registry-authentication) __ and then use the az cli or portal to set up the credentials"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 20,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'az acr update -n fwi01acr --admin-enabled true'"
+ ]
+ },
+ "execution_count": 20,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "# per https://docs.microsoft.com/en-us/azure/container-registry/container-registry-authentication\n",
+ "cli_command='az acr update -n '+acr_name+' --admin-enabled true'\n",
+ "cli_command\n",
+ "\n",
+ "response = !$cli_command\n",
+ "# response"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "##### ACR Step 4. Save the ACR password and login"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 21,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# create_ACR_FLAG=False\n",
+ "if create_ACR_FLAG:\n",
+ " import subprocess\n",
+ " cli_command = 'az acr credential show -n '+acr_name\n",
+ "\n",
+ "acr_username = subprocess.Popen(cli_command+' --query username',shell=True,stdout=subprocess.PIPE, stderr=subprocess.PIPE).\\\n",
+ "communicate()[0].decode(\"utf-8\").split()[0].strip('\\\"')\n",
+ "\n",
+ "acr_password = subprocess.Popen(cli_command+' --query passwords[0].value',shell=True,stdout=subprocess.PIPE, stderr=subprocess.PIPE).\\\n",
+ "communicate()[0].decode(\"utf-8\").split()[0].strip('\\\"')\n",
+ "\n",
+ "response = dotenv.set_key(dotenv_file_path, 'ACR_PASSWORD', acr_password)\n",
+ "response = dotenv.set_key(dotenv_file_path, 'ACR_USERNAME', acr_username)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 22,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "%reload_ext dotenv\n",
+ "%dotenv -o $dotenv_file_path\n",
+ "\n",
+ "# print acr password and login info saved in dotenv file\n",
+ "if create_ACR_FLAG:\n",
+ " os.getenv('ACR_PASSWORD')\n",
+ " os.getenv('ACR_USERNAME')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "print('Finished running 000_Setup_GeophysicsTutorial_FWI_Azure_devito!')"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python [conda env:fwi_dev_conda_environment] *",
+ "language": "python",
+ "name": "conda-env-fwi_dev_conda_environment-py"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.7.5"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/contrib/fwi/azureml_devito/notebooks/010_CreateExperimentationDockerImage_GeophysicsTutorial_FWI_Azure_devito.ipynb b/contrib/fwi/azureml_devito/notebooks/010_CreateExperimentationDockerImage_GeophysicsTutorial_FWI_Azure_devito.ipynb
new file mode 100755
index 00000000..aac30038
--- /dev/null
+++ b/contrib/fwi/azureml_devito/notebooks/010_CreateExperimentationDockerImage_GeophysicsTutorial_FWI_Azure_devito.ipynb
@@ -0,0 +1,1060 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Copyright (c) Microsoft Corporation. \n",
+ "Licensed under the MIT License.\n",
+ "\n",
+ "# FWI in Azure project\n",
+ "\n",
+ "## Create Experimentation Docker image\n",
+ "\n",
+ "FWI demo based on: \n",
+ "This project ports devito (https://github.com/opesci/devito) into Azure and runs tutorial notebooks at:\n",
+ "https://nbviewer.jupyter.org/github/opesci/devito/blob/master/examples/seismic/tutorials/\n",
+ "\n",
+ "\n",
+ "\n",
+ "In this notebook we create a custom docker image that will be used to run the devito demo notebooks in AzureML. \n",
+ "\n",
+ " - We transparently create a docker file, a conda environment .yml file, build the docker image and push it into dockerhub. Azure ACR could also be used for storing docker images. \n",
+ " - The conda environment .yml file lists conda and pip installs, and separates all python dependencies from the docker installs. \n",
+ " - The dockerfile is generic. The only AzureML depedency is azureml-sdk pip installable package in conda environment .yml file\n",
+ " - The created docer image will be run in following notebook in a container on the local AzureVM or on a remote AzureML compute cluster. This AzureML pattern decouples experimentation (or training) job definition (experimentation script, data location, dependencies and docker image) happening on the control plane machine that runs this notebook, from the elastically allocated and Azure managed VM/cluster that does the actual training/experimentation computation.\n",
+ " \n",
+ "\n",
+ "User input requiring steps:\n",
+ " - [Fill in and save docker image name settings, if needed. ](#docker_image_settings)\n",
+ " - [Update DOCKER_CONTAINER_MOUNT_POINT to match our local path](#docker_image_settings)\n",
+ " - [Set docker build and test flags](#docker_build_test_settings) \n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Allow multiple displays per cell\n",
+ "from IPython.core.interactiveshell import InteractiveShell\n",
+ "InteractiveShell.ast_node_interactivity = \"all\" "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import sys, os\n",
+ "import shutil\n",
+ "import urllib\n",
+ "\n",
+ "import platform\n",
+ "import math\n",
+ "import docker"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'Linux-4.15.0-1063-azure-x86_64-with-debian-stretch-sid'"
+ ]
+ },
+ "execution_count": 3,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "text/plain": [
+ "'/datadrive01/prj/DeepSeismic/contrib/fwi/azureml_devito/notebooks'"
+ ]
+ },
+ "execution_count": 3,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "platform.platform()\n",
+ "os.getcwd()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "\n",
+ "#### Setup docker image build and test process. \n",
+ " - devito tests take abou 15 mins (981.41 seconds). When running this notebook for first time make:\n",
+ " > docker_build_no_cache = '--no-cache' \n",
+ " > docker_test_run_devito_tests = True\n",
+ " \n",
+ "[Back](#user_input_requiring_steps) to summary of user input requiring steps."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "docker_build_no_cache = '' # '--no-cache' # or '' #\n",
+ "docker_test_run_devito_tests = True # True # False"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "##### Import utilities functions"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "[None]"
+ ]
+ },
+ "execution_count": 5,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "def add_path_to_sys_path(path_to_append):\n",
+ " if not (any(path_to_append in paths for paths in sys.path)):\n",
+ " sys.path.append(path_to_append)\n",
+ " \n",
+ "auxiliary_files_dir = os.path.join(*(['.', 'src']))\n",
+ "paths_to_append = [os.path.join(os.getcwd(), auxiliary_files_dir)]\n",
+ "[add_path_to_sys_path(crt_path) for crt_path in paths_to_append]\n",
+ "\n",
+ "import project_utils\n",
+ "prj_consts = project_utils.project_consts()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "##### Create experimentation docker file"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'./../not_shared/general.env'"
+ ]
+ },
+ "execution_count": 6,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "dotenv_file_path = os.path.join(*(prj_consts.DOTENV_FILE_PATH))\n",
+ "dotenv_file_path"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "/datadrive01/prj/DeepSeismic/contrib/fwi/azureml_devito/notebooks\r\n"
+ ]
+ }
+ ],
+ "source": [
+ "!pwd"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# azureml_sdk_version set here must match azureml sdk version pinned in conda env file written to conda_common_file_path below\n",
+ "azureml_sdk_version = '1.0.76' "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "\n",
+ "\n",
+ "##### Input here docker image settings \n",
+ "in cell below we use [dotenv](https://github.com/theskumar/python-dotenv) to overwrite docker image properties already save in dotenv_file_path. Change as needed, e.g. update azureml_sdk version if using a different version.\n",
+ "\n",
+ "[Back](#user_input_requiring_steps) to summary of user input requiring steps."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "(True, 'EXPERIMENTATION_DOCKER_IMAGE_TAG', 'sdk.v1.0.76')"
+ ]
+ },
+ "execution_count": 9,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "text/plain": [
+ "(True,\n",
+ " 'DOCKER_CONTAINER_MOUNT_POINT',\n",
+ " '/datadrive01/prj/DeepSeismic/contrib/fwi/azureml_devito/notebooks')"
+ ]
+ },
+ "execution_count": 9,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "# SDK changes often, so we'll keep its version transparent \n",
+ "import dotenv\n",
+ "\n",
+ "# EXPERIMENTATION_IMAGE_VERSION should:\n",
+ "# - match sdk version in fwi01_conda_env01 environmnet in conda_env_fwi01_azureml_sdk.v1.0.XX.yml file below\n",
+ "# - match the conda env yml file name, e.g. conda_env_fwi01_azureml_sdk.v1.0.xx.yml referenced in \n",
+ "# Dockerfile_fwi01_azureml_sdk.v1.0.xx\n",
+ "# dotenv.set_key(dotenv_file_path, 'EXPERIMENTATION_DOCKER_IMAGE_NAME', 'fwi01_azureml')\n",
+ "dotenv.set_key(dotenv_file_path, 'EXPERIMENTATION_DOCKER_IMAGE_TAG', ('sdk.v'+azureml_sdk_version))\n",
+ "\n",
+ "\n",
+ "docker_container_mount_point = os.getcwd()\n",
+ "# or something like \"/datadrive01/prj/DeepSeismic/contrib/fwi/azureml_devito/notebooks'\n",
+ "dotenv.set_key(dotenv_file_path, 'DOCKER_CONTAINER_MOUNT_POINT', docker_container_mount_point)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'fwi01acr.azurecr.io/fwi01_azureml:sdk.v1.0.76'"
+ ]
+ },
+ "execution_count": 10,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "text/plain": [
+ "'conda_env_fwi01_azureml_sdk.v1.0.76.yml'"
+ ]
+ },
+ "execution_count": 10,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "text/plain": [
+ "'/datadrive01/prj/DeepSeismic/contrib/fwi/azureml_devito/notebooks/./../temp/docker_build/conda_env_fwi01_azureml_sdk.v1.0.76.yml'"
+ ]
+ },
+ "execution_count": 10,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "text/plain": [
+ "'/datadrive01/prj/DeepSeismic/contrib/fwi/azureml_devito/notebooks/./../temp/docker_build/conda_env_fwi01_azureml.yml'"
+ ]
+ },
+ "execution_count": 10,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "text/plain": [
+ "'/datadrive01/prj/DeepSeismic/contrib/fwi/azureml_devito/notebooks/./../temp/docker_build'"
+ ]
+ },
+ "execution_count": 10,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "text/plain": [
+ "'/datadrive01/prj/DeepSeismic/contrib/fwi/azureml_devito/notebooks/./../temp/docker_build/Dockerfile_fwi01_azureml_sdk.v1.0.76'"
+ ]
+ },
+ "execution_count": 10,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "%reload_ext dotenv\n",
+ "%dotenv $dotenv_file_path\n",
+ "\n",
+ "docker_file_location = os.path.join(*(prj_consts.AML_EXPERIMENT_DIR + ['docker_build']))\n",
+ "\n",
+ "docker_file_name = 'Dockerfile'+ '_' + os.getenv('EXPERIMENTATION_DOCKER_IMAGE_NAME')\n",
+ "\n",
+ "conda_dependency_file_name = 'conda_env'+ '_' + os.getenv('EXPERIMENTATION_DOCKER_IMAGE_NAME')\n",
+ "conda_dependency_common_file_name = conda_dependency_file_name\n",
+ "\n",
+ "devito_conda_dependency_file_name = 'devito_conda_env'+'.yml'\n",
+ "\n",
+ "docker_repo_name = os.getenv('ACR_NAME')+'.azurecr.io' # or os.getenv('DOCKER_LOGIN')\n",
+ "docker_image_name = docker_repo_name + '/' + os.getenv('EXPERIMENTATION_DOCKER_IMAGE_NAME')\n",
+ "\n",
+ "image_version = os.getenv('EXPERIMENTATION_DOCKER_IMAGE_TAG')\n",
+ "if image_version!=\"\":\n",
+ " docker_file_name = docker_file_name +'_'+ image_version\n",
+ " conda_dependency_file_name = conda_dependency_file_name+'_'+ image_version\n",
+ " docker_image_name = docker_image_name +':'+ image_version\n",
+ "conda_dependency_file_name=conda_dependency_file_name+'.yml'\n",
+ "conda_dependency_common_file_name = conda_dependency_common_file_name+'.yml'\n",
+ "\n",
+ "docker_file_dir = os.path.join(*([os.getcwd(), docker_file_location]))\n",
+ "os.makedirs(docker_file_dir, exist_ok=True)\n",
+ "docker_file_path = os.path.join(*([docker_file_dir]+[docker_file_name]))\n",
+ "conda_file_path = os.path.join(*([docker_file_dir]+[conda_dependency_file_name]))\n",
+ "conda_common_file_path = os.path.join(*([docker_file_dir]+[conda_dependency_common_file_name]))\n",
+ "\n",
+ "docker_image_name\n",
+ "\n",
+ "conda_dependency_file_name\n",
+ "conda_file_path\n",
+ "conda_common_file_path\n",
+ "\n",
+ "docker_file_dir\n",
+ "docker_file_path"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 11,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Writing /datadrive01/prj/DeepSeismic/contrib/fwi/azureml_devito/notebooks/./../temp/docker_build/conda_env_fwi01_azureml.yml\n"
+ ]
+ }
+ ],
+ "source": [
+ "%%writefile $conda_common_file_path\n",
+ "name: fwi01_conda_env01\n",
+ " \n",
+ "#https://storage.googleapis.com/tensorflow/linux/gpu/tensorflow_gpu-1.13.1-cp37-cp37m-linux_x86_64.whl \n",
+ "# https://github.com/dask/dask-tutorial\n",
+ "\n",
+ "channels:\n",
+ " - anaconda\n",
+ " - conda-forge\n",
+ "dependencies:\n",
+ " - python=3.6 # 3.6 req by tf, not 3.7.2 \n",
+ " - dask\n",
+ " - distributed\n",
+ " - h5py\n",
+ " - matplotlib\n",
+ " - nb_conda\n",
+ " - notebook \n",
+ " - numpy \n",
+ " - pandas\n",
+ " - pip\n",
+ " - py-cpuinfo # all required by devito or dask-tutorial\n",
+ " - pytables\n",
+ " - python-graphviz\n",
+ " - requests\n",
+ " - pillow\n",
+ " - scipy\n",
+ " - snakeviz\n",
+ " - scikit-image\n",
+ " - toolz\n",
+ " - pip:\n",
+ " - anytree # required by devito\n",
+ " - azureml-sdk[notebooks,automl]==1.0.76\n",
+ " - codepy # required by devito\n",
+ " - papermill[azure]\n",
+ " - pyrevolve # required by devito"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 12,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Writing /datadrive01/prj/DeepSeismic/contrib/fwi/azureml_devito/notebooks/./../temp/docker_build/Dockerfile_fwi01_azureml_sdk.v1.0.76\n"
+ ]
+ }
+ ],
+ "source": [
+ "%%writefile $docker_file_path \n",
+ "\n",
+ "FROM continuumio/miniconda3:4.7.10 \n",
+ "MAINTAINER George Iordanescu \n",
+ "\n",
+ "RUN apt-get update --fix-missing && apt-get install -y --no-install-recommends \\\n",
+ " gcc g++ \\\n",
+ " wget bzip2 \\\n",
+ " curl \\\n",
+ " git make \\\n",
+ " mpich \\ \n",
+ " libmpich-dev && \\\n",
+ " apt-get clean && \\\n",
+ " rm -rf /var/lib/apt/lists/*\n",
+ "\n",
+ "ENV CONDA_ENV_FILE_NAME conda_env_fwi01_azureml.yml\n",
+ "ADD $CONDA_ENV_FILE_NAME /tmp/$CONDA_ENV_FILE_NAME\n",
+ "ENV CONDA_DIR /opt/conda\n",
+ "ENV CONDA_ENV_NAME fwi01_conda_env\n",
+ "\n",
+ "RUN git clone https://github.com/opesci/devito.git && \\\n",
+ " cd devito && \\\n",
+ " /opt/conda/bin/conda env create -q --name $CONDA_ENV_NAME -f environment.yml && \\\n",
+ " pip install -e . \n",
+ " \n",
+ "ENV CONDA_AUTO_UPDATE_CONDA=false\n",
+ "ENV CONDA_DEFAULT_ENV=$CONDA_ENV_NAME\n",
+ "ENV CONDA_PREFIX=$CONDA_DIR/envs/$CONDA_DEFAULT_ENV\n",
+ "ENV PATH=$CONDA_PREFIX/bin:/opt/conda/bin:$PATH \n",
+ "\n",
+ "RUN /opt/conda/bin/conda env update --name $CONDA_ENV_NAME -f /tmp/$CONDA_ENV_FILE_NAME && \\\n",
+ " /opt/conda/bin/conda clean --yes --all\n",
+ "\n",
+ "ENV PYTHONPATH=$PYTHONPATH:devito/app\n",
+ "\n",
+ "# WORKDIR /devito \n",
+ " \n",
+ "CMD /bin/bash"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 13,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'/datadrive01/prj/DeepSeismic/contrib/fwi/azureml_devito/notebooks/./../temp/docker_build/conda_env_fwi01_azureml_sdk.v1.0.76.yml'"
+ ]
+ },
+ "execution_count": 13,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "total 12\r\n",
+ "-rw-rw-r-- 1 loginvm022 loginvm022 725 Dec 6 15:26 conda_env_fwi01_azureml_sdk.v1.0.76.yml\r\n",
+ "-rw-rw-r-- 1 loginvm022 loginvm022 725 Dec 6 15:26 conda_env_fwi01_azureml.yml\r\n",
+ "-rw-rw-r-- 1 loginvm022 loginvm022 1073 Dec 6 15:26 Dockerfile_fwi01_azureml_sdk.v1.0.76\r\n"
+ ]
+ }
+ ],
+ "source": [
+ "shutil.copyfile(conda_common_file_path, conda_file_path)\n",
+ "\n",
+ "! ls -l $docker_file_dir"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 14,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'docker build -t fwi01acr.azurecr.io/fwi01_azureml:sdk.v1.0.76 -f /datadrive01/prj/DeepSeismic/contrib/fwi/azureml_devito/notebooks/./../temp/docker_build/Dockerfile_fwi01_azureml_sdk.v1.0.76 /datadrive01/prj/DeepSeismic/contrib/fwi/azureml_devito/notebooks/./../temp/docker_build '"
+ ]
+ },
+ "execution_count": 14,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "text/plain": [
+ "['Sending build context to Docker daemon 6.144kB',\n",
+ " '',\n",
+ " 'Step 1/15 : FROM continuumio/miniconda3:4.7.10',\n",
+ " '4.7.10: Pulling from continuumio/miniconda3',\n",
+ " '1ab2bdfe9778: Pulling fs layer']"
+ ]
+ },
+ "execution_count": 14,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "text/plain": [
+ "[' ---> Running in 00c2824f0cd3',\n",
+ " 'Removing intermediate container 00c2824f0cd3',\n",
+ " ' ---> 48fb03897096',\n",
+ " 'Successfully built 48fb03897096',\n",
+ " 'Successfully tagged fwi01acr.azurecr.io/fwi01_azureml:sdk.v1.0.76']"
+ ]
+ },
+ "execution_count": 14,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "cli_command='docker build -t '+ docker_image_name + \\\n",
+ "' -f ' + docker_file_path + \\\n",
+ "' ' + docker_file_dir + ' ' +\\\n",
+ "docker_build_no_cache #'' #' --no-cache'\n",
+ "\n",
+ "\n",
+ "cli_command\n",
+ "docker_build_response = ! $cli_command\n",
+ "\n",
+ "docker_build_response[0:5] \n",
+ "docker_build_response[-5:] "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Docker containers can be run using python docker sdk"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 15,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'fwi01acr.azurecr.io/fwi01_azureml:sdk.v1.0.76'"
+ ]
+ },
+ "execution_count": 15,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "text/plain": [
+ "'bash -c \"pwd;python -c \\'import azureml.core;print(azureml.core.VERSION)\\'\"'"
+ ]
+ },
+ "execution_count": 15,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "text/plain": [
+ "b'/\\n1.0.76\\n'"
+ ]
+ },
+ "execution_count": 15,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "docker_image_name\n",
+ "\n",
+ "sh_command='bash -c \"pwd;python -c \\'import azureml.core;print(azureml.core.VERSION)\\'\"'\n",
+ "sh_command\n",
+ "client = docker.from_env()\n",
+ "client.containers.run(docker_image_name, \n",
+ " remove=True,\n",
+ " volumes={os.getenv('DOCKER_CONTAINER_MOUNT_POINT'): {'bind': '/workspace', 'mode': 'rw'}},\n",
+ " working_dir='/',\n",
+ " command=sh_command)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Docker containers can also be run in cli \n",
+ "\n",
+ "Here we also create a log file to capture commands execution in container. If flag docker_test_run_devito_tests is True, we run \n",
+ "and capture test commands output. Tests take abou 15 minutes to run. If flag docker_test_run_devito_tests is False, we show the results of a previous session. "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 16,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'./fwi01_azureml_buildexperimentationdockerimage.log'"
+ ]
+ },
+ "execution_count": 16,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "fwi01_log_file = os.path.join(*(['.', 'fwi01_azureml_buildexperimentationdockerimage.log']))\n",
+ "fwi01_log_file"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "#### Create command for running devito tests, capture output in a log file, save log file outside container"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 17,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "\n",
+ "content of devito tests log file before testing:\n",
+ "Before running e13n container... \r\n"
+ ]
+ },
+ {
+ "data": {
+ "text/plain": [
+ "' python -m pytest tests/ > ./fwi01_azureml_buildexperimentationdockerimage.log 2>&1; mv ./fwi01_azureml_buildexperimentationdockerimage.log /workspace/'"
+ ]
+ },
+ "execution_count": 17,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "if docker_test_run_devito_tests:\n",
+ " run_devito_tests_command = ' python -m pytest tests/ ' + \\\n",
+ "'> ' + fwi01_log_file +' 2>&1; ' + \\\n",
+ "' mv ' + fwi01_log_file + ' /workspace/' \n",
+ " \n",
+ " with open(os.path.join(*(['.', 'fwi01_azureml_buildexperimentationdockerimage.log'])), \"w\") as crt_log_file:\n",
+ " print('Before running e13n container... ', file=crt_log_file)\n",
+ " print('\\ncontent of devito tests log file before testing:')\n",
+ " !cat $fwi01_log_file\n",
+ "else:\n",
+ " run_devito_tests_command = '' \n",
+ "\n",
+ "# run_devito_tests_command = 'ls -l > ./fwi01_azureml_buildexperimentationdockerimage.log 2>&1; mv ./fwi01_azureml_buildexperimentationdockerimage.log /workspace/'\n",
+ "run_devito_tests_command"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 18,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'docker run -it --rm --name fwi01_azureml_container -v /datadrive01/prj/DeepSeismic/contrib/fwi/azureml_devito/notebooks:/workspace:rw fwi01acr.azurecr.io/fwi01_azureml:sdk.v1.0.76 /bin/bash -c \"conda env list ; ls -l /devito/tests; python -c \\'import azureml.core;print(azureml.core.VERSION)\\'; cd /devito; python -m pytest tests/ > ./fwi01_azureml_buildexperimentationdockerimage.log 2>&1; mv ./fwi01_azureml_buildexperimentationdockerimage.log /workspace/ \"'"
+ ]
+ },
+ "execution_count": 18,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "# conda environments:\n",
+ "#\n",
+ "base /opt/conda\n",
+ "fwi01_conda_env * /opt/conda/envs/fwi01_conda_env\n",
+ "\n",
+ "total 560\n",
+ "-rw-r--r-- 1 root root 11521 Dec 6 15:26 conftest.py\n",
+ "-rw-r--r-- 1 root root 6006 Dec 6 15:26 test_adjoint.py\n",
+ "-rw-r--r-- 1 root root 14586 Dec 6 15:26 test_autotuner.py\n",
+ "-rw-r--r-- 1 root root 7538 Dec 6 15:26 test_builtins.py\n",
+ "-rw-r--r-- 1 root root 24415 Dec 6 15:26 test_caching.py\n",
+ "-rw-r--r-- 1 root root 9721 Dec 6 15:26 test_checkpointing.py\n",
+ "-rw-r--r-- 1 root root 1095 Dec 6 15:26 test_constant.py\n",
+ "-rw-r--r-- 1 root root 55954 Dec 6 15:26 test_data.py\n",
+ "-rw-r--r-- 1 root root 481 Dec 6 15:26 test_dependency_bugs.py\n",
+ "-rw-r--r-- 1 root root 16331 Dec 6 15:26 test_derivatives.py\n",
+ "-rw-r--r-- 1 root root 1473 Dec 6 15:26 test_differentiable.py\n",
+ "-rw-r--r-- 1 root root 30846 Dec 6 15:26 test_dimension.py\n",
+ "-rw-r--r-- 1 root root 24838 Dec 6 15:26 test_dle.py\n",
+ "-rw-r--r-- 1 root root 1169 Dec 6 15:26 test_docstrings.py\n",
+ "-rw-r--r-- 1 root root 32134 Dec 6 15:26 test_dse.py\n",
+ "-rw-r--r-- 1 root root 8205 Dec 6 15:26 test_gradient.py\n",
+ "-rw-r--r-- 1 root root 15227 Dec 6 15:26 test_interpolation.py\n",
+ "-rw-r--r-- 1 root root 31816 Dec 6 15:26 test_ir.py\n",
+ "-rw-r--r-- 1 root root 63169 Dec 6 15:26 test_mpi.py\n",
+ "-rw-r--r-- 1 root root 67053 Dec 6 15:26 test_operator.py\n",
+ "-rw-r--r-- 1 root root 14875 Dec 6 15:26 test_ops.py\n",
+ "-rw-r--r-- 1 root root 12228 Dec 6 15:26 test_pickle.py\n",
+ "-rw-r--r-- 1 root root 1809 Dec 6 15:26 test_resample.py\n",
+ "-rw-r--r-- 1 root root 1754 Dec 6 15:26 test_save.py\n",
+ "-rw-r--r-- 1 root root 2115 Dec 6 15:26 test_staggered_utils.py\n",
+ "-rw-r--r-- 1 root root 5711 Dec 6 15:26 test_subdomains.py\n",
+ "-rw-r--r-- 1 root root 3320 Dec 6 15:26 test_symbolic_coefficients.py\n",
+ "-rw-r--r-- 1 root root 7277 Dec 6 15:26 test_tensors.py\n",
+ "-rw-r--r-- 1 root root 3186 Dec 6 15:26 test_timestepping.py\n",
+ "-rw-r--r-- 1 root root 603 Dec 6 15:26 test_tools.py\n",
+ "-rw-r--r-- 1 root root 3296 Dec 6 15:26 test_tti.py\n",
+ "-rw-r--r-- 1 root root 8835 Dec 6 15:26 test_visitors.py\n",
+ "-rw-r--r-- 1 root root 21802 Dec 6 15:26 test_yask.py\n",
+ "1.0.76\n",
+ "\n",
+ "content of devito tests log file after testing:\n",
+ "============================= test session starts ==============================\n",
+ "platform linux -- Python 3.6.9, pytest-5.3.1, py-1.8.0, pluggy-0.13.1\n",
+ "rootdir: /devito, inifile: setup.cfg\n",
+ "plugins: nbval-0.9.3, cov-2.8.1\n",
+ "collected 1056 items / 2 skipped / 1054 selected\n",
+ "\n",
+ "tests/test_adjoint.py .......................... [ 2%]\n",
+ "tests/test_autotuner.py ..........s..... [ 3%]\n",
+ "tests/test_builtins.py ....s...............s..s [ 6%]\n",
+ "tests/test_caching.py .................................................. [ 10%]\n",
+ " [ 10%]\n",
+ "tests/test_checkpointing.py ....... [ 11%]\n",
+ "tests/test_constant.py . [ 11%]\n",
+ "tests/test_data.py ..........................ssssssssssssssssss.ss.. [ 16%]\n",
+ "tests/test_dependency_bugs.py . [ 16%]\n",
+ "tests/test_derivatives.py .............................................. [ 20%]\n",
+ "........................................................................ [ 27%]\n",
+ "........................................................................ [ 34%]\n",
+ "...... [ 35%]\n",
+ "tests/test_differentiable.py .. [ 35%]\n",
+ "tests/test_dimension.py ............................... [ 38%]\n",
+ "tests/test_dle.py ...................................................... [ 43%]\n",
+ "........................................... [ 47%]\n",
+ "tests/test_docstrings.py ................ [ 48%]\n",
+ "tests/test_dse.py ......x............................................... [ 53%]\n",
+ "................x..........s.... [ 57%]\n",
+ "tests/test_gradient.py .... [ 57%]\n",
+ "tests/test_interpolation.py ........................ [ 59%]\n",
+ "tests/test_ir.py ....................................................... [ 64%]\n",
+ "................ [ 66%]\n",
+ "tests/test_mpi.py ssssssssssssssssssssssssssssssssssssssssssssssssssssss [ 71%]\n",
+ "sss [ 71%]\n",
+ "tests/test_operator.py ................................................. [ 76%]\n",
+ "..............................................s......................... [ 83%]\n",
+ ".................................. [ 86%]\n",
+ "tests/test_pickle.py .................ss. [ 88%]\n",
+ "tests/test_resample.py . [ 88%]\n",
+ "tests/test_save.py .. [ 88%]\n",
+ "tests/test_staggered_utils.py ......... [ 89%]\n",
+ "tests/test_subdomains.py ... [ 89%]\n",
+ "tests/test_symbolic_coefficients.py .....F [ 90%]\n",
+ "tests/test_tensors.py .................................................. [ 95%]\n",
+ "........................... [ 97%]\n",
+ "tests/test_timestepping.py ....... [ 98%]\n",
+ "tests/test_tools.py ..... [ 98%]\n",
+ "tests/test_tti.py .... [ 99%]\n",
+ "tests/test_visitors.py ......... [100%]\n",
+ "\n",
+ "=================================== FAILURES ===================================\n",
+ "______________________ TestSC.test_function_coefficients _______________________\n",
+ "\n",
+ "self = \n",
+ "\n",
+ " def test_function_coefficients(self):\n",
+ " \"\"\"Test that custom function coefficients return the expected result\"\"\"\n",
+ " so = 2\n",
+ " grid = Grid(shape=(4, 4))\n",
+ " f0 = TimeFunction(name='f0', grid=grid, space_order=so, coefficients='symbolic')\n",
+ " f1 = TimeFunction(name='f1', grid=grid, space_order=so)\n",
+ " x, y = grid.dimensions\n",
+ " \n",
+ " s = Dimension(name='s')\n",
+ " ncoeffs = so+1\n",
+ " \n",
+ " wshape = list(grid.shape)\n",
+ " wshape.append(ncoeffs)\n",
+ " wshape = as_tuple(wshape)\n",
+ " \n",
+ " wdims = list(grid.dimensions)\n",
+ " wdims.append(s)\n",
+ " wdims = as_tuple(wdims)\n",
+ " \n",
+ " w = Function(name='w', dimensions=wdims, shape=wshape)\n",
+ " w.data[:, :, 0] = 0.0\n",
+ " w.data[:, :, 1] = -1.0/grid.spacing[0]\n",
+ " w.data[:, :, 2] = 1.0/grid.spacing[0]\n",
+ " \n",
+ " f_x_coeffs = Coefficient(1, f0, x, w)\n",
+ " \n",
+ " subs = Substitutions(f_x_coeffs)\n",
+ " \n",
+ " eq0 = Eq(f0.dt + f0.dx, 1, coefficients=subs)\n",
+ " eq1 = Eq(f1.dt + f1.dx, 1)\n",
+ " \n",
+ " stencil0 = solve(eq0.evaluate, f0.forward)\n",
+ " stencil1 = solve(eq1.evaluate, f1.forward)\n",
+ " \n",
+ " op0 = Operator(Eq(f0.forward, stencil0))\n",
+ " op1 = Operator(Eq(f1.forward, stencil1))\n",
+ " \n",
+ " op0(time_m=0, time_M=5, dt=1.0)\n",
+ " op1(time_m=0, time_M=5, dt=1.0)\n",
+ " \n",
+ "> assert np.all(np.isclose(f0.data[:] - f1.data[:], 0.0, atol=1e-5, rtol=0))\n",
+ "E assert Data(False)\n",
+ "E + where Data(False) = (Data([[[False, False, False, False],\\n [False, False, False, False],\\n [ True, True, True, True],\\n ...alse],\\n [False, False, False, False],\\n [False, False, False, False],\\n [ True, True, True, True]]]))\n",
+ "E + where = np.all\n",
+ "E + and Data([[[False, False, False, False],\\n [False, False, False, False],\\n [ True, True, True, True],\\n ...alse],\\n [False, False, False, False],\\n [False, False, False, False],\\n [ True, True, True, True]]]) = ((Data([[[-1452., -1452., -1452., -1452.],\\n [ 3327., 3327., 3327., 3327.],\\n [-3414., -3414., -3414., -341...3., 383., 383.],\\n [ -598., -598., -598., -598.],\\n [ 341., 341., 341., 341.]]], dtype=float32) - Data([[[-1451.9998 , -1451.9998 , -1451.9998 , -1451.9998 ],\\n [ 3326.9995 , 3326.9995 , 3326.9995 , 33...4 , -597.99994 , -597.99994 ],\\n [ 341. , 341. , 341. , 341. ]]],\\n dtype=float32)), 0.0, atol=1e-05, rtol=0)\n",
+ "E + where = np.isclose\n",
+ "\n",
+ "tests/test_symbolic_coefficients.py:96: AssertionError\n",
+ "----------------------------- Captured stderr call -----------------------------\n",
+ "/tmp/devito-jitcache-uid0/28a0c1d4f6f5711828a8c4cd1ff27eaa7607404e.c: In function ‘Kernel’:\n",
+ "/tmp/devito-jitcache-uid0/28a0c1d4f6f5711828a8c4cd1ff27eaa7607404e.c:39: warning: ignoring #pragma omp simd [-Wunknown-pragmas]\n",
+ " #pragma omp simd aligned(f0,w:32)\n",
+ " \n",
+ "Operator `Kernel` run in 0.01 s\n",
+ "/tmp/devito-jitcache-uid0/0031268cb9efe9dfa4f656da51efd0d4fa4b9d00.c: In function ‘Kernel’:\n",
+ "/tmp/devito-jitcache-uid0/0031268cb9efe9dfa4f656da51efd0d4fa4b9d00.c:38: warning: ignoring #pragma omp simd [-Wunknown-pragmas]\n",
+ " #pragma omp simd aligned(f1:32)\n",
+ " \n",
+ "Operator `Kernel` run in 0.01 s\n",
+ "------------------------------ Captured log call -------------------------------\n",
+ "INFO Devito:logger.py:129 Operator `Kernel` run in 0.01 s\n",
+ "INFO Devito:logger.py:129 Operator `Kernel` run in 0.01 s\n",
+ "====== 1 failed, 968 passed, 87 skipped, 2 xfailed in 1070.16s (0:17:50) =======\n"
+ ]
+ }
+ ],
+ "source": [
+ "cli_command='docker run -it --rm --name fwi01_azureml_container ' +\\\n",
+ "' -v '+os.getenv('DOCKER_CONTAINER_MOUNT_POINT')+':/workspace:rw ' + \\\n",
+ "docker_image_name + \\\n",
+ "' /bin/bash -c \"conda env list ; ls -l /devito/tests; ' + \\\n",
+ "'python -c \\'import azureml.core;print(azureml.core.VERSION)\\'; ' + \\\n",
+ "'cd /devito; ' + \\\n",
+ "run_devito_tests_command +\\\n",
+ "' \"'\n",
+ "\n",
+ "cli_command\n",
+ "! $cli_command\n",
+ "# # ============= 774 passed, 70 skipped, 1 xfailed in 1106.76 seconds =============\n",
+ "print('\\ncontent of devito tests log file after testing:')\n",
+ "!cat $fwi01_log_file"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "###### Use the ACR created in previous notebook or docker hub to push your image"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 22,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'az acr login --name fwi01acr'"
+ ]
+ },
+ "execution_count": 22,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Login Succeeded\r\n",
+ "WARNING! Your password will be stored unencrypted in /home/loginvm022/.docker/config.json.\r\n",
+ "Configure a credential helper to remove this warning. See\r\n",
+ "https://docs.docker.com/engine/reference/commandline/login/#credentials-store\r\n",
+ "\r\n",
+ "\u001b[0m"
+ ]
+ }
+ ],
+ "source": [
+ "# docker_pwd = os.getenv('DOCKER_PWD')\n",
+ "# docker_login = os.getenv('DOCKER_LOGIN')\n",
+ "# !docker login -u=$docker_login -p=$docker_pwd\n",
+ "# !docker push {docker_image_name}\n",
+ "\n",
+ "%dotenv -o $dotenv_file_path\n",
+ "cli_command='az acr login --name '+os.getenv('ACR_NAME')\n",
+ "# print cli command\n",
+ "cli_command\n",
+ "\n",
+ "# run cli command\n",
+ "cli_command = cli_command+' --username '+os.getenv('ACR_USERNAME') + ' --password ' + os.getenv('ACR_PASSWORD')\n",
+ "! $cli_command"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 23,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'docker push fwi01acr.azurecr.io/fwi01_azureml:sdk.v1.0.76'"
+ ]
+ },
+ "execution_count": 23,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "cli_command='docker push '+docker_image_name\n",
+ "cli_command"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 24,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "The push refers to repository [fwi01acr.azurecr.io/fwi01_azureml]\n",
+ "\n",
+ "\u001b[1Bd6300f53: Preparing \n",
+ "\u001b[1B01af7f6b: Preparing \n",
+ "\u001b[1B41f0b573: Preparing \n",
+ "\u001b[1B04ca5654: Preparing \n",
+ "\u001b[1Bf8fc4c9a: Preparing \n",
+ "\u001b[1Bba47210e: Preparing \n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "\u001b[6B01af7f6b: Pushing 1.484GB/3.028GBA\u001b[2K\u001b[6A\u001b[2K\u001b[4A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[4A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[4A\u001b[2K\u001b[7A\u001b[2K\u001b[4A\u001b[2K\u001b[7A\u001b[2K\u001b[4A\u001b[2K\u001b[7A\u001b[2K\u001b[4A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[2A\u001b[2K\u001b[4A\u001b[2K\u001b[7A\u001b[2K\u001b[4A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[4A\u001b[2K\u001b[7A\u001b[2K\u001b[4A\u001b[2K\u001b[7A\u001b[2K\u001b[4A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[4A\u001b[2K\u001b[4A\u001b[2K\u001b[7A\u001b[2K\u001b[4A\u001b[2K\u001b[7A\u001b[2K\u001b[4A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[4A\u001b[2K\u001b[7A\u001b[2K\u001b[4A\u001b[2K\u001b[7A\u001b[2K\u001b[4A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[4A\u001b[2K\u001b[7A\u001b[2K\u001b[4A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[4A\u001b[2K\u001b[6A\u001b[2K\u001b[4A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[4A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[4A\u001b[2K\u001b[4A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[4A\u001b[2K\u001b[7A\u001b[2K\u001b[4A\u001b[2K\u001b[6A\u001b[2K\u001b[4A\u001b[2K\u001b[7A\u001b[2K\u001b[4A\u001b[2K\u001b[6A\u001b[2K\u001b[4A\u001b[2K\u001b[6A\u001b[2K\u001b[4A\u001b[2K\u001b[7A\u001b[2K\u001b[4A\u001b[2K\u001b[7A\u001b[2K\u001b[4A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[4A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[4A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[4A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[4A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[4A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[4A\u001b[2K\u001b[7A\u001b[2K\u001b[4A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[4A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[4A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[4A\u001b[2K\u001b[6A\u001b[2K\u001b[4A\u001b[2K\u001b[6A\u001b[2K\u001b[4A\u001b[2K\u001b[6A\u001b[2K\u001b[4A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[4A\u001b[2K\u001b[6A\u001b[2K\u001b[4A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[4A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[4A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[4A\u001b[2K\u001b[7A\u001b[2K\u001b[4A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6
A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2
K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6
A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2
K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7
A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "\u001b[7Bd6300f53: Pushing 3.026GB/3.028GB\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K
\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A
\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K
\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2KPushing 
2.58GB/2.968GB\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u00
1b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "\u001b[6B01af7f6b: Pushed 3.103GB/3.028GB\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[7A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2K\u001b[6A\u001b[2Ksdk.v1.0.76: digest: sha256:416dc7ce59c279822e967223790f7b8b7d99ba62bc643ca44b94551135b60b6b size: 1800\n"
+ ]
+ }
+ ],
+ "source": [
+ "! $cli_command"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 25,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Finished running 010_CreateExperimentationDockerImage_GeophysicsTutorial_FWI_Azure_devito!\n"
+ ]
+ }
+ ],
+ "source": [
+ "# !jupyter nbconvert 010_CreateExperimentationDockerImage_GeophysicsTutorial_FWI_Azure_devito --to html\n",
+ "print('Finished running 010_CreateExperimentationDockerImage_GeophysicsTutorial_FWI_Azure_devito!')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.7.5"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/contrib/fwi/azureml_devito/notebooks/020_UseAzureMLEstimatorForExperimentation_GeophysicsTutorial_FWI_Azure_devito.ipynb b/contrib/fwi/azureml_devito/notebooks/020_UseAzureMLEstimatorForExperimentation_GeophysicsTutorial_FWI_Azure_devito.ipynb
new file mode 100755
index 00000000..db76f4c5
--- /dev/null
+++ b/contrib/fwi/azureml_devito/notebooks/020_UseAzureMLEstimatorForExperimentation_GeophysicsTutorial_FWI_Azure_devito.ipynb
@@ -0,0 +1,1003 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Copyright (c) Microsoft Corporation. \n",
+ "Licensed under the MIT License. \n",
+ " \n",
+ " \n",
+ "# FWI demo based on: \n",
+ "This project ports devito (https://github.com/opesci/devito) into Azure and runs tutorial notebooks at:\n",
+ "https://nbviewer.jupyter.org/github/opesci/devito/blob/master/examples/seismic/tutorials/\n",
+ "\n",
+ "\n",
+ "\n",
+ "In this notebook we run the devito demo [notebooks](https://nbviewer.jupyter.org/github/opesci/devito/blob/master/examples/seismic/tutorials/) mentioned above by using an [AzureML estimator](https://docs.microsoft.com/en-us/python/api/azureml-train-core/azureml.train.estimator.estimator?view=azure-ml-py) with custom docker image. The docker image and associated docker file were created in previous notebook.\n",
+ "\n",
+ "\n",
+ "#### This notebook is used as a control plane to submit experimentation jobs running devito in Azure in two modes (see [remote run azureml python script file invoking devito](#devito_demo_mode)):\n",
+ " - [Mode 1](#devito_demo_mode_1):\n",
+ " - uses custom code (slightly modified graphing functions save images to files too) \n",
+ " - experimentation job is defined by the devito code that is packaged as a py file to be run on an Azure remote compute target\n",
+ " - experimentation job can be used to track metrics or other artifacts (images)\n",
+ " \n",
+ " - Mode 2:\n",
+ " - papermill is invoked via its Python API to run unedited devito demo notebooks (https://github.com/opesci/devito/tree/master/examples/seismic/tutorials) on the remote compute target and get back the results as saved notebooks that are then Available in Azure portal. \n"
+ ]
+ },
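+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "For reference, the papermill call at the core of Mode 2 looks roughly like the sketch below. This is a minimal illustration, assuming papermill is available inside the custom docker image; the notebook paths are placeholders, not the exact ones used later in this notebook.\n",
+    "\n",
+    "```python\n",
+    "import papermill as pm\n",
+    "\n",
+    "# Execute an unedited devito tutorial notebook and save the executed copy,\n",
+    "# which can then be collected as a run artifact (paths are placeholders).\n",
+    "pm.execute_notebook(\n",
+    "    'examples/seismic/tutorials/01_modelling.ipynb',  # input notebook\n",
+    "    'outputs/01_modelling_executed.ipynb',             # executed output notebook\n",
+    "    kernel_name='python3'\n",
+    ")\n",
+    "```\n"
+   ]
+  },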
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Allow multiple displays per cell\n",
+ "from IPython.core.interactiveshell import InteractiveShell\n",
+ "InteractiveShell.ast_node_interactivity = \"all\" "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import sys, os\n",
+ "import shutil\n",
+ "import urllib\n",
+ "import azureml.core\n",
+ "from azureml.core import Workspace, Experiment\n",
+ "from azureml.core.compute import ComputeTarget, AmlCompute\n",
+ "from azureml.core.compute_target import ComputeTargetException\n",
+ "from azureml.core.runconfig import MpiConfiguration\n",
+ "\n",
+ "\n",
+ "# from azureml.core.datastore import Datastore\n",
+ "# from azureml.data.data_reference import DataReference\n",
+ "# from azureml.pipeline.steps import HyperDriveStep\n",
+ "# from azureml.pipeline.core import Pipeline, PipelineData\n",
+ "# from azureml.train.dnn import TensorFlow\n",
+ "\n",
+ "from azureml.train.estimator import Estimator\n",
+ "from azureml.widgets import RunDetails\n",
+ "\n",
+ "import platform"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Azure ML SDK Version: 1.0.76\n"
+ ]
+ },
+ {
+ "data": {
+ "text/plain": [
+ "'Linux-4.15.0-1063-azure-x86_64-with-debian-stretch-sid'"
+ ]
+ },
+ "execution_count": 3,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "text/plain": [
+ "'/datadrive01/prj/DeepSeismic/contrib/fwi/azureml_devito/notebooks'"
+ ]
+ },
+ "execution_count": 3,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "print(\"Azure ML SDK Version: \", azureml.core.VERSION)\n",
+ "platform.platform()\n",
+ "os.getcwd()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "[None]"
+ ]
+ },
+ "execution_count": 4,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "text/plain": [
+ "'./../not_shared/general.env'"
+ ]
+ },
+ "execution_count": 4,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "def add_path_to_sys_path(path_to_append):\n",
+ " if not (any(path_to_append in paths for paths in sys.path)):\n",
+ " sys.path.append(path_to_append)\n",
+ " \n",
+ "auxiliary_files_dir = os.path.join(*(['.', 'src']))\n",
+ "paths_to_append = [os.path.join(os.getcwd(), auxiliary_files_dir)]\n",
+ "[add_path_to_sys_path(crt_path) for crt_path in paths_to_append]\n",
+ "\n",
+ "import project_utils\n",
+ "prj_consts = project_utils.project_consts()\n",
+ "\n",
+ "dotenv_file_path = os.path.join(*(prj_consts.DOTENV_FILE_PATH))\n",
+ "dotenv_file_path"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "%load_ext dotenv"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'./../not_shared'"
+ ]
+ },
+ "execution_count": 6,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "workspace_config_dir = os.path.join(*(prj_consts.AML_WORKSPACE_CONFIG_DIR))\n",
+ "workspace_config_file = prj_consts.AML_WORKSPACE_CONFIG_FILE_NAME\n",
+ "workspace_config_dir"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'./../temp/devito_tutorial/01_modelling.py'"
+ ]
+ },
+ "execution_count": 7,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "text/plain": [
+ "'./../temp/devito_tutorial/azureml_01_modelling.py'"
+ ]
+ },
+ "execution_count": 7,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "%dotenv $dotenv_file_path\n",
+ "\n",
+ "script_folder = prj_consts.AML_EXPERIMENT_DIR + ['devito_tutorial']\n",
+ "\n",
+ "devito_training_script_file = '01_modelling.py' # hardcoded in file azureml_training_script_full_file_name below\n",
+ "azureml_training_script_file = 'azureml_'+devito_training_script_file\n",
+ "experimentName = '020_AzureMLEstimator'\n",
+ "\n",
+ "os.makedirs(os.path.join(*(script_folder)), exist_ok=True)\n",
+ "script_path = os.path.join(*(script_folder))\n",
+ "training_script_full_file_name = os.path.join(script_path, devito_training_script_file)\n",
+ "azureml_training_script_full_file_name = os.path.join(script_path, azureml_training_script_file)\n",
+ "\n",
+ "training_script_full_file_name\n",
+ "azureml_training_script_full_file_name"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "\n",
+ " \n",
+ "##### devito in Azure ML demo mode 1\n",
+ "Create devito demo script based on \n",
+ "https://nbviewer.jupyter.org/github/opesci/devito/blob/master/examples/seismic/tutorials/01_modelling.ipynb\n",
+ "\n",
+ "[Back](#devito_in_AzureML_demoing_modes) to summary of modes od demoing devito in AzureML.\n",
+ "\n",
+ "Main purpose of this script is to extend _plot_velocity()_ and _plot_shotrecord()_ devito [plotting functions](https://github.com/opesci/devito/blob/master/examples/seismic/plotting.py) to allow the mto work in batch mode, i.e. save output to a file."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Overwriting ./../temp/devito_tutorial/01_modelling.py\n"
+ ]
+ }
+ ],
+ "source": [
+ "%%writefile $training_script_full_file_name\n",
+ "\n",
+ "import numpy as np\n",
+ "import os, argparse\n",
+ "\n",
+ "from examples.seismic import Model\n",
+ "from examples.seismic import TimeAxis\n",
+ "from examples.seismic import Receiver\n",
+ "from devito import TimeFunction\n",
+ "from devito import Eq, solve\n",
+ "from devito import Operator\n",
+ "\n",
+ "\n",
+ "# try:\n",
+ "import matplotlib as mpl\n",
+ "import matplotlib.pyplot as plt\n",
+ "from matplotlib import cm\n",
+ "from mpl_toolkits.axes_grid1 import make_axes_locatable\n",
+ "\n",
+ "mpl.rc('font', size=16)\n",
+ "mpl.rc('figure', figsize=(8, 6))\n",
+ "# except:\n",
+ "# plt = None\n",
+ "# cm = None\n",
+ " \n",
+ "\n",
+ "\n",
+ "# \"all\" plotting utils in devito do not save to file, so we extend them here\n",
+ "# https://github.com/opesci/devito/blob/master/examples/seismic/plotting.py\n",
+ "def plot_velocity(model, source=None, receiver=None, colorbar=True, file=None):\n",
+ " \"\"\"\n",
+ " Plot a two-dimensional velocity field from a seismic `Model`\n",
+ " object. Optionally also includes point markers for sources and receivers.\n",
+ "\n",
+ " Parameters\n",
+ " ----------\n",
+ " model : Model\n",
+ " Object that holds the velocity model.\n",
+ " source : array_like or float\n",
+ " Coordinates of the source point.\n",
+ " receiver : array_like or float\n",
+ " Coordinates of the receiver points.\n",
+ " colorbar : bool\n",
+ " Option to plot the colorbar.\n",
+ " \"\"\"\n",
+ " domain_size = 1.e-3 * np.array(model.domain_size)\n",
+ " extent = [model.origin[0], model.origin[0] + domain_size[0],\n",
+ " model.origin[1] + domain_size[1], model.origin[1]]\n",
+ "\n",
+ " plot = plt.imshow(np.transpose(model.vp.data), animated=True, cmap=cm.jet,\n",
+ " vmin=np.min(model.vp.data), vmax=np.max(model.vp.data),\n",
+ " extent=extent)\n",
+ " plt.xlabel('X position (km)')\n",
+ " plt.ylabel('Depth (km)')\n",
+ "\n",
+ " # Plot source points, if provided\n",
+ " if receiver is not None:\n",
+ " plt.scatter(1e-3*receiver[:, 0], 1e-3*receiver[:, 1],\n",
+ " s=25, c='green', marker='D')\n",
+ "\n",
+ " # Plot receiver points, if provided\n",
+ " if source is not None:\n",
+ " plt.scatter(1e-3*source[:, 0], 1e-3*source[:, 1],\n",
+ " s=25, c='red', marker='o')\n",
+ "\n",
+ " # Ensure axis limits\n",
+ " plt.xlim(model.origin[0], model.origin[0] + domain_size[0])\n",
+ " plt.ylim(model.origin[1] + domain_size[1], model.origin[1])\n",
+ "\n",
+ " # Create aligned colorbar on the right\n",
+ " if colorbar:\n",
+ " ax = plt.gca()\n",
+ " divider = make_axes_locatable(ax)\n",
+ " cax = divider.append_axes(\"right\", size=\"5%\", pad=0.05)\n",
+ " cbar = plt.colorbar(plot, cax=cax)\n",
+ " cbar.set_label('Velocity (km/s)')\n",
+ " plt.show()\n",
+ " \n",
+ " if file is not None:\n",
+ " plt.savefig(file)\n",
+ " print('plotted image saved as {} file'.format(file))\n",
+ " \n",
+ " plt.clf()\n",
+ "\n",
+ "def plot_shotrecord(rec, model, t0, tn, colorbar=True, file=None):\n",
+ " \"\"\"\n",
+ " Plot a shot record (receiver values over time).\n",
+ "\n",
+ " Parameters\n",
+ " ----------\n",
+ " rec :\n",
+ " Receiver data with shape (time, points).\n",
+ " model : Model\n",
+ " object that holds the velocity model.\n",
+ " t0 : int\n",
+ " Start of time dimension to plot.\n",
+ " tn : int\n",
+ " End of time dimension to plot.\n",
+ " \"\"\"\n",
+ " scale = np.max(rec) / 10.\n",
+ " extent = [model.origin[0], model.origin[0] + 1e-3*model.domain_size[0],\n",
+ " 1e-3*tn, t0]\n",
+ "\n",
+ " plot = plt.imshow(rec, vmin=-scale, vmax=scale, cmap=cm.gray, extent=extent)\n",
+ " plt.xlabel('X position (km)')\n",
+ " plt.ylabel('Time (s)')\n",
+ "\n",
+ " # Create aligned colorbar on the right\n",
+ " if colorbar:\n",
+ " ax = plt.gca()\n",
+ " divider = make_axes_locatable(ax)\n",
+ " cax = divider.append_axes(\"right\", size=\"5%\", pad=0.05)\n",
+ " plt.colorbar(plot, cax=cax)\n",
+ " plt.show() \n",
+ " \n",
+ " if file is not None:\n",
+ " plt.savefig(file)\n",
+ " print('plotted image saved as {} file'.format(file))\n",
+ " \n",
+ " plt.clf()\n",
+ "\n",
+ "def main(output_folder): \n",
+ " # 1. Define the physical problem\n",
+ " # The first step is to define the physical model:\n",
+ " # - physical dimensions of interest\n",
+ " # - velocity profile of this physical domain\n",
+ "\n",
+ " # Define a physical size\n",
+ " shape = (101, 101) # Number of grid point (nx, nz)\n",
+ " spacing = (10., 10.) # Grid spacing in m. The domain size is now 1km by 1km\n",
+ " origin = (0., 0.) # What is the location of the top left corner. This is necessary to define\n",
+ " # the absolute location of the source and receivers\n",
+ "\n",
+ " # Define a velocity profile. The velocity is in km/s\n",
+ " v = np.empty(shape, dtype=np.float32)\n",
+ " v[:, :51] = 1.5\n",
+ " v[:, 51:] = 2.5\n",
+ "\n",
+ " # With the velocity and model size defined, we can create the seismic model that\n",
+ " # encapsulates this properties. We also define the size of the absorbing layer as 10 grid points\n",
+ " model = Model(vp=v, origin=origin, shape=shape, spacing=spacing,\n",
+ " space_order=2, nbpml=10)\n",
+ "\n",
+ " plot_velocity(model, \n",
+ " file= os.path.join(*( [output_folder,'output000.png'])))\n",
+ " \n",
+ " # 2. Acquisition geometry\n",
+ " t0 = 0. # Simulation starts a t=0\n",
+ " tn = 1000. # Simulation last 1 second (1000 ms)\n",
+ " dt = model.critical_dt # Time step from model grid spacing\n",
+ "\n",
+ " time_range = TimeAxis(start=t0, stop=tn, step=dt)\n",
+ " from examples.seismic import RickerSource\n",
+ "\n",
+ " f0 = 0.010 # Source peak frequency is 10Hz (0.010 kHz)\n",
+ " src = RickerSource(name='src', grid=model.grid, f0=f0,\n",
+ " npoint=1, time_range=time_range)\n",
+ "\n",
+ " # First, position source centrally in all dimensions, then set depth\n",
+ " src.coordinates.data[0, :] = np.array(model.domain_size) * .5\n",
+ " src.coordinates.data[0, -1] = 20. # Depth is 20m\n",
+ "\n",
+ " # We can plot the time signature to see the wavelet\n",
+ "# src.show()\n",
+ "\n",
+ " # Create symbol for 101 receivers\n",
+ " rec = Receiver(name='rec', grid=model.grid, npoint=101, time_range=time_range)\n",
+ "\n",
+ " # Prescribe even spacing for receivers along the x-axis\n",
+ " rec.coordinates.data[:, 0] = np.linspace(0, model.domain_size[0], num=101)\n",
+ " rec.coordinates.data[:, 1] = 20. # Depth is 20m\n",
+ "\n",
+ " # We can now show the source and receivers within our domain:\n",
+ " # Red dot: Source location\n",
+ " # Green dots: Receiver locations (every 4th point)\n",
+ " plot_velocity(model, source=src.coordinates.data,\n",
+ " receiver=rec.coordinates.data[::4, :], \n",
+ " file= os.path.join(*( [output_folder,'output010.png'])))\n",
+ " \n",
+ " # Define the wavefield with the size of the model and the time dimension\n",
+ " u = TimeFunction(name=\"u\", grid=model.grid, time_order=2, space_order=2)\n",
+ "\n",
+ " # We can now write the PDE\n",
+ " pde = model.m * u.dt2 - u.laplace + model.damp * u.dt\n",
+ "\n",
+ " # The PDE representation is as on paper\n",
+ " pde\n",
+ " \n",
+ " # This discrete PDE can be solved in a time-marching way updating u(t+dt) from the previous time step\n",
+ " # Devito as a shortcut for u(t+dt) which is u.forward. We can then rewrite the PDE as \n",
+ " # a time marching updating equation known as a stencil using customized SymPy functions\n",
+ "\n",
+ " stencil = Eq(u.forward, solve(pde, u.forward))\n",
+ " # Finally we define the source injection and receiver read function to generate the corresponding code\n",
+ " src_term = src.inject(field=u.forward, expr=src * dt**2 / model.m)\n",
+ "\n",
+ " # Create interpolation expression for receivers\n",
+ " rec_term = rec.interpolate(expr=u.forward)\n",
+ "\n",
+ " op = Operator([stencil] + src_term + rec_term, subs=model.spacing_map)\n",
+ " \n",
+ " op(time=time_range.num-1, dt=model.critical_dt)\n",
+ " plot_shotrecord(rec.data, model, t0, tn, \n",
+ " file= os.path.join(*( [output_folder,'output020.png'])))\n",
+ "\n",
+ "if __name__ == \"__main__\":\n",
+ " parser = argparse.ArgumentParser()\n",
+ " parser.add_argument('--output_folder', type=str, nargs='?', \\\n",
+ " dest='output_folder', help='ouput artifacts location',\\\n",
+ " default='.')\n",
+ " args = parser.parse_args()\n",
+ " \n",
+ " main(args.output_folder)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "##### Get experimentation docker image for devito"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'fwi01_azureml:sdk.v1.0.76'"
+ ]
+ },
+ "execution_count": 9,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "text/plain": [
+ "'fwi01acr.azurecr.io/fwi01_azureml:sdk.v1.0.76'"
+ ]
+ },
+ "execution_count": 9,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "docker_repo_name = os.getenv('ACR_NAME')+'.azurecr.io' # or os.getenv('DOCKER_LOGIN')\n",
+ "docker_image_name = os.getenv('EXPERIMENTATION_DOCKER_IMAGE_NAME')\n",
+ "\n",
+ "image_version = os.getenv('EXPERIMENTATION_DOCKER_IMAGE_TAG')\n",
+ "if image_version!=\"\":\n",
+ " docker_image_name = docker_image_name +':'+ image_version\n",
+ "\n",
+ "full_docker_image_name = docker_repo_name + '/' + docker_image_name\n",
+ " \n",
+ "docker_image_name\n",
+ "full_docker_image_name"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Extract/decide the python path in custom docker image that corresponds to desired conda environment. Without this, AzureML tries to create a separate environment."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Login Succeeded\r\n",
+ "WARNING! Your password will be stored unencrypted in /home/loginvm022/.docker/config.json.\r\n",
+ "Configure a credential helper to remove this warning. See\r\n",
+ "https://docs.docker.com/engine/reference/commandline/login/#credentials-store\r\n",
+ "\r\n",
+ "\u001b[0m"
+ ]
+ }
+ ],
+ "source": [
+ "%dotenv $dotenv_file_path\n",
+ "cli_command='az acr login --name '+\\\n",
+ "os.getenv('ACR_NAME')+\\\n",
+ "' --username '+os.getenv('ACR_USERNAME') + ' --password ' + os.getenv('ACR_PASSWORD')\n",
+ "! $cli_command"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 11,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'docker run -i --rm --name fwi01_azureml_container02 fwi01acr.azurecr.io/fwi01_azureml:sdk.v1.0.76 /bin/bash -c \"which python\" '"
+ ]
+ },
+ "execution_count": 11,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "text/plain": [
+ "'/opt/conda/envs/fwi01_conda_env/bin/python'"
+ ]
+ },
+ "execution_count": 11,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "get_Python_path_command='docker run -i --rm --name fwi01_azureml_container02 '+ \\\n",
+ "full_docker_image_name + \\\n",
+ "' /bin/bash -c \"which python\" '\n",
+ "get_Python_path_command\n",
+ "\n",
+ "\n",
+ "import subprocess\n",
+ "python_path_in_docker_image = subprocess.check_output(get_Python_path_command,shell=True,stderr=subprocess.STDOUT).\\\n",
+ "decode('utf-8').strip()\n",
+ "python_path_in_docker_image"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "\n",
+ "#### Create azureml_script_file that invokes:\n",
+ " - devito exclusive custom edited training_script_file\n",
+ " - unedited devito notebooks via papermill (invoked via cli and via ppapermill python API)\n",
+ "\n",
+ "[Back](#devito_in_AzureML_demoing_modes) to notebook summary."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 12,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Overwriting ./../temp/devito_tutorial/azureml_01_modelling.py\n"
+ ]
+ }
+ ],
+ "source": [
+ "%%writefile $azureml_training_script_full_file_name\n",
+ "\n",
+ "import argparse\n",
+ "import os\n",
+ "os.system('conda env list')\n",
+ "\n",
+ "import azureml.core;\n",
+ "from azureml.core.run import Run\n",
+ "\n",
+ "print(azureml.core.VERSION)\n",
+ "\n",
+ "parser = argparse.ArgumentParser()\n",
+ "parser.add_argument('--output_folder', type=str, dest='output_folder', help='ouput artifacts location')\n",
+ "\n",
+ "args = parser.parse_args()\n",
+ "print('args.output_folder is {} but it will be ignored since AzureML_tracked ./outputs will be used'.format(args.output_folder))\n",
+ "\n",
+ "# get the Azure ML run object\n",
+ "run = Run.get_context()\n",
+ "\n",
+ "# ./outputs/ folder is autotracked so should get uploaded at the end of the run\n",
+ "output_dir_AzureML_tracked = './outputs'\n",
+ "\n",
+ "crt_dir = os.getcwd()\n",
+ "\n",
+ "cli_command= \\\n",
+ "'cd /devito; /opt/conda/envs/fwi01_conda_env/bin/python '+ crt_dir +'/01_modelling.py' + \\\n",
+ "' --output_folder '+ crt_dir + output_dir_AzureML_tracked+ '/' + \\\n",
+ "' > '+ crt_dir + output_dir_AzureML_tracked + '/01_modelling.log' \n",
+ "# + \\\n",
+ "# ' 2>&1 ' + crt_dir +'/'+ output_dir_AzureML_tracked + '/devito_cli_py.log'\n",
+ "print('Running devito from cli on 01_modelling.py----BEGIN-----:') \n",
+ "print(cli_command); print('\\n');os.system(cli_command)\n",
+ "print('Running devito from cli on 01_modelling.py----END-----:\\n\\n')\n",
+ "\n",
+ "cli_command= \\\n",
+ "'cd /devito; papermill ' + \\\n",
+ "'./examples/seismic/tutorials/02_rtm.ipynb '+\\\n",
+ "crt_dir +'/outputs/02_rtm_output.ipynb ' + \\\n",
+ "'--log-output --no-progress-bar --kernel python3 ' + \\\n",
+ "' > '+ crt_dir + output_dir_AzureML_tracked + '/02_rtm_output.log' \n",
+ "# + \\\n",
+ "# ' 2>&1 ' + crt_dir +'/'+ output_dir_AzureML_tracked + '/papermill_cli.log'\n",
+ "\n",
+ "# FIXME - activate right conda env for running papermill from cli\n",
+ "activate_right_conda_env_fixed = False\n",
+ "if activate_right_conda_env_fixed:\n",
+ " print('Running papermill from cli on 02_rtm.ipynb----BEGIN-----:') \n",
+ " print(cli_command); print('\\n');os.system(cli_command)\n",
+ " print('Running papermill from cli on 02_rtm.ipynb----END-----:\\n\\n') \n",
+ "\n",
+ "\n",
+ "print('Running papermill from Python API on 03_fwi.ipynb----BEGIN-----:') \n",
+ "import papermill as pm\n",
+ "os.chdir('/devito')\n",
+ "pm.execute_notebook(\n",
+ " './examples/seismic/tutorials/03_fwi.ipynb',\n",
+ " crt_dir +'/outputs/03_fwi_output.ipynb'\n",
+ ")\n",
+ "print('Running papermill from Python API on 03_fwi.ipynb----END-----:') \n",
+ "\n",
+ "print('Running papermill from Python API on 04_dask.ipynb----BEGIN-----:') \n",
+ "import papermill as pm\n",
+ "os.chdir('/devito')\n",
+ "pm.execute_notebook(\n",
+ " './examples/seismic/tutorials/04_dask.ipynb',\n",
+ " crt_dir +'/outputs/04_dask_output.ipynb'\n",
+ ")\n",
+ "print('Running papermill from Python API on 04_dask.ipynb----END-----:') \n",
+ " \n",
+ "\n",
+ "os.system('pwd')\n",
+ "os.system('ls -l /')\n",
+ "os.system('ls -l ./')\n",
+ "os.system('ls -l ' +crt_dir + output_dir_AzureML_tracked)\n",
+ "run.log('training_message01: ', 'finished experiment')\n",
+ "print('\\n')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 13,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "['azureml_01_modelling.py', '01_modelling.py']"
+ ]
+ },
+ "execution_count": 13,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "script_path=os.path.join(*(script_folder))\n",
+ "os.listdir(script_path)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Initialize workspace\n",
+ "\n",
+ "Initialize a workspace object from persisted configuration. If you are using an Azure Machine Learning Notebook VM, you are all set. Otherwise, make sure the config file is present at .\\config.json"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 14,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "WARNING - Warning: Falling back to use azure cli login credentials.\n",
+ "If you run your code in unattended mode, i.e., where you can't give a user input, then we recommend to use ServicePrincipalAuthentication or MsiAuthentication.\n",
+ "Please refer to aka.ms/aml-notebook-auth for different authentication mechanisms in azureml-sdk.\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Workspace name: ghiordanfwiws\n",
+ "Azure region: eastus2\n",
+ "Subscription id: 7899\n"
+ ]
+ }
+ ],
+ "source": [
+ "ws = Workspace.from_config(\n",
+ " path=os.path.join(os.getcwd(),\n",
+ " os.path.join(*([workspace_config_dir, '.azureml', workspace_config_file]))))\n",
+ "print('Workspace name: ' + ws.name, \n",
+ " 'Azure region: ' + ws.location, \n",
+ " 'Subscription id: ' + ws.subscription_id[0:4], sep = '\\n')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Create an Azure ML experiment\n",
+ "Let's create an experiment named \"tf-mnist\" and a folder to hold the training scripts. The script runs will be recorded under the experiment in Azure."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 15,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "exp = Experiment(workspace=ws, name=experimentName)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Retrieve or create a Azure Machine Learning compute\n",
+ "Azure Machine Learning Compute is a service for provisioning and managing clusters of Azure virtual machines for running machine learning workloads. Let's create a new Azure Machine Learning Compute in the current workspace, if it doesn't already exist. We will then run the training script on this compute target.\n",
+ "\n",
+ "If we could not find the compute with the given name in the previous cell, then we will create a new compute here. This process is broken down into the following steps:\n",
+ "\n",
+ "1. Create the configuration\n",
+ "2. Create the Azure Machine Learning compute\n",
+ "\n",
+ "**This process will take a few minutes and is providing only sparse output in the process. Please make sure to wait until the call returns before moving to the next cell.**"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 16,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'gpuclstfwi02'"
+ ]
+ },
+ "execution_count": 16,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "gpu_cluster_name = os.getenv('GPU_CLUSTER_NAME')\n",
+ "gpu_cluster_name"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 17,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Found existing gpu cluster\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Verify that cluster does not exist already\n",
+ "max_nodes_value = 5\n",
+ "try:\n",
+ " gpu_cluster = ComputeTarget(workspace=ws, name=gpu_cluster_name)\n",
+ " print(\"Found existing gpu cluster\")\n",
+ "except ComputeTargetException:\n",
+ " print(\"Could not find ComputeTarget cluster!\")\n",
+ " \n",
+ "# # Create a new gpucluster using code below\n",
+ "# # Specify the configuration for the new cluster\n",
+ "# compute_config = AmlCompute.provisioning_configuration(vm_size=\"Standard_NC6\",\n",
+ "# min_nodes=0,\n",
+ "# max_nodes=max_nodes_value)\n",
+ "# # Create the cluster with the specified name and configuration\n",
+ "# gpu_cluster = ComputeTarget.create(ws, gpu_cluster_name, compute_config)\n",
+ "\n",
+ "# # Wait for the cluster to complete, show the output log\n",
+ "# gpu_cluster.wait_for_completion(show_output=True)\n",
+ " \n",
+ " \n",
+ "# for demo purposes, show how clsuter properties can be altered post-creation\n",
+ "gpu_cluster.update(min_nodes=0, max_nodes=max_nodes_value, idle_seconds_before_scaledown=1200)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "#### Create an Azure ML SDK estimator with custom docker image "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 18,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "application/vnd.jupyter.widget-view+json": {
+ "model_id": "565b952db744469fa2137b6c94e15f7a",
+ "version_major": 2,
+ "version_minor": 0
+ },
+ "text/plain": [
+ "_UserRunWidget(widget_settings={'childWidgetDisplay': 'popup', 'send_telemetry': False, 'log_level': 'NOTSET',…"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "application/aml.mini.widget.v1": "{\"status\": \"Completed\", \"workbench_run_details_uri\": \"https://ml.azure.com/experiments/020_AzureMLEstimator/runs/020_AzureMLEstimator_1575674728_d40baeba?wsid=/subscriptions/789908e0-5fc2-4c4d-b5f5-9764b0d602b3/resourcegroups/ghiordanfwirsg01/workspaces/ghiordanfwiws\", \"run_id\": \"020_AzureMLEstimator_1575674728_d40baeba\", \"run_properties\": {\"run_id\": \"020_AzureMLEstimator_1575674728_d40baeba\", \"created_utc\": \"2019-12-06T23:25:30.597858Z\", \"properties\": {\"_azureml.ComputeTargetType\": \"amlcompute\", \"ContentSnapshotId\": \"a5071b2a-37a7-40da-8340-69cc894091cb\", \"azureml.git.repository_uri\": \"git@github.com:georgeAccnt-GH/DeepSeismic.git\", \"mlflow.source.git.repoURL\": \"git@github.com:georgeAccnt-GH/DeepSeismic.git\", \"azureml.git.branch\": \"staging\", \"mlflow.source.git.branch\": \"staging\", \"azureml.git.commit\": \"1d3cd3340f4063508b6f707d5fc2a35f5429a07f\", \"mlflow.source.git.commit\": \"1d3cd3340f4063508b6f707d5fc2a35f5429a07f\", \"azureml.git.dirty\": \"True\", \"ProcessInfoFile\": \"azureml-logs/process_info.json\", \"ProcessStatusFile\": \"azureml-logs/process_status.json\"}, \"tags\": {\"_aml_system_ComputeTargetStatus\": \"{\\\"AllocationState\\\":\\\"steady\\\",\\\"PreparingNodeCount\\\":1,\\\"RunningNodeCount\\\":0,\\\"CurrentNodeCount\\\":1}\"}, \"script_name\": null, \"arguments\": null, \"end_time_utc\": \"2019-12-06T23:34:26.039772Z\", \"status\": \"Completed\", \"log_files\": {\"azureml-logs/55_azureml-execution-tvmps_d8d8a91061fed6f3a36a0e0da11655ae12488195551133265afca81050ad2db4_d.txt\": \"https://ghiordanstoragee145cef0b.blob.core.windows.net/azureml/ExperimentRun/dcid.020_AzureMLEstimator_1575674728_d40baeba/azureml-logs/55_azureml-execution-tvmps_d8d8a91061fed6f3a36a0e0da11655ae12488195551133265afca81050ad2db4_d.txt?sv=2019-02-02&sr=b&sig=1Fz2ltrBSXhF9tDzTuEOv35mBsOLsf%2BCVuTEuSCRWdg%3D&st=2019-12-06T23%3A24%3A44Z&se=2019-12-07T07%3A34%3A44Z&sp=r\", \"azureml-logs/65_job_prep-tvmps_d8d8a91061fed6f3a36a0e0da11655ae12488195551133265afca81050ad2db4_d.txt\": \"https://ghiordanstoragee145cef0b.blob.core.windows.net/azureml/ExperimentRun/dcid.020_AzureMLEstimator_1575674728_d40baeba/azureml-logs/65_job_prep-tvmps_d8d8a91061fed6f3a36a0e0da11655ae12488195551133265afca81050ad2db4_d.txt?sv=2019-02-02&sr=b&sig=PwHIdkWadtTAj29WuPOCF3g0RSrWdriOmKhqdjZNm3I%3D&st=2019-12-06T23%3A24%3A44Z&se=2019-12-07T07%3A34%3A44Z&sp=r\", \"azureml-logs/70_driver_log.txt\": \"https://ghiordanstoragee145cef0b.blob.core.windows.net/azureml/ExperimentRun/dcid.020_AzureMLEstimator_1575674728_d40baeba/azureml-logs/70_driver_log.txt?sv=2019-02-02&sr=b&sig=Iz8WkiOv%2BkEXeOox8p3P8XkLIdb8pjhCO%2Bo8slYUBGk%3D&st=2019-12-06T23%3A24%3A44Z&se=2019-12-07T07%3A34%3A44Z&sp=r\", \"azureml-logs/75_job_post-tvmps_d8d8a91061fed6f3a36a0e0da11655ae12488195551133265afca81050ad2db4_d.txt\": \"https://ghiordanstoragee145cef0b.blob.core.windows.net/azureml/ExperimentRun/dcid.020_AzureMLEstimator_1575674728_d40baeba/azureml-logs/75_job_post-tvmps_d8d8a91061fed6f3a36a0e0da11655ae12488195551133265afca81050ad2db4_d.txt?sv=2019-02-02&sr=b&sig=gz88u5ZC%2B7N8QospVRIL8zd%2FEyQKbljoZXQD01jAyXM%3D&st=2019-12-06T23%3A24%3A44Z&se=2019-12-07T07%3A34%3A44Z&sp=r\", \"azureml-logs/process_info.json\": 
\"https://ghiordanstoragee145cef0b.blob.core.windows.net/azureml/ExperimentRun/dcid.020_AzureMLEstimator_1575674728_d40baeba/azureml-logs/process_info.json?sv=2019-02-02&sr=b&sig=4nj2pjm1rtKIjBmyudNaBEX6ITd3Gm%2BQLEUgjDYVBIc%3D&st=2019-12-06T23%3A24%3A44Z&se=2019-12-07T07%3A34%3A44Z&sp=r\", \"azureml-logs/process_status.json\": \"https://ghiordanstoragee145cef0b.blob.core.windows.net/azureml/ExperimentRun/dcid.020_AzureMLEstimator_1575674728_d40baeba/azureml-logs/process_status.json?sv=2019-02-02&sr=b&sig=NQLsveMtGHBEYsmiwoPvPpOv%2B6wabnQp2IwDrVjh49Q%3D&st=2019-12-06T23%3A24%3A44Z&se=2019-12-07T07%3A34%3A44Z&sp=r\", \"logs/azureml/729_azureml.log\": \"https://ghiordanstoragee145cef0b.blob.core.windows.net/azureml/ExperimentRun/dcid.020_AzureMLEstimator_1575674728_d40baeba/logs/azureml/729_azureml.log?sv=2019-02-02&sr=b&sig=HpwLZSHX0J%2B2eWILTIDA7%2BmpVIEF0%2BIFfM2LHgYGk8w%3D&st=2019-12-06T23%3A24%3A43Z&se=2019-12-07T07%3A34%3A43Z&sp=r\", \"logs/azureml/azureml.log\": \"https://ghiordanstoragee145cef0b.blob.core.windows.net/azureml/ExperimentRun/dcid.020_AzureMLEstimator_1575674728_d40baeba/logs/azureml/azureml.log?sv=2019-02-02&sr=b&sig=g%2Fi60CvATRGwaeQM9b6QihJxeFX0jTl%2BOKELCYYQ3rM%3D&st=2019-12-06T23%3A24%3A43Z&se=2019-12-07T07%3A34%3A43Z&sp=r\"}, \"log_groups\": [[\"azureml-logs/process_info.json\", \"azureml-logs/process_status.json\", \"logs/azureml/azureml.log\"], [\"azureml-logs/55_azureml-execution-tvmps_d8d8a91061fed6f3a36a0e0da11655ae12488195551133265afca81050ad2db4_d.txt\"], [\"azureml-logs/65_job_prep-tvmps_d8d8a91061fed6f3a36a0e0da11655ae12488195551133265afca81050ad2db4_d.txt\"], [\"azureml-logs/70_driver_log.txt\"], [\"azureml-logs/75_job_post-tvmps_d8d8a91061fed6f3a36a0e0da11655ae12488195551133265afca81050ad2db4_d.txt\"], [\"logs/azureml/729_azureml.log\"]], \"run_duration\": \"0:08:55\"}, \"child_runs\": [], \"children_metrics\": {}, \"run_metrics\": [{\"name\": \"training_message01: \", \"run_id\": \"020_AzureMLEstimator_1575674728_d40baeba\", \"categories\": [0], \"series\": [{\"data\": [\"finished experiment\"]}]}], \"run_logs\": \"2019-12-06 23:32:41,989|azureml|DEBUG|Inputs:: kwargs: {'OutputCollection': True, 'snapshotProject': True, 'only_in_process_features': True, 'skip_track_logs_dir': True}, track_folders: None, deny_list: None, directories_to_watch: []\\n2019-12-06 23:32:41,989|azureml.history._tracking.PythonWorkingDirectory|DEBUG|Execution target type: batchai\\n2019-12-06 23:32:41,990|azureml.history._tracking.PythonWorkingDirectory|DEBUG|Failed to import pyspark with error: No module named 'pyspark'\\n2019-12-06 23:32:41,990|azureml.history._tracking.PythonWorkingDirectory.workingdir|DEBUG|Pinning working directory for filesystems: ['pyfs']\\n2019-12-06 23:32:42,323|azureml._base_sdk_common.user_agent|DEBUG|Fetching client info from /root/.azureml/clientinfo.json\\n2019-12-06 23:32:42,323|azureml._base_sdk_common.user_agent|DEBUG|Error loading client info: [Errno 2] No such file or directory: '/root/.azureml/clientinfo.json'\\n2019-12-06 23:32:42,721|azureml.core._experiment_method|DEBUG|Trying to register submit_function search, on method \\n2019-12-06 23:32:42,721|azureml.core._experiment_method|DEBUG|Registered submit_function search, on method \\n2019-12-06 23:32:42,722|azureml.core._experiment_method|DEBUG|Trying to register submit_function search, on method \\n2019-12-06 23:32:42,722|azureml.core._experiment_method|DEBUG|Registered submit_function search, on method \\n2019-12-06 23:32:42,722|azureml.core.run|DEBUG|Adding new factory for run source 
hyperdrive\\n2019-12-06 23:32:43,300|azureml.core.run|DEBUG|Adding new factory for run source azureml.PipelineRun\\n2019-12-06 23:32:43,306|azureml.core.run|DEBUG|Adding new factory for run source azureml.ReusedStepRun\\n2019-12-06 23:32:43,311|azureml.core.run|DEBUG|Adding new factory for run source azureml.StepRun\\n2019-12-06 23:32:43,316|azureml.core.run|DEBUG|Adding new factory for run source azureml.scriptrun\\n2019-12-06 23:32:43,318|azureml.core.authentication.TokenRefresherDaemon|DEBUG|Starting daemon and triggering first instance\\n2019-12-06 23:32:43,324|msrest.universal_http.requests|DEBUG|Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\\n2019-12-06 23:32:43,325|azureml._restclient.clientbase|INFO|Created a worker pool for first use\\n2019-12-06 23:32:43,325|azureml.core.authentication|DEBUG|Time to expire 1813966.674698 seconds\\n2019-12-06 23:32:43,325|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-06 23:32:43,325|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-06 23:32:43,325|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-06 23:32:43,325|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-06 23:32:43,325|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-06 23:32:43,325|azureml._base_sdk_common.service_discovery|DEBUG|Constructing mms service url in from history url environment variable None, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-06 23:32:43,326|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-06 23:32:43,326|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-06 23:32:43,326|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-06 23:32:43,356|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-06 23:32:43,361|msrest.universal_http.requests|DEBUG|Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\\n2019-12-06 23:32:43,369|msrest.universal_http.requests|DEBUG|Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\\n2019-12-06 23:32:43,374|msrest.universal_http.requests|DEBUG|Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\\n2019-12-06 23:32:43,379|msrest.universal_http.requests|DEBUG|Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\\n2019-12-06 
23:32:43,385|msrest.universal_http.requests|DEBUG|Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\\n2019-12-06 23:32:43,385|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.RunClient.get-async:False|DEBUG|[START]\\n2019-12-06 23:32:43,386|msrest.service_client|DEBUG|Accept header absent and forced to application/json\\n2019-12-06 23:32:43,386|msrest.http_logger|DEBUG|Request URL: 'https://eastus2.experiments.azureml.net/history/v1.0/subscriptions/789908e0-5fc2-4c4d-b5f5-9764b0d602b3/resourceGroups/ghiordanfwirsg01/providers/Microsoft.MachineLearningServices/workspaces/ghiordanfwiws/experiments/020_AzureMLEstimator/runs/020_AzureMLEstimator_1575674728_d40baeba'\\n2019-12-06 23:32:43,387|msrest.http_logger|DEBUG|Request method: 'GET'\\n2019-12-06 23:32:43,387|msrest.http_logger|DEBUG|Request headers:\\n2019-12-06 23:32:43,387|msrest.http_logger|DEBUG| 'Accept': 'application/json'\\n2019-12-06 23:32:43,387|msrest.http_logger|DEBUG| 'Content-Type': 'application/json; charset=utf-8'\\n2019-12-06 23:32:43,387|msrest.http_logger|DEBUG| 'x-ms-client-request-id': '2a72fb1c-fdba-4e6d-a244-7315dcdf5d54'\\n2019-12-06 23:32:43,387|msrest.http_logger|DEBUG| 'request-id': '2a72fb1c-fdba-4e6d-a244-7315dcdf5d54'\\n2019-12-06 23:32:43,387|msrest.http_logger|DEBUG| 'User-Agent': 'python/3.6.9 (Linux-4.15.0-1057-azure-x86_64-with-debian-10.0) msrest/0.6.10 azureml._restclient/core.1.0.76'\\n2019-12-06 23:32:43,387|msrest.http_logger|DEBUG|Request body:\\n2019-12-06 23:32:43,387|msrest.http_logger|DEBUG|None\\n2019-12-06 23:32:43,387|msrest.universal_http|DEBUG|Configuring redirects: allow=True, max=30\\n2019-12-06 23:32:43,387|msrest.universal_http|DEBUG|Configuring request: timeout=100, verify=True, cert=None\\n2019-12-06 23:32:43,387|msrest.universal_http|DEBUG|Configuring proxies: ''\\n2019-12-06 23:32:43,387|msrest.universal_http|DEBUG|Evaluate proxies against ENV settings: True\\n2019-12-06 23:32:43,442|msrest.http_logger|DEBUG|Response status: 200\\n2019-12-06 23:32:43,443|msrest.http_logger|DEBUG|Response headers:\\n2019-12-06 23:32:43,443|msrest.http_logger|DEBUG| 'Date': 'Fri, 06 Dec 2019 23:32:43 GMT'\\n2019-12-06 23:32:43,443|msrest.http_logger|DEBUG| 'Content-Type': 'application/json; charset=utf-8'\\n2019-12-06 23:32:43,443|msrest.http_logger|DEBUG| 'Transfer-Encoding': 'chunked'\\n2019-12-06 23:32:43,443|msrest.http_logger|DEBUG| 'Connection': 'keep-alive'\\n2019-12-06 23:32:43,443|msrest.http_logger|DEBUG| 'Vary': 'Accept-Encoding'\\n2019-12-06 23:32:43,443|msrest.http_logger|DEBUG| 'Request-Context': 'appId=cid-v1:2d2e8e63-272e-4b3c-8598-4ee570a0e70d'\\n2019-12-06 23:32:43,443|msrest.http_logger|DEBUG| 'x-ms-client-request-id': '2a72fb1c-fdba-4e6d-a244-7315dcdf5d54'\\n2019-12-06 23:32:43,444|msrest.http_logger|DEBUG| 'x-ms-client-session-id': ''\\n2019-12-06 23:32:43,444|msrest.http_logger|DEBUG| 'Strict-Transport-Security': 'max-age=15724800; includeSubDomains; preload'\\n2019-12-06 23:32:43,444|msrest.http_logger|DEBUG| 'X-Content-Type-Options': 'nosniff'\\n2019-12-06 23:32:43,444|msrest.http_logger|DEBUG| 'Content-Encoding': 'gzip'\\n2019-12-06 23:32:43,444|msrest.http_logger|DEBUG|Response content:\\n2019-12-06 23:32:43,444|msrest.http_logger|DEBUG|{\\n \\\"runNumber\\\": 1516,\\n \\\"rootRunId\\\": \\\"020_AzureMLEstimator_1575674728_d40baeba\\\",\\n \\\"experimentId\\\": \\\"8d96276b-f420-4a67-86be-f933dd3d38cd\\\",\\n \\\"createdUtc\\\": \\\"2019-12-06T23:25:30.5978583+00:00\\\",\\n \\\"createdBy\\\": {\\n \\\"userObjectId\\\": 
\\\"b77869a0-66f2-4288-89ef-13c10accc4dc\\\",\\n \\\"userPuId\\\": \\\"1003000090A95868\\\",\\n \\\"userIdp\\\": null,\\n \\\"userAltSecId\\\": null,\\n \\\"userIss\\\": \\\"https://sts.windows.net/72f988bf-86f1-41af-91ab-2d7cd011db47/\\\",\\n \\\"userTenantId\\\": \\\"72f988bf-86f1-41af-91ab-2d7cd011db47\\\",\\n \\\"userName\\\": \\\"George Iordanescu\\\"\\n },\\n \\\"userId\\\": \\\"b77869a0-66f2-4288-89ef-13c10accc4dc\\\",\\n \\\"token\\\": null,\\n \\\"tokenExpiryTimeUtc\\\": null,\\n \\\"error\\\": null,\\n \\\"warnings\\\": null,\\n \\\"revision\\\": 10,\\n \\\"runId\\\": \\\"020_AzureMLEstimator_1575674728_d40baeba\\\",\\n \\\"parentRunId\\\": null,\\n \\\"status\\\": \\\"Running\\\",\\n \\\"startTimeUtc\\\": \\\"2019-12-06T23:30:15.4122862+00:00\\\",\\n \\\"endTimeUtc\\\": null,\\n \\\"heartbeatEnabled\\\": false,\\n \\\"options\\\": {\\n \\\"generateDataContainerIdIfNotSpecified\\\": true\\n },\\n \\\"name\\\": null,\\n \\\"dataContainerId\\\": \\\"dcid.020_AzureMLEstimator_1575674728_d40baeba\\\",\\n \\\"description\\\": null,\\n \\\"hidden\\\": false,\\n \\\"runType\\\": \\\"azureml.scriptrun\\\",\\n \\\"properties\\\": {\\n \\\"_azureml.ComputeTargetType\\\": \\\"amlcompute\\\",\\n \\\"ContentSnapshotId\\\": \\\"a5071b2a-37a7-40da-8340-69cc894091cb\\\",\\n \\\"azureml.git.repository_uri\\\": \\\"git@github.com:georgeAccnt-GH/DeepSeismic.git\\\",\\n \\\"mlflow.source.git.repoURL\\\": \\\"git@github.com:georgeAccnt-GH/DeepSeismic.git\\\",\\n \\\"azureml.git.branch\\\": \\\"staging\\\",\\n \\\"mlflow.source.git.branch\\\": \\\"staging\\\",\\n \\\"azureml.git.commit\\\": \\\"1d3cd3340f4063508b6f707d5fc2a35f5429a07f\\\",\\n \\\"mlflow.source.git.commit\\\": \\\"1d3cd3340f4063508b6f707d5fc2a35f5429a07f\\\",\\n \\\"azureml.git.dirty\\\": \\\"True\\\",\\n \\\"ProcessInfoFile\\\": \\\"azureml-logs/process_info.json\\\",\\n \\\"ProcessStatusFile\\\": \\\"azureml-logs/process_status.json\\\"\\n },\\n \\\"scriptName\\\": \\\"azureml_01_modelling.py\\\",\\n \\\"target\\\": \\\"gpuclstfwi02\\\",\\n \\\"tags\\\": {\\n \\\"_aml_system_ComputeTargetStatus\\\": \\\"{\\\\\\\"AllocationState\\\\\\\":\\\\\\\"steady\\\\\\\",\\\\\\\"PreparingNodeCount\\\\\\\":1,\\\\\\\"RunningNodeCount\\\\\\\":0,\\\\\\\"CurrentNodeCount\\\\\\\":1}\\\"\\n },\\n \\\"inputDatasets\\\": [],\\n \\\"runDefinition\\\": null,\\n \\\"createdFrom\\\": {\\n \\\"type\\\": \\\"Notebook\\\",\\n \\\"locationType\\\": \\\"ArtifactId\\\",\\n \\\"location\\\": \\\"LocalUpload/020_AzureMLEstimator_1575674728_d40baeba/020_UseAzureMLEstimatorForExperimentation_GeophysicsTutorial_FWI_Azure_devito.ipynb\\\"\\n },\\n \\\"cancelUri\\\": \\\"https://eastus2.experiments.azureml.net/execution/v1.0/subscriptions/789908e0-5fc2-4c4d-b5f5-9764b0d602b3/resourceGroups/ghiordanfwirsg01/providers/Microsoft.MachineLearningServices/workspaces/ghiordanfwiws/experiments/020_AzureMLEstimator/runId/020_AzureMLEstimator_1575674728_d40baeba/cancel\\\",\\n \\\"completeUri\\\": null,\\n \\\"diagnosticsUri\\\": \\\"https://eastus2.experiments.azureml.net/execution/v1.0/subscriptions/789908e0-5fc2-4c4d-b5f5-9764b0d602b3/resourceGroups/ghiordanfwirsg01/providers/Microsoft.MachineLearningServices/workspaces/ghiordanfwiws/experiments/020_AzureMLEstimator/runId/020_AzureMLEstimator_1575674728_d40baeba/diagnostics\\\",\\n \\\"computeRequest\\\": {\\n \\\"nodeCount\\\": 1\\n },\\n \\\"retainForLifetimeOfWorkspace\\\": false\\n}\\n2019-12-06 
23:32:43,449|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.RunClient.get-async:False|DEBUG|[STOP]\\n2019-12-06 23:32:43,450|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba|DEBUG|Constructing run from dto. type: azureml.scriptrun, source: None, props: {'_azureml.ComputeTargetType': 'amlcompute', 'ContentSnapshotId': 'a5071b2a-37a7-40da-8340-69cc894091cb', 'azureml.git.repository_uri': 'git@github.com:georgeAccnt-GH/DeepSeismic.git', 'mlflow.source.git.repoURL': 'git@github.com:georgeAccnt-GH/DeepSeismic.git', 'azureml.git.branch': 'staging', 'mlflow.source.git.branch': 'staging', 'azureml.git.commit': '1d3cd3340f4063508b6f707d5fc2a35f5429a07f', 'mlflow.source.git.commit': '1d3cd3340f4063508b6f707d5fc2a35f5429a07f', 'azureml.git.dirty': 'True', 'ProcessInfoFile': 'azureml-logs/process_info.json', 'ProcessStatusFile': 'azureml-logs/process_status.json'}\\n2019-12-06 23:32:43,450|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunContextManager|DEBUG|Valid logs dir, setting up content loader\\n2019-12-06 23:32:43,451|azureml|WARNING|Could not import azureml.mlflow or azureml.contrib.mlflow mlflow APIs will not run against AzureML services. Add azureml-mlflow as a conda dependency for the run if this behavior is desired\\n2019-12-06 23:32:43,451|azureml.WorkerPool|DEBUG|[START]\\n2019-12-06 23:32:43,451|azureml.SendRunKillSignal|DEBUG|[START]\\n2019-12-06 23:32:43,451|azureml.RunStatusContext|DEBUG|[START]\\n2019-12-06 23:32:43,451|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunContextManager.RunStatusContext|DEBUG|[START]\\n2019-12-06 23:32:43,451|azureml.WorkingDirectoryCM|DEBUG|[START]\\n2019-12-06 23:32:43,451|azureml.history._tracking.PythonWorkingDirectory.workingdir|DEBUG|[START]\\n2019-12-06 23:32:43,451|azureml.history._tracking.PythonWorkingDirectory|INFO|Current working dir: /mnt/batch/tasks/shared/LS_root/jobs/ghiordanfwiws/azureml/020_azuremlestimator_1575674728_d40baeba/mounts/workspaceblobstore/azureml/020_AzureMLEstimator_1575674728_d40baeba\\n2019-12-06 23:32:43,451|azureml.history._tracking.PythonWorkingDirectory.workingdir|DEBUG|Calling pyfs\\n2019-12-06 23:32:43,451|azureml.history._tracking.PythonWorkingDirectory.workingdir|DEBUG|Storing working dir for pyfs as /mnt/batch/tasks/shared/LS_root/jobs/ghiordanfwiws/azureml/020_azuremlestimator_1575674728_d40baeba/mounts/workspaceblobstore/azureml/020_AzureMLEstimator_1575674728_d40baeba\\n2019-12-06 23:32:45,592|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-06 23:32:45,592|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-06 23:32:45,592|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-06 23:32:45,592|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-06 23:32:45,592|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-06 
23:32:45,592|azureml._base_sdk_common.service_discovery|DEBUG|Constructing mms service url in from history url environment variable None, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-06 23:32:45,592|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-06 23:32:45,593|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-06 23:32:45,593|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-06 23:32:45,599|msrest.universal_http.requests|DEBUG|Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\\n2019-12-06 23:32:45,600|azureml._run_impl.run_history_facade|DEBUG|Created a static thread pool for RunHistoryFacade class\\n2019-12-06 23:32:45,605|msrest.universal_http.requests|DEBUG|Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\\n2019-12-06 23:32:45,610|msrest.universal_http.requests|DEBUG|Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\\n2019-12-06 23:32:45,616|msrest.universal_http.requests|DEBUG|Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\\n2019-12-06 23:32:45,621|msrest.universal_http.requests|DEBUG|Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\\n2019-12-06 23:32:45,622|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.RunClient.get-async:False|DEBUG|[START]\\n2019-12-06 23:32:45,622|msrest.service_client|DEBUG|Accept header absent and forced to application/json\\n2019-12-06 23:32:45,622|msrest.http_logger|DEBUG|Request URL: 'https://eastus2.experiments.azureml.net/history/v1.0/subscriptions/789908e0-5fc2-4c4d-b5f5-9764b0d602b3/resourceGroups/ghiordanfwirsg01/providers/Microsoft.MachineLearningServices/workspaces/ghiordanfwiws/experiments/020_AzureMLEstimator/runs/020_AzureMLEstimator_1575674728_d40baeba'\\n2019-12-06 23:32:45,622|msrest.http_logger|DEBUG|Request method: 'GET'\\n2019-12-06 23:32:45,622|msrest.http_logger|DEBUG|Request headers:\\n2019-12-06 23:32:45,622|msrest.http_logger|DEBUG| 'Accept': 'application/json'\\n2019-12-06 23:32:45,622|msrest.http_logger|DEBUG| 'Content-Type': 'application/json; charset=utf-8'\\n2019-12-06 23:32:45,623|msrest.http_logger|DEBUG| 'x-ms-client-request-id': '7502a986-27e5-47c2-8a48-e5501a0dda7c'\\n2019-12-06 23:32:45,623|msrest.http_logger|DEBUG| 'request-id': '7502a986-27e5-47c2-8a48-e5501a0dda7c'\\n2019-12-06 23:32:45,623|msrest.http_logger|DEBUG| 'User-Agent': 'python/3.6.9 (Linux-4.15.0-1057-azure-x86_64-with-debian-10.0) msrest/0.6.10 azureml._restclient/core.1.0.76'\\n2019-12-06 23:32:45,623|msrest.http_logger|DEBUG|Request body:\\n2019-12-06 23:32:45,623|msrest.http_logger|DEBUG|None\\n2019-12-06 23:32:45,623|msrest.universal_http|DEBUG|Configuring redirects: allow=True, max=30\\n2019-12-06 23:32:45,623|msrest.universal_http|DEBUG|Configuring request: timeout=100, verify=True, cert=None\\n2019-12-06 23:32:45,623|msrest.universal_http|DEBUG|Configuring proxies: ''\\n2019-12-06 23:32:45,623|msrest.universal_http|DEBUG|Evaluate proxies against ENV settings: True\\n2019-12-06 23:32:46,018|msrest.http_logger|DEBUG|Response status: 200\\n2019-12-06 
23:32:46,018|msrest.http_logger|DEBUG|Response headers:\\n2019-12-06 23:32:46,018|msrest.http_logger|DEBUG| 'Date': 'Fri, 06 Dec 2019 23:32:46 GMT'\\n2019-12-06 23:32:46,019|msrest.http_logger|DEBUG| 'Content-Type': 'application/json; charset=utf-8'\\n2019-12-06 23:32:46,019|msrest.http_logger|DEBUG| 'Transfer-Encoding': 'chunked'\\n2019-12-06 23:32:46,019|msrest.http_logger|DEBUG| 'Connection': 'keep-alive'\\n2019-12-06 23:32:46,019|msrest.http_logger|DEBUG| 'Vary': 'Accept-Encoding'\\n2019-12-06 23:32:46,019|msrest.http_logger|DEBUG| 'Request-Context': 'appId=cid-v1:2d2e8e63-272e-4b3c-8598-4ee570a0e70d'\\n2019-12-06 23:32:46,019|msrest.http_logger|DEBUG| 'x-ms-client-request-id': '7502a986-27e5-47c2-8a48-e5501a0dda7c'\\n2019-12-06 23:32:46,019|msrest.http_logger|DEBUG| 'x-ms-client-session-id': ''\\n2019-12-06 23:32:46,019|msrest.http_logger|DEBUG| 'Strict-Transport-Security': 'max-age=15724800; includeSubDomains; preload'\\n2019-12-06 23:32:46,019|msrest.http_logger|DEBUG| 'X-Content-Type-Options': 'nosniff'\\n2019-12-06 23:32:46,019|msrest.http_logger|DEBUG| 'Content-Encoding': 'gzip'\\n2019-12-06 23:32:46,019|msrest.http_logger|DEBUG|Response content:\\n2019-12-06 23:32:46,019|msrest.http_logger|DEBUG|{\\n \\\"runNumber\\\": 1516,\\n \\\"rootRunId\\\": \\\"020_AzureMLEstimator_1575674728_d40baeba\\\",\\n \\\"experimentId\\\": \\\"8d96276b-f420-4a67-86be-f933dd3d38cd\\\",\\n \\\"createdUtc\\\": \\\"2019-12-06T23:25:30.5978583+00:00\\\",\\n \\\"createdBy\\\": {\\n \\\"userObjectId\\\": \\\"b77869a0-66f2-4288-89ef-13c10accc4dc\\\",\\n \\\"userPuId\\\": \\\"1003000090A95868\\\",\\n \\\"userIdp\\\": null,\\n \\\"userAltSecId\\\": null,\\n \\\"userIss\\\": \\\"https://sts.windows.net/72f988bf-86f1-41af-91ab-2d7cd011db47/\\\",\\n \\\"userTenantId\\\": \\\"72f988bf-86f1-41af-91ab-2d7cd011db47\\\",\\n \\\"userName\\\": \\\"George Iordanescu\\\"\\n },\\n \\\"userId\\\": \\\"b77869a0-66f2-4288-89ef-13c10accc4dc\\\",\\n \\\"token\\\": null,\\n \\\"tokenExpiryTimeUtc\\\": null,\\n \\\"error\\\": null,\\n \\\"warnings\\\": null,\\n \\\"revision\\\": 10,\\n \\\"runId\\\": \\\"020_AzureMLEstimator_1575674728_d40baeba\\\",\\n \\\"parentRunId\\\": null,\\n \\\"status\\\": \\\"Running\\\",\\n \\\"startTimeUtc\\\": \\\"2019-12-06T23:30:15.4122862+00:00\\\",\\n \\\"endTimeUtc\\\": null,\\n \\\"heartbeatEnabled\\\": false,\\n \\\"options\\\": {\\n \\\"generateDataContainerIdIfNotSpecified\\\": true\\n },\\n \\\"name\\\": null,\\n \\\"dataContainerId\\\": \\\"dcid.020_AzureMLEstimator_1575674728_d40baeba\\\",\\n \\\"description\\\": null,\\n \\\"hidden\\\": false,\\n \\\"runType\\\": \\\"azureml.scriptrun\\\",\\n \\\"properties\\\": {\\n \\\"_azureml.ComputeTargetType\\\": \\\"amlcompute\\\",\\n \\\"ContentSnapshotId\\\": \\\"a5071b2a-37a7-40da-8340-69cc894091cb\\\",\\n \\\"azureml.git.repository_uri\\\": \\\"git@github.com:georgeAccnt-GH/DeepSeismic.git\\\",\\n \\\"mlflow.source.git.repoURL\\\": \\\"git@github.com:georgeAccnt-GH/DeepSeismic.git\\\",\\n \\\"azureml.git.branch\\\": \\\"staging\\\",\\n \\\"mlflow.source.git.branch\\\": \\\"staging\\\",\\n \\\"azureml.git.commit\\\": \\\"1d3cd3340f4063508b6f707d5fc2a35f5429a07f\\\",\\n \\\"mlflow.source.git.commit\\\": \\\"1d3cd3340f4063508b6f707d5fc2a35f5429a07f\\\",\\n \\\"azureml.git.dirty\\\": \\\"True\\\",\\n \\\"ProcessInfoFile\\\": \\\"azureml-logs/process_info.json\\\",\\n \\\"ProcessStatusFile\\\": \\\"azureml-logs/process_status.json\\\"\\n },\\n \\\"scriptName\\\": \\\"azureml_01_modelling.py\\\",\\n \\\"target\\\": \\\"gpuclstfwi02\\\",\\n 
\\\"tags\\\": {\\n \\\"_aml_system_ComputeTargetStatus\\\": \\\"{\\\\\\\"AllocationState\\\\\\\":\\\\\\\"steady\\\\\\\",\\\\\\\"PreparingNodeCount\\\\\\\":1,\\\\\\\"RunningNodeCount\\\\\\\":0,\\\\\\\"CurrentNodeCount\\\\\\\":1}\\\"\\n },\\n \\\"inputDatasets\\\": [],\\n \\\"runDefinition\\\": null,\\n \\\"createdFrom\\\": {\\n \\\"type\\\": \\\"Notebook\\\",\\n \\\"locationType\\\": \\\"ArtifactId\\\",\\n \\\"location\\\": \\\"LocalUpload/020_AzureMLEstimator_1575674728_d40baeba/020_UseAzureMLEstimatorForExperimentation_GeophysicsTutorial_FWI_Azure_devito.ipynb\\\"\\n },\\n \\\"cancelUri\\\": \\\"https://eastus2.experiments.azureml.net/execution/v1.0/subscriptions/789908e0-5fc2-4c4d-b5f5-9764b0d602b3/resourceGroups/ghiordanfwirsg01/providers/Microsoft.MachineLearningServices/workspaces/ghiordanfwiws/experiments/020_AzureMLEstimator/runId/020_AzureMLEstimator_1575674728_d40baeba/cancel\\\",\\n \\\"completeUri\\\": null,\\n \\\"diagnosticsUri\\\": \\\"https://eastus2.experiments.azureml.net/execution/v1.0/subscriptions/789908e0-5fc2-4c4d-b5f5-9764b0d602b3/resourceGroups/ghiordanfwirsg01/providers/Microsoft.MachineLearningServices/workspaces/ghiordanfwiws/experiments/020_AzureMLEstimator/runId/020_AzureMLEstimator_1575674728_d40baeba/diagnostics\\\",\\n \\\"computeRequest\\\": {\\n \\\"nodeCount\\\": 1\\n },\\n \\\"retainForLifetimeOfWorkspace\\\": false\\n}\\n2019-12-06 23:32:46,022|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.RunClient.get-async:False|DEBUG|[STOP]\\n2019-12-06 23:32:46,023|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba|DEBUG|Constructing run from dto. type: azureml.scriptrun, source: None, props: {'_azureml.ComputeTargetType': 'amlcompute', 'ContentSnapshotId': 'a5071b2a-37a7-40da-8340-69cc894091cb', 'azureml.git.repository_uri': 'git@github.com:georgeAccnt-GH/DeepSeismic.git', 'mlflow.source.git.repoURL': 'git@github.com:georgeAccnt-GH/DeepSeismic.git', 'azureml.git.branch': 'staging', 'mlflow.source.git.branch': 'staging', 'azureml.git.commit': '1d3cd3340f4063508b6f707d5fc2a35f5429a07f', 'mlflow.source.git.commit': '1d3cd3340f4063508b6f707d5fc2a35f5429a07f', 'azureml.git.dirty': 'True', 'ProcessInfoFile': 'azureml-logs/process_info.json', 'ProcessStatusFile': 'azureml-logs/process_status.json'}\\n2019-12-06 23:32:46,023|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunContextManager|DEBUG|Valid logs dir, setting up content loader\\n2019-12-06 23:33:13,322|azureml.core.authentication|DEBUG|Time to expire 1813936.677149 seconds\\n2019-12-06 23:33:43,323|azureml.core.authentication|DEBUG|Time to expire 1813906.67683 seconds\\n2019-12-06 23:33:57,866|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient|DEBUG|Overrides: Max batch size: 50, batch cushion: 5, Interval: 1.\\n2019-12-06 23:33:57,867|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.PostMetricsBatch.PostMetricsBatchDaemon|DEBUG|Starting daemon and triggering first instance\\n2019-12-06 23:33:57,867|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient|DEBUG|Used for use_batch=True.\\n2019-12-06 23:33:57,911|azureml.history._tracking.PythonWorkingDirectory.workingdir|DEBUG|Calling pyfs\\n2019-12-06 23:33:57,911|azureml.history._tracking.PythonWorkingDirectory|INFO|Current working dir: /devito\\n2019-12-06 23:33:57,911|azureml.history._tracking.PythonWorkingDirectory.workingdir|DEBUG|pyfs has path /devito\\n2019-12-06 
23:33:57,911|azureml.history._tracking.PythonWorkingDirectory.workingdir|DEBUG|Reverting working dir from /devito to /mnt/batch/tasks/shared/LS_root/jobs/ghiordanfwiws/azureml/020_azuremlestimator_1575674728_d40baeba/mounts/workspaceblobstore/azureml/020_AzureMLEstimator_1575674728_d40baeba\\n2019-12-06 23:33:57,911|azureml.history._tracking.PythonWorkingDirectory|INFO|Setting working dir to /mnt/batch/tasks/shared/LS_root/jobs/ghiordanfwiws/azureml/020_azuremlestimator_1575674728_d40baeba/mounts/workspaceblobstore/azureml/020_AzureMLEstimator_1575674728_d40baeba\\n2019-12-06 23:33:57,912|azureml.history._tracking.PythonWorkingDirectory.workingdir|DEBUG|[STOP]\\n2019-12-06 23:33:57,912|azureml.WorkingDirectoryCM|DEBUG|[STOP]\\n2019-12-06 23:33:57,912|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba|INFO|complete is not setting status for submitted runs.\\n2019-12-06 23:33:57,912|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.FlushingMetricsClient|DEBUG|[START]\\n2019-12-06 23:33:57,912|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient|DEBUG|Overrides: Max batch size: 50, batch cushion: 5, Interval: 1.\\n2019-12-06 23:33:57,912|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.PostMetricsBatch.PostMetricsBatchDaemon|DEBUG|Starting daemon and triggering first instance\\n2019-12-06 23:33:57,912|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient|DEBUG|Used for use_batch=True.\\n2019-12-06 23:33:57,912|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.PostMetricsBatch.WaitFlushSource:MetricsClient|DEBUG|[START]\\n2019-12-06 23:33:57,912|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.PostMetricsBatch.WaitFlushSource:MetricsClient|DEBUG|flush timeout 300 is different from task queue timeout 120, using flush timeout\\n2019-12-06 23:33:57,913|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.PostMetricsBatch.WaitFlushSource:MetricsClient|DEBUG|Waiting 300 seconds on tasks: [].\\n2019-12-06 23:33:57,913|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.PostMetricsBatch|DEBUG|\\n2019-12-06 23:33:57,913|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.PostMetricsBatch.WaitFlushSource:MetricsClient|DEBUG|[STOP]\\n2019-12-06 23:33:57,913|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.FlushingMetricsClient|DEBUG|[STOP]\\n2019-12-06 23:33:57,913|azureml.RunStatusContext|DEBUG|[STOP]\\n2019-12-06 23:33:57,913|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.FlushingMetricsClient|DEBUG|[START]\\n2019-12-06 23:33:57,913|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.PostMetricsBatch.WaitFlushSource:MetricsClient|DEBUG|[START]\\n2019-12-06 23:33:57,913|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.PostMetricsBatch.WaitFlushSource:MetricsClient|DEBUG|flush timeout 300.0 is different from task queue timeout 120, using flush timeout\\n2019-12-06 23:33:57,913|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.PostMetricsBatch.WaitFlushSource:MetricsClient|DEBUG|Waiting 300.0 
seconds on tasks: [].\\n2019-12-06 23:33:57,913|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.PostMetricsBatch|DEBUG|\\n2019-12-06 23:33:57,913|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.PostMetricsBatch.WaitFlushSource:MetricsClient|DEBUG|[STOP]\\n2019-12-06 23:33:57,914|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.FlushingMetricsClient|DEBUG|[STOP]\\n2019-12-06 23:33:57,914|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.FlushingMetricsClient|DEBUG|[START]\\n2019-12-06 23:33:57,914|azureml.BatchTaskQueueAdd_1_Batches|DEBUG|[Start]\\n2019-12-06 23:33:57,914|azureml.BatchTaskQueueAdd_1_Batches.WorkerPool|DEBUG|submitting future: _handle_batch\\n2019-12-06 23:33:57,914|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.PostMetricsBatch|DEBUG|Batch size 1.\\n2019-12-06 23:33:57,914|azureml.BatchTaskQueueAdd_1_Batches.0__handle_batch|DEBUG|Using basic handler - no exception handling\\n2019-12-06 23:33:57,914|azureml._restclient.clientbase.WorkerPool|DEBUG|submitting future: _log_batch\\n2019-12-06 23:33:57,914|azureml.BatchTaskQueueAdd_1_Batches|DEBUG|Adding task 0__handle_batch to queue of approximate size: 0\\n2019-12-06 23:33:57,915|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.PostMetricsBatch.0__log_batch|DEBUG|Using basic handler - no exception handling\\n2019-12-06 23:33:57,915|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.post_batch-async:False|DEBUG|[START]\\n2019-12-06 23:33:57,915|azureml.BatchTaskQueueAdd_1_Batches|DEBUG|[Stop] - waiting default timeout\\n2019-12-06 23:33:57,915|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.PostMetricsBatch|DEBUG|Adding task 0__log_batch to queue of approximate size: 0\\n2019-12-06 23:33:57,916|msrest.service_client|DEBUG|Accept header absent and forced to application/json\\n2019-12-06 23:33:57,916|azureml.BatchTaskQueueAdd_1_Batches.WaitFlushSource:BatchTaskQueueAdd_1_Batches|DEBUG|[START]\\n2019-12-06 23:33:57,917|msrest.universal_http.requests|DEBUG|Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\\n2019-12-06 23:33:57,917|azureml.BatchTaskQueueAdd_1_Batches.WaitFlushSource:BatchTaskQueueAdd_1_Batches|DEBUG|Overriding default flush timeout from None to 120\\n2019-12-06 23:33:57,917|msrest.http_logger|DEBUG|Request URL: 'https://eastus2.experiments.azureml.net/history/v1.0/subscriptions/789908e0-5fc2-4c4d-b5f5-9764b0d602b3/resourceGroups/ghiordanfwirsg01/providers/Microsoft.MachineLearningServices/workspaces/ghiordanfwiws/experiments/020_AzureMLEstimator/runs/020_AzureMLEstimator_1575674728_d40baeba/batch/metrics'\\n2019-12-06 23:33:57,917|azureml.BatchTaskQueueAdd_1_Batches.WaitFlushSource:BatchTaskQueueAdd_1_Batches|DEBUG|Waiting 120 seconds on tasks: [AsyncTask(0__handle_batch)].\\n2019-12-06 23:33:57,918|msrest.http_logger|DEBUG|Request method: 'POST'\\n2019-12-06 23:33:57,918|azureml.BatchTaskQueueAdd_1_Batches.0__handle_batch.WaitingTask|DEBUG|[START]\\n2019-12-06 23:33:57,918|msrest.http_logger|DEBUG|Request headers:\\n2019-12-06 23:33:57,918|azureml.BatchTaskQueueAdd_1_Batches.0__handle_batch.WaitingTask|DEBUG|Awaiter is BatchTaskQueueAdd_1_Batches\\n2019-12-06 23:33:57,918|msrest.http_logger|DEBUG| 'Accept': 'application/json'\\n2019-12-06 
23:33:57,918|azureml.BatchTaskQueueAdd_1_Batches.0__handle_batch.WaitingTask|DEBUG|[STOP]\\n2019-12-06 23:33:57,918|msrest.http_logger|DEBUG| 'Content-Type': 'application/json-patch+json; charset=utf-8'\\n2019-12-06 23:33:57,918|azureml.BatchTaskQueueAdd_1_Batches|DEBUG|\\n2019-12-06 23:33:57,918|msrest.http_logger|DEBUG| 'x-ms-client-request-id': '7318af30-3aa3-4d84-a4db-0595c67afd70'\\n2019-12-06 23:33:57,918|azureml.BatchTaskQueueAdd_1_Batches.WaitFlushSource:BatchTaskQueueAdd_1_Batches|DEBUG|[STOP]\\n2019-12-06 23:33:57,919|msrest.http_logger|DEBUG| 'request-id': '7318af30-3aa3-4d84-a4db-0595c67afd70'\\n2019-12-06 23:33:57,919|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.PostMetricsBatch.WaitFlushSource:MetricsClient|DEBUG|[START]\\n2019-12-06 23:33:57,919|msrest.http_logger|DEBUG| 'Content-Length': '410'\\n2019-12-06 23:33:57,919|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.PostMetricsBatch.WaitFlushSource:MetricsClient|DEBUG|flush timeout 300.0 is different from task queue timeout 120, using flush timeout\\n2019-12-06 23:33:57,919|msrest.http_logger|DEBUG| 'User-Agent': 'python/3.6.9 (Linux-4.15.0-1057-azure-x86_64-with-debian-10.0) msrest/0.6.10 azureml._restclient/core.1.0.76 sdk_run'\\n2019-12-06 23:33:57,919|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.PostMetricsBatch.WaitFlushSource:MetricsClient|DEBUG|Waiting 300.0 seconds on tasks: [AsyncTask(0__log_batch)].\\n2019-12-06 23:33:57,919|msrest.http_logger|DEBUG|Request body:\\n2019-12-06 23:33:57,919|msrest.http_logger|DEBUG|{\\\"values\\\": [{\\\"metricId\\\": \\\"d160ffa3-e1bc-4ff2-b60f-7742b38cdfd2\\\", \\\"metricType\\\": \\\"azureml.v1.scalar\\\", \\\"createdUtc\\\": \\\"2019-12-06T23:33:57.866688Z\\\", \\\"name\\\": \\\"training_message01: \\\", \\\"description\\\": \\\"\\\", \\\"numCells\\\": 1, \\\"cells\\\": [{\\\"training_message01: \\\": \\\"finished experiment\\\"}], \\\"schema\\\": {\\\"numProperties\\\": 1, \\\"properties\\\": [{\\\"propertyId\\\": \\\"training_message01: \\\", \\\"name\\\": \\\"training_message01: \\\", \\\"type\\\": \\\"string\\\"}]}}]}\\n2019-12-06 23:33:57,919|msrest.universal_http|DEBUG|Configuring redirects: allow=True, max=30\\n2019-12-06 23:33:57,920|msrest.universal_http|DEBUG|Configuring request: timeout=100, verify=True, cert=None\\n2019-12-06 23:33:57,920|msrest.universal_http|DEBUG|Configuring proxies: ''\\n2019-12-06 23:33:57,920|msrest.universal_http|DEBUG|Evaluate proxies against ENV settings: True\\n2019-12-06 23:33:58,044|msrest.http_logger|DEBUG|Response status: 200\\n2019-12-06 23:33:58,044|msrest.http_logger|DEBUG|Response headers:\\n2019-12-06 23:33:58,044|msrest.http_logger|DEBUG| 'Date': 'Fri, 06 Dec 2019 23:33:58 GMT'\\n2019-12-06 23:33:58,044|msrest.http_logger|DEBUG| 'Content-Length': '0'\\n2019-12-06 23:33:58,044|msrest.http_logger|DEBUG| 'Connection': 'keep-alive'\\n2019-12-06 23:33:58,044|msrest.http_logger|DEBUG| 'Request-Context': 'appId=cid-v1:2d2e8e63-272e-4b3c-8598-4ee570a0e70d'\\n2019-12-06 23:33:58,044|msrest.http_logger|DEBUG| 'x-ms-client-request-id': '7318af30-3aa3-4d84-a4db-0595c67afd70'\\n2019-12-06 23:33:58,044|msrest.http_logger|DEBUG| 'x-ms-client-session-id': ''\\n2019-12-06 23:33:58,045|msrest.http_logger|DEBUG| 'Strict-Transport-Security': 'max-age=15724800; includeSubDomains; preload'\\n2019-12-06 23:33:58,045|msrest.http_logger|DEBUG| 'X-Content-Type-Options': 'nosniff'\\n2019-12-06 
23:33:58,045|msrest.http_logger|DEBUG|Response content:\\n2019-12-06 23:33:58,045|msrest.http_logger|DEBUG|\\n2019-12-06 23:33:58,045|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.post_batch-async:False|DEBUG|[STOP]\\n2019-12-06 23:33:58,170|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.PostMetricsBatch.0__log_batch.WaitingTask|DEBUG|[START]\\n2019-12-06 23:33:58,170|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.PostMetricsBatch.0__log_batch.WaitingTask|DEBUG|Awaiter is PostMetricsBatch\\n2019-12-06 23:33:58,170|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.PostMetricsBatch.0__log_batch.WaitingTask|DEBUG|[STOP]\\n2019-12-06 23:33:58,170|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.PostMetricsBatch|DEBUG|Waiting on task: 0__log_batch.\\n1 tasks left. Current duration of flush 0.0002143383026123047 seconds.\\n\\n2019-12-06 23:33:58,170|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.PostMetricsBatch.WaitFlushSource:MetricsClient|DEBUG|[STOP]\\n2019-12-06 23:33:58,170|azureml._SubmittedRun#020_AzureMLEstimator_1575674728_d40baeba.RunHistoryFacade.MetricsClient.FlushingMetricsClient|DEBUG|[STOP]\\n2019-12-06 23:33:58,170|azureml.SendRunKillSignal|DEBUG|[STOP]\\n2019-12-06 23:33:58,170|azureml.HistoryTrackingWorkerPool.WorkerPoolShutdown|DEBUG|[START]\\n2019-12-06 23:33:58,170|azureml.HistoryTrackingWorkerPool.WorkerPoolShutdown|DEBUG|[STOP]\\n2019-12-06 23:33:58,170|azureml.WorkerPool|DEBUG|[STOP]\\n\\nRun is completed.\", \"graph\": {}, \"widget_settings\": {\"childWidgetDisplay\": \"popup\", \"send_telemetry\": false, \"log_level\": \"NOTSET\", \"sdk_version\": \"1.0.76\"}, \"loading\": false}"
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "# use a custom Docker image\n",
+ "from azureml.core.container_registry import ContainerRegistry\n",
+ "\n",
+ "image_name = docker_image_name\n",
+ "\n",
+ "# you can also point to an image in a private ACR\n",
+ "image_registry_details = ContainerRegistry()\n",
+ "image_registry_details.address = docker_repo_name\n",
+ "image_registry_details.username = os.getenv('ACR_USERNAME')\n",
+ "image_registry_details.password = os.getenv('ACR_PASSWORD') \n",
+ "\n",
+ "# don't let the system build a new conda environment\n",
+ "user_managed_dependencies = True\n",
+ "\n",
+ "# submit to a local Docker container. if you don't have Docker engine running locally, you can set compute_target to cpu_cluster.\n",
+ "script_params = {\n",
+ " '--output_folder': 'some_folder'\n",
+ "}\n",
+ "\n",
+ "\n",
+ "# distributed_training_conf = MpiConfiguration()\n",
+ "# distributed_training_conf.process_count_per_node = 2\n",
+ "\n",
+ "est = Estimator(source_directory=script_path, \n",
+ " compute_target=gpu_cluster,#'local', #gpu_cluster, \n",
+ " entry_script=azureml_training_script_file,\n",
+ " script_params=script_params,\n",
+ " use_docker=True,\n",
+ " custom_docker_image=image_name,\n",
+ " # uncomment below line to use your private ACR\n",
+ " image_registry_details=image_registry_details, \n",
+ " user_managed=user_managed_dependencies,\n",
+ " distributed_training=None,\n",
+ " node_count=1\n",
+ " )\n",
+ "est.run_config.environment.python.interpreter_path = python_path_in_docker_image\n",
+ "\n",
+ "run = exp.submit(est)\n",
+ "RunDetails(run).show()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "One can use the above link to currrent experiment run in Azure Portal to see tracked metrics, and images and output notebooks saved by azureml_training_script_full_file_name in {run_dir}/outputs on the remote compute target that are automatically saved by AzureML in the run history Azure portal pages."
+ ]
+ },
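+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "The next cell is an illustrative sketch only (an assumption, not part of the original run above): it lists the artifacts AzureML tracked for `run` and downloads the ones under `outputs/` into a hypothetical local folder named `downloaded_outputs`."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Illustrative sketch: fetch the artifacts AzureML tracked for this run.\n",
+ "# Assumes the `run` object submitted above; 'downloaded_outputs' is a hypothetical local folder name.\n",
+ "import os\n",
+ "\n",
+ "tracked_files = run.get_file_names()  # names of all files stored in the run history\n",
+ "print(tracked_files)\n",
+ "\n",
+ "os.makedirs('downloaded_outputs', exist_ok=True)\n",
+ "for f in tracked_files:\n",
+ "    if f.startswith('outputs/'):\n",
+ "        run.download_file(f, output_file_path=os.path.join('downloaded_outputs', os.path.basename(f)))"
+ ]
+ },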
+ {
+ "cell_type": "code",
+ "execution_count": 19,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'runId= 020_AzureMLEstimator_1575674728_d40baeba'"
+ ]
+ },
+ "execution_count": 19,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "text/plain": [
+ "'experimentation baseImage: fwi01_azureml:sdk.v1.0.76'"
+ ]
+ },
+ "execution_count": 19,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "run_details = run.get_details()\n",
+ "\n",
+ "# print some details of job run\n",
+ "'runId= {}'.format(run_details['runId'])\n",
+ "'experimentation baseImage: {}'.format(run_details['runDefinition']['environment']['docker']['baseImage'])"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 20,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Finished running 020_UseAzureMLEstimatorForExperimentation_GeophysicsTutorial_FWI_Azure_devito!\n"
+ ]
+ }
+ ],
+ "source": [
+ "print('Finished running 020_UseAzureMLEstimatorForExperimentation_GeophysicsTutorial_FWI_Azure_devito!')"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.7.5"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/contrib/fwi/azureml_devito/notebooks/030_ScaleJobsUsingAzuremL_GeophysicsTutorial_FWI_Azure_devito.ipynb b/contrib/fwi/azureml_devito/notebooks/030_ScaleJobsUsingAzuremL_GeophysicsTutorial_FWI_Azure_devito.ipynb
new file mode 100755
index 00000000..1b6ecaf4
--- /dev/null
+++ b/contrib/fwi/azureml_devito/notebooks/030_ScaleJobsUsingAzuremL_GeophysicsTutorial_FWI_Azure_devito.ipynb
@@ -0,0 +1,1369 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Copyright (c) Microsoft Corporation. \n",
+ "Licensed under the MIT License. \n",
+ " \n",
+ "\n",
+ "# FWI demo based on: \n",
+ "This project ports devito (https://github.com/opesci/devito) into Azure and runs tutorial notebooks at:\n",
+ "https://nbviewer.jupyter.org/github/opesci/devito/blob/master/examples/seismic/tutorials/\n",
+ "\n",
+ "\n",
+ "\n",
+ "In this notebook we run the devito demo [notebooks](https://nbviewer.jupyter.org/github/opesci/devito/blob/master/examples/seismic/tutorials/) mentioned above by using an [AzureML estimator](https://docs.microsoft.com/en-us/python/api/azureml-train-core/azureml.train.estimator.estimator?view=azure-ml-py) with custom docker image. The docker image and associated docker file were created in previous notebook.\n",
+ "\n",
+ "\n",
+ "#### This notebook is used as a control plane to submit experimentation jobs running devito in Azure in two modes (see [remote run azureml python script file invoking devito](#devito_demo_mode)):\n",
+ " - [Mode 1](#devito_demo_mode_1):\n",
+ " - uses custom code (slightly modified graphing functions save images to files too) \n",
+ " - experimentation job is defined by the devito code that is packaged as a py file to be run on an Azure remote compute target\n",
+ " - experimentation job can be used to track metrics or other artifacts (images)\n",
+ " \n",
+ " - Mode 2:\n",
+ " - papermill is invoked via cli or via its Python API to run unedited devito demo notebooks (https://github.com/opesci/devito/tree/master/examples/seismic/tutorials) on the remote compute target and get back the results as saved notebooks that are then Available in Azure portal. \n"
+ ]
+ },
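+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "The next cell is a minimal sketch of Mode 2, shown for orientation only; it is an assumption, not part of the control-plane flow. It presumes papermill is installed and the devito repository is available at /devito (as inside the custom docker image), and it writes the executed copy to a hypothetical local path. In the actual experiment below, an equivalent call runs inside the AzureML entry script on the remote compute target."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Minimal Mode 2 sketch: run one unedited devito tutorial notebook with papermill.\n",
+ "# Assumptions: papermill is installed and the devito repo is cloned at /devito;\n",
+ "# './02_rtm_output.ipynb' is a hypothetical local output path.\n",
+ "import papermill as pm\n",
+ "\n",
+ "pm.execute_notebook(\n",
+ "    '/devito/examples/seismic/tutorials/02_rtm.ipynb',  # unedited tutorial notebook\n",
+ "    './02_rtm_output.ipynb',  # executed copy saved locally\n",
+ "    kernel_name='python3'\n",
+ ")"
+ ]
+ },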
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Allow multiple displays per cell\n",
+ "from IPython.core.interactiveshell import InteractiveShell\n",
+ "InteractiveShell.ast_node_interactivity = \"all\" "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import sys, os\n",
+ "import shutil\n",
+ "import urllib\n",
+ "import azureml.core\n",
+ "from azureml.core import Workspace, Experiment\n",
+ "from azureml.core.compute import ComputeTarget, AmlCompute\n",
+ "from azureml.core.compute_target import ComputeTargetException\n",
+ "from azureml.core.runconfig import MpiConfiguration\n",
+ "\n",
+ "\n",
+ "# from azureml.core.datastore import Datastore\n",
+ "# from azureml.data.data_reference import DataReference\n",
+ "# from azureml.pipeline.steps import HyperDriveStep\n",
+ "# from azureml.pipeline.core import Pipeline, PipelineData\n",
+ "# from azureml.train.dnn import TensorFlow\n",
+ "\n",
+ "from azureml.train.estimator import Estimator\n",
+ "from azureml.widgets import RunDetails\n",
+ "\n",
+ "import platform"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Azure ML SDK Version: 1.0.76\n"
+ ]
+ },
+ {
+ "data": {
+ "text/plain": [
+ "'Linux-4.15.0-1064-azure-x86_64-with-debian-stretch-sid'"
+ ]
+ },
+ "execution_count": 3,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "text/plain": [
+ "'/datadrive01/prj/DeepSeismic/contrib/fwi/azureml_devito/notebooks'"
+ ]
+ },
+ "execution_count": 3,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "print(\"Azure ML SDK Version: \", azureml.core.VERSION)\n",
+ "platform.platform()\n",
+ "os.getcwd()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "[None]"
+ ]
+ },
+ "execution_count": 4,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "text/plain": [
+ "'./../not_shared/general.env'"
+ ]
+ },
+ "execution_count": 4,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "def add_path_to_sys_path(path_to_append):\n",
+ " if not (any(path_to_append in paths for paths in sys.path)):\n",
+ " sys.path.append(path_to_append)\n",
+ " \n",
+ "auxiliary_files_dir = os.path.join(*(['.', 'src']))\n",
+ "paths_to_append = [os.path.join(os.getcwd(), auxiliary_files_dir)]\n",
+ "[add_path_to_sys_path(crt_path) for crt_path in paths_to_append]\n",
+ "\n",
+ "import project_utils\n",
+ "prj_consts = project_utils.project_consts()\n",
+ "\n",
+ "dotenv_file_path = os.path.join(*(prj_consts.DOTENV_FILE_PATH))\n",
+ "dotenv_file_path"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "%load_ext dotenv"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'./../not_shared'"
+ ]
+ },
+ "execution_count": 6,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "workspace_config_dir = os.path.join(*(prj_consts.AML_WORKSPACE_CONFIG_DIR))\n",
+ "workspace_config_file = prj_consts.AML_WORKSPACE_CONFIG_FILE_NAME\n",
+ "workspace_config_dir"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'./../temp/devito_tutorial/01_modelling.py'"
+ ]
+ },
+ "execution_count": 7,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "text/plain": [
+ "'./../temp/devito_tutorial/azureml_01_modelling.py'"
+ ]
+ },
+ "execution_count": 7,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "%dotenv $dotenv_file_path\n",
+ "\n",
+ "script_folder = prj_consts.AML_EXPERIMENT_DIR + ['devito_tutorial']\n",
+ "\n",
+ "devito_training_script_file = '01_modelling.py' # hardcoded in file azureml_training_script_full_file_name below\n",
+ "azureml_training_script_file = 'azureml_'+devito_training_script_file\n",
+ "experimentName = '020_AzureMLEstimator'\n",
+ "\n",
+ "os.makedirs(os.path.join(*(script_folder)), exist_ok=True)\n",
+ "script_path = os.path.join(*(script_folder))\n",
+ "training_script_full_file_name = os.path.join(script_path, devito_training_script_file)\n",
+ "azureml_training_script_full_file_name = os.path.join(script_path, azureml_training_script_file)\n",
+ "\n",
+ "training_script_full_file_name\n",
+ "azureml_training_script_full_file_name"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "\n",
+ " \n",
+ "##### devito in Azure ML demo mode 1\n",
+ "Create devito demo script based on \n",
+ "https://nbviewer.jupyter.org/github/opesci/devito/blob/master/examples/seismic/tutorials/01_modelling.ipynb\n",
+ "\n",
+ "[Back](#devito_in_AzureML_demoing_modes) to summary of modes od demoing devito in AzureML.\n",
+ "\n",
+ "Main purpose of this script is to extend _plot_velocity()_ and _plot_shotrecord()_ devito [plotting functions](https://github.com/opesci/devito/blob/master/examples/seismic/plotting.py) to allow the mto work in batch mode, i.e. save output to a file."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Overwriting ./../temp/devito_tutorial/01_modelling.py\n"
+ ]
+ }
+ ],
+ "source": [
+ "%%writefile $training_script_full_file_name\n",
+ "\n",
+ "import numpy as np\n",
+ "import os, argparse\n",
+ "\n",
+ "from examples.seismic import Model\n",
+ "from examples.seismic import TimeAxis\n",
+ "from examples.seismic import Receiver\n",
+ "from devito import TimeFunction\n",
+ "from devito import Eq, solve\n",
+ "from devito import Operator\n",
+ "\n",
+ "\n",
+ "# try:\n",
+ "import matplotlib as mpl\n",
+ "import matplotlib.pyplot as plt\n",
+ "from matplotlib import cm\n",
+ "from mpl_toolkits.axes_grid1 import make_axes_locatable\n",
+ "\n",
+ "mpl.rc('font', size=16)\n",
+ "mpl.rc('figure', figsize=(8, 6))\n",
+ "# except:\n",
+ "# plt = None\n",
+ "# cm = None\n",
+ " \n",
+ "\n",
+ "\n",
+ "# \"all\" plotting utils in devito do not save to file, so we extend them here\n",
+ "# https://github.com/opesci/devito/blob/master/examples/seismic/plotting.py\n",
+ "def plot_velocity(model, source=None, receiver=None, colorbar=True, file=None):\n",
+ " \"\"\"\n",
+ " Plot a two-dimensional velocity field from a seismic `Model`\n",
+ " object. Optionally also includes point markers for sources and receivers.\n",
+ "\n",
+ " Parameters\n",
+ " ----------\n",
+ " model : Model\n",
+ " Object that holds the velocity model.\n",
+ " source : array_like or float\n",
+ " Coordinates of the source point.\n",
+ " receiver : array_like or float\n",
+ " Coordinates of the receiver points.\n",
+ " colorbar : bool\n",
+ " Option to plot the colorbar.\n",
+ " \"\"\"\n",
+ " domain_size = 1.e-3 * np.array(model.domain_size)\n",
+ " extent = [model.origin[0], model.origin[0] + domain_size[0],\n",
+ " model.origin[1] + domain_size[1], model.origin[1]]\n",
+ "\n",
+ " plot = plt.imshow(np.transpose(model.vp.data), animated=True, cmap=cm.jet,\n",
+ " vmin=np.min(model.vp.data), vmax=np.max(model.vp.data),\n",
+ " extent=extent)\n",
+ " plt.xlabel('X position (km)')\n",
+ " plt.ylabel('Depth (km)')\n",
+ "\n",
+ " # Plot source points, if provided\n",
+ " if receiver is not None:\n",
+ " plt.scatter(1e-3*receiver[:, 0], 1e-3*receiver[:, 1],\n",
+ " s=25, c='green', marker='D')\n",
+ "\n",
+ " # Plot receiver points, if provided\n",
+ " if source is not None:\n",
+ " plt.scatter(1e-3*source[:, 0], 1e-3*source[:, 1],\n",
+ " s=25, c='red', marker='o')\n",
+ "\n",
+ " # Ensure axis limits\n",
+ " plt.xlim(model.origin[0], model.origin[0] + domain_size[0])\n",
+ " plt.ylim(model.origin[1] + domain_size[1], model.origin[1])\n",
+ "\n",
+ " # Create aligned colorbar on the right\n",
+ " if colorbar:\n",
+ " ax = plt.gca()\n",
+ " divider = make_axes_locatable(ax)\n",
+ " cax = divider.append_axes(\"right\", size=\"5%\", pad=0.05)\n",
+ " cbar = plt.colorbar(plot, cax=cax)\n",
+ " cbar.set_label('Velocity (km/s)')\n",
+ " plt.show()\n",
+ " \n",
+ " if file is not None:\n",
+ " plt.savefig(file)\n",
+ " print('plotted image saved as {} file'.format(file))\n",
+ " \n",
+ " plt.clf()\n",
+ "\n",
+ "def plot_shotrecord(rec, model, t0, tn, colorbar=True, file=None):\n",
+ " \"\"\"\n",
+ " Plot a shot record (receiver values over time).\n",
+ "\n",
+ " Parameters\n",
+ " ----------\n",
+ " rec :\n",
+ " Receiver data with shape (time, points).\n",
+ " model : Model\n",
+ " object that holds the velocity model.\n",
+ " t0 : int\n",
+ " Start of time dimension to plot.\n",
+ " tn : int\n",
+ " End of time dimension to plot.\n",
+ " \"\"\"\n",
+ " scale = np.max(rec) / 10.\n",
+ " extent = [model.origin[0], model.origin[0] + 1e-3*model.domain_size[0],\n",
+ " 1e-3*tn, t0]\n",
+ "\n",
+ " plot = plt.imshow(rec, vmin=-scale, vmax=scale, cmap=cm.gray, extent=extent)\n",
+ " plt.xlabel('X position (km)')\n",
+ " plt.ylabel('Time (s)')\n",
+ "\n",
+ " # Create aligned colorbar on the right\n",
+ " if colorbar:\n",
+ " ax = plt.gca()\n",
+ " divider = make_axes_locatable(ax)\n",
+ " cax = divider.append_axes(\"right\", size=\"5%\", pad=0.05)\n",
+ " plt.colorbar(plot, cax=cax)\n",
+ " plt.show() \n",
+ " \n",
+ " if file is not None:\n",
+ " plt.savefig(file)\n",
+ " print('plotted image saved as {} file'.format(file))\n",
+ " \n",
+ " plt.clf()\n",
+ "\n",
+ "def main(output_folder): \n",
+ " # 1. Define the physical problem\n",
+ " # The first step is to define the physical model:\n",
+ " # - physical dimensions of interest\n",
+ " # - velocity profile of this physical domain\n",
+ "\n",
+ " # Define a physical size\n",
+ " shape = (101, 101) # Number of grid point (nx, nz)\n",
+ " spacing = (10., 10.) # Grid spacing in m. The domain size is now 1km by 1km\n",
+ " origin = (0., 0.) # What is the location of the top left corner. This is necessary to define\n",
+ " # the absolute location of the source and receivers\n",
+ "\n",
+ " # Define a velocity profile. The velocity is in km/s\n",
+ " v = np.empty(shape, dtype=np.float32)\n",
+ " v[:, :51] = 1.5\n",
+ " v[:, 51:] = 2.5\n",
+ "\n",
+ " # With the velocity and model size defined, we can create the seismic model that\n",
+ " # encapsulates this properties. We also define the size of the absorbing layer as 10 grid points\n",
+ " model = Model(vp=v, origin=origin, shape=shape, spacing=spacing,\n",
+ " space_order=2, nbpml=10)\n",
+ "\n",
+ " plot_velocity(model, \n",
+ " file= os.path.join(*( [output_folder,'output000.png'])))\n",
+ " \n",
+ " # 2. Acquisition geometry\n",
+ " t0 = 0. # Simulation starts a t=0\n",
+ " tn = 1000. # Simulation last 1 second (1000 ms)\n",
+ " dt = model.critical_dt # Time step from model grid spacing\n",
+ "\n",
+ " time_range = TimeAxis(start=t0, stop=tn, step=dt)\n",
+ " from examples.seismic import RickerSource\n",
+ "\n",
+ " f0 = 0.010 # Source peak frequency is 10Hz (0.010 kHz)\n",
+ " src = RickerSource(name='src', grid=model.grid, f0=f0,\n",
+ " npoint=1, time_range=time_range)\n",
+ "\n",
+ " # First, position source centrally in all dimensions, then set depth\n",
+ " src.coordinates.data[0, :] = np.array(model.domain_size) * .5\n",
+ " src.coordinates.data[0, -1] = 20. # Depth is 20m\n",
+ "\n",
+ " # We can plot the time signature to see the wavelet\n",
+ "# src.show()\n",
+ "\n",
+ " # Create symbol for 101 receivers\n",
+ " rec = Receiver(name='rec', grid=model.grid, npoint=101, time_range=time_range)\n",
+ "\n",
+ " # Prescribe even spacing for receivers along the x-axis\n",
+ " rec.coordinates.data[:, 0] = np.linspace(0, model.domain_size[0], num=101)\n",
+ " rec.coordinates.data[:, 1] = 20. # Depth is 20m\n",
+ "\n",
+ " # We can now show the source and receivers within our domain:\n",
+ " # Red dot: Source location\n",
+ " # Green dots: Receiver locations (every 4th point)\n",
+ " plot_velocity(model, source=src.coordinates.data,\n",
+ " receiver=rec.coordinates.data[::4, :], \n",
+ " file= os.path.join(*( [output_folder,'output010.png'])))\n",
+ " \n",
+ " # Define the wavefield with the size of the model and the time dimension\n",
+ " u = TimeFunction(name=\"u\", grid=model.grid, time_order=2, space_order=2)\n",
+ "\n",
+ " # We can now write the PDE\n",
+ " pde = model.m * u.dt2 - u.laplace + model.damp * u.dt\n",
+ "\n",
+ " # The PDE representation is as on paper\n",
+ " pde\n",
+ " \n",
+ " # This discrete PDE can be solved in a time-marching way updating u(t+dt) from the previous time step\n",
+ " # Devito as a shortcut for u(t+dt) which is u.forward. We can then rewrite the PDE as \n",
+ " # a time marching updating equation known as a stencil using customized SymPy functions\n",
+ "\n",
+ " stencil = Eq(u.forward, solve(pde, u.forward))\n",
+ " # Finally we define the source injection and receiver read function to generate the corresponding code\n",
+ " src_term = src.inject(field=u.forward, expr=src * dt**2 / model.m)\n",
+ "\n",
+ " # Create interpolation expression for receivers\n",
+ " rec_term = rec.interpolate(expr=u.forward)\n",
+ "\n",
+ " op = Operator([stencil] + src_term + rec_term, subs=model.spacing_map)\n",
+ " \n",
+ " op(time=time_range.num-1, dt=model.critical_dt)\n",
+ " plot_shotrecord(rec.data, model, t0, tn, \n",
+ " file= os.path.join(*( [output_folder,'output020.png'])))\n",
+ "\n",
+ "if __name__ == \"__main__\":\n",
+ " parser = argparse.ArgumentParser()\n",
+ " parser.add_argument('--output_folder', type=str, nargs='?', \\\n",
+ " dest='output_folder', help='ouput artifacts location',\\\n",
+ " default='.')\n",
+ " args = parser.parse_args()\n",
+ " \n",
+ " main(args.output_folder)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "##### Get experimentation docker image for devito"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'fwi01_azureml:sdk.v1.0.76'"
+ ]
+ },
+ "execution_count": 9,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "text/plain": [
+ "'fwi01acr.azurecr.io/fwi01_azureml:sdk.v1.0.76'"
+ ]
+ },
+ "execution_count": 9,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "docker_repo_name = os.getenv('ACR_NAME')+'.azurecr.io' # or os.getenv('DOCKER_LOGIN')\n",
+ "\n",
+ "docker_image_name = os.getenv('EXPERIMENTATION_DOCKER_IMAGE_NAME')\n",
+ "\n",
+ "image_version = os.getenv('EXPERIMENTATION_DOCKER_IMAGE_TAG')\n",
+ "if image_version!=\"\":\n",
+ " docker_image_name = docker_image_name +':'+ image_version\n",
+ "\n",
+ "full_docker_image_name = docker_repo_name + '/' + docker_image_name\n",
+ " \n",
+ "docker_image_name\n",
+ "full_docker_image_name"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Extract/decide the python path in custom docker image that corresponds to desired conda environment. Without this, AzureML tries to create a separate environment."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'docker run -i --rm --name fwi01_azureml_container02 fwi01acr.azurecr.io/fwi01_azureml:sdk.v1.0.76 /bin/bash -c \"which python\" '"
+ ]
+ },
+ "execution_count": 10,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "text/plain": [
+ "'/opt/conda/envs/fwi01_conda_env/bin/python'"
+ ]
+ },
+ "execution_count": 10,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "get_Python_path_command='docker run -i --rm --name fwi01_azureml_container02 '+ \\\n",
+ "full_docker_image_name + \\\n",
+ "' /bin/bash -c \"which python\" '\n",
+ "get_Python_path_command\n",
+ "\n",
+ "\n",
+ "import subprocess\n",
+ "python_path_in_docker_image = subprocess.check_output(get_Python_path_command,shell=True,stderr=subprocess.STDOUT).\\\n",
+ "decode('utf-8').strip()\n",
+ "python_path_in_docker_image"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "\n",
+ "#### Create azureml_script_file that invokes:\n",
+ " - devito exclusive custom edited training_script_file\n",
+ " - unedited devito notebooks via papermill (invoked via cli and via ppapermill python API)\n",
+ "\n",
+ "[Back](#devito_in_AzureML_demoing_modes) to notebook summary."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 11,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Overwriting ./../temp/devito_tutorial/azureml_01_modelling.py\n"
+ ]
+ }
+ ],
+ "source": [
+ "%%writefile $azureml_training_script_full_file_name\n",
+ "\n",
+ "import argparse\n",
+ "import os\n",
+ "os.system('conda env list')\n",
+ "\n",
+ "import azureml.core;\n",
+ "from azureml.core.run import Run\n",
+ "\n",
+ "print(azureml.core.VERSION)\n",
+ "\n",
+ "parser = argparse.ArgumentParser()\n",
+ "parser.add_argument('--output_folder', type=str, dest='output_folder', help='ouput artifacts location')\n",
+ "\n",
+ "args = parser.parse_args()\n",
+ "print('args.output_folder is {} but it will be ignored since AzureML_tracked ./outputs will be used'.format(args.output_folder))\n",
+ "\n",
+ "# get the Azure ML run object\n",
+ "run = Run.get_context()\n",
+ "\n",
+ "# ./outputs/ folder is autotracked so should get uploaded at the end of the run\n",
+ "output_dir_AzureML_tracked = './outputs'\n",
+ "\n",
+ "crt_dir = os.getcwd()\n",
+ "\n",
+ "cli_command= \\\n",
+ "'cd /devito; /opt/conda/envs/fwi01_conda_env/bin/python '+ crt_dir +'/01_modelling.py' + \\\n",
+ "' --output_folder '+ crt_dir + output_dir_AzureML_tracked+ '/' + \\\n",
+ "' > '+ crt_dir + output_dir_AzureML_tracked + '/01_modelling.log' \n",
+ "# + \\\n",
+ "# ' 2>&1 ' + crt_dir +'/'+ output_dir_AzureML_tracked + '/devito_cli_py.log'\n",
+ "print('Running devito from cli on 01_modelling.py----BEGIN-----:') \n",
+ "print(cli_command); print('\\n');os.system(cli_command)\n",
+ "print('Running devito from cli on 01_modelling.py----END-----:\\n\\n')\n",
+ "\n",
+ "cli_command= \\\n",
+ "'cd /devito; papermill ' + \\\n",
+ "'./examples/seismic/tutorials/02_rtm.ipynb '+\\\n",
+ "crt_dir +'/outputs/02_rtm_output.ipynb ' + \\\n",
+ "'--log-output --no-progress-bar --kernel python3 ' + \\\n",
+ "' > '+ crt_dir + output_dir_AzureML_tracked + '/02_rtm_output.log' \n",
+ "# + \\\n",
+ "# ' 2>&1 ' + crt_dir +'/'+ output_dir_AzureML_tracked + '/papermill_cli.log'\n",
+ "\n",
+ "# FIXME - activate right conda env for running papermill from cli\n",
+ "activate_right_conda_env_fixed = False\n",
+ "if activate_right_conda_env_fixed:\n",
+ " print('Running papermill from cli on 02_rtm.ipynb----BEGIN-----:') \n",
+ " print(cli_command); print('\\n');os.system(cli_command)\n",
+ " print('Running papermill from cli on 02_rtm.ipynb----END-----:\\n\\n') \n",
+ "\n",
+ "\n",
+ "print('Running papermill from Python API on 03_fwi.ipynb----BEGIN-----:') \n",
+ "import papermill as pm\n",
+ "os.chdir('/devito')\n",
+ "pm.execute_notebook(\n",
+ " './examples/seismic/tutorials/03_fwi.ipynb',\n",
+ " crt_dir +'/outputs/03_fwi_output.ipynb'\n",
+ ")\n",
+ "print('Running papermill from Python API on 03_fwi.ipynb----END-----:') \n",
+ "\n",
+ "print('Running papermill from Python API on 04_dask.ipynb----BEGIN-----:') \n",
+ "import papermill as pm\n",
+ "os.chdir('/devito')\n",
+ "pm.execute_notebook(\n",
+ " './examples/seismic/tutorials/04_dask.ipynb',\n",
+ " crt_dir +'/outputs/04_dask_output.ipynb'\n",
+ ")\n",
+ "print('Running papermill from Python API on 04_dask.ipynb----END-----:') \n",
+ " \n",
+ "\n",
+ "os.system('pwd')\n",
+ "os.system('ls -l /')\n",
+ "os.system('ls -l ./')\n",
+ "os.system('ls -l ' +crt_dir + output_dir_AzureML_tracked)\n",
+ "run.log('training_message01: ', 'finished experiment')\n",
+ "print('\\n')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 12,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "['azureml_01_modelling.py', '01_modelling.py']"
+ ]
+ },
+ "execution_count": 12,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "script_path=os.path.join(*(script_folder))\n",
+ "os.listdir(script_path)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Initialize workspace\n",
+ "\n",
+ "Initialize a workspace object from persisted configuration. If you are using an Azure Machine Learning Notebook VM, you are all set. Otherwise, make sure the config file is present at .\\config.json"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 13,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "WARNING - Warning: Falling back to use azure cli login credentials.\n",
+ "If you run your code in unattended mode, i.e., where you can't give a user input, then we recommend to use ServicePrincipalAuthentication or MsiAuthentication.\n",
+ "Please refer to aka.ms/aml-notebook-auth for different authentication mechanisms in azureml-sdk.\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Workspace name: ghiordanfwiws\n",
+ "Azure region: eastus2\n",
+ "Subscription id: 7899\n"
+ ]
+ }
+ ],
+ "source": [
+ "ws = Workspace.from_config(\n",
+ " path=os.path.join(os.getcwd(),\n",
+ " os.path.join(*([workspace_config_dir, '.azureml', workspace_config_file]))))\n",
+ "print('Workspace name: ' + ws.name, \n",
+ " 'Azure region: ' + ws.location, \n",
+ " 'Subscription id: ' + ws.subscription_id[0:4], sep = '\\n')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Create an Azure ML experiment\n",
+ "Let's create an experiment named \"tf-mnist\" and a folder to hold the training scripts. The script runs will be recorded under the experiment in Azure."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 14,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "exp = Experiment(workspace=ws, name=experimentName)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Retrieve or create a Azure Machine Learning compute\n",
+ "Azure Machine Learning Compute is a service for provisioning and managing clusters of Azure virtual machines for running machine learning workloads. Let's create a new Azure Machine Learning Compute in the current workspace, if it doesn't already exist. We will then run the training script on this compute target.\n",
+ "\n",
+ "If we could not find the compute with the given name in the previous cell, then we will create a new compute here. This process is broken down into the following steps:\n",
+ "\n",
+ "1. Create the configuration\n",
+ "2. Create the Azure Machine Learning compute\n",
+ "\n",
+ "**This process will take a few minutes and is providing only sparse output in the process. Please make sure to wait until the call returns before moving to the next cell.**"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 15,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'gpuclstfwi07'"
+ ]
+ },
+ "execution_count": 15,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "gpu_cluster_name = os.getenv('GPU_CLUSTER_NAME')\n",
+ "gpu_cluster_name = 'gpuclstfwi07'\n",
+ "gpu_cluster_name"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 16,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Found existing gpu cluster\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Verify that cluster does not exist already\n",
+ "max_nodes_value = 2\n",
+ "try:\n",
+ " gpu_cluster = ComputeTarget(workspace=ws, name=gpu_cluster_name)\n",
+ " print(\"Found existing gpu cluster\")\n",
+ "except ComputeTargetException:\n",
+ " print(\"Could not find ComputeTarget cluster!\")\n",
+ " \n",
+ "# # Create a new gpucluster using code below\n",
+ "# # Specify the configuration for the new cluster\n",
+ "# compute_config = AmlCompute.provisioning_configuration(vm_size=\"Standard_NC6\",\n",
+ "# min_nodes=0,\n",
+ "# max_nodes=max_nodes_value)\n",
+ "# # Create the cluster with the specified name and configuration\n",
+ "# gpu_cluster = ComputeTarget.create(ws, gpu_cluster_name, compute_config)\n",
+ "\n",
+ "# # Wait for the cluster to complete, show the output log\n",
+ "# gpu_cluster.wait_for_completion(show_output=True)\n",
+ " \n",
+ " \n",
+ "# for demo purposes, show how clsuter properties can be altered post-creation\n",
+ "gpu_cluster.update(min_nodes=0, max_nodes=max_nodes_value, idle_seconds_before_scaledown=1200)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "#### Create an Azure ML SDK estimator with custom docker image "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 17,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "application/vnd.jupyter.widget-view+json": {
+ "model_id": "a0312dfcb82f419288e3c3c37c39b9dd",
+ "version_major": 2,
+ "version_minor": 0
+ },
+ "text/plain": [
+ "_UserRunWidget(widget_settings={'childWidgetDisplay': 'popup', 'send_telemetry': False, 'log_level': 'NOTSET',…"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "application/aml.mini.widget.v1": "{\"status\": \"Running\", \"workbench_run_details_uri\": \"https://ml.azure.com/experiments/020_AzureMLEstimator/runs/020_AzureMLEstimator_1575678435_be18a2fc?wsid=/subscriptions/789908e0-5fc2-4c4d-b5f5-9764b0d602b3/resourcegroups/ghiordanfwirsg01/workspaces/ghiordanfwiws\", \"run_id\": \"020_AzureMLEstimator_1575678435_be18a2fc\", \"run_properties\": {\"run_id\": \"020_AzureMLEstimator_1575678435_be18a2fc\", \"created_utc\": \"2019-12-07T00:27:18.102865Z\", \"properties\": {\"_azureml.ComputeTargetType\": \"amlcompute\", \"ContentSnapshotId\": \"a5071b2a-37a7-40da-8340-69cc894091cb\", \"azureml.git.repository_uri\": \"git@github.com:georgeAccnt-GH/DeepSeismic.git\", \"mlflow.source.git.repoURL\": \"git@github.com:georgeAccnt-GH/DeepSeismic.git\", \"azureml.git.branch\": \"staging\", \"mlflow.source.git.branch\": \"staging\", \"azureml.git.commit\": \"1d3cd3340f4063508b6f707d5fc2a35f5429a07f\", \"mlflow.source.git.commit\": \"1d3cd3340f4063508b6f707d5fc2a35f5429a07f\", \"azureml.git.dirty\": \"True\", \"ProcessInfoFile\": \"azureml-logs/process_info.json\", \"ProcessStatusFile\": \"azureml-logs/process_status.json\"}, \"tags\": {\"_aml_system_ComputeTargetStatus\": \"{\\\"AllocationState\\\":\\\"steady\\\",\\\"PreparingNodeCount\\\":1,\\\"RunningNodeCount\\\":1,\\\"CurrentNodeCount\\\":2}\"}, \"script_name\": null, \"arguments\": null, \"end_time_utc\": null, \"status\": \"Running\", \"log_files\": {\"azureml-logs/55_azureml-execution-tvmps_e010639b61f121ff1dbd780d646c8bd4bc6a423228429632e00c37ab5e150756_p.txt\": \"https://ghiordanstoragee145cef0b.blob.core.windows.net/azureml/ExperimentRun/dcid.020_AzureMLEstimator_1575678435_be18a2fc/azureml-logs/55_azureml-execution-tvmps_e010639b61f121ff1dbd780d646c8bd4bc6a423228429632e00c37ab5e150756_p.txt?sv=2019-02-02&sr=b&sig=99MfEJ4IvLwXgM3jjLm4amfljnv7gOK3%2BQPb1GN%2BZKg%3D&st=2019-12-07T00%3A22%3A27Z&se=2019-12-07T08%3A32%3A27Z&sp=r\"}, \"log_groups\": [[\"azureml-logs/55_azureml-execution-tvmps_e010639b61f121ff1dbd780d646c8bd4bc6a423228429632e00c37ab5e150756_p.txt\"]], \"run_duration\": \"0:05:10\"}, \"child_runs\": [], \"children_metrics\": {}, \"run_metrics\": [], \"run_logs\": \"2019-12-07T00:31:04Z Starting output-watcher...\\nLogin Succeeded\\nsdk.v1.0.76: Pulling from fwi01_azureml\\n1ab2bdfe9778: Pulling fs layer\\ndd7d28bd8be5: Pulling fs layer\\naf998e3a361b: Pulling fs layer\\n8f61820757bf: Pulling fs layer\\n0eb461057035: Pulling fs layer\\n23276e49c76d: Pulling fs layer\\nc55ca301ea9f: Pulling fs layer\\n0eb461057035: Waiting\\n8f61820757bf: Waiting\\nc55ca301ea9f: Waiting\\n1ab2bdfe9778: Verifying Checksum\\n1ab2bdfe9778: Download complete\\naf998e3a361b: Verifying Checksum\\naf998e3a361b: Download complete\\n0eb461057035: Verifying Checksum\\n0eb461057035: Download complete\\ndd7d28bd8be5: Verifying Checksum\\ndd7d28bd8be5: Download complete\\n1ab2bdfe9778: Pull complete\\n8f61820757bf: Verifying Checksum\\n8f61820757bf: Download complete\\ndd7d28bd8be5: Pull complete\\nc55ca301ea9f: Verifying Checksum\\nc55ca301ea9f: Download complete\\n23276e49c76d: Verifying Checksum\\n23276e49c76d: Download complete\\naf998e3a361b: Pull complete\\n8f61820757bf: Pull complete\\n0eb461057035: Pull complete\\n23276e49c76d: Pull complete\\n\", \"graph\": {}, \"widget_settings\": {\"childWidgetDisplay\": \"popup\", \"send_telemetry\": false, \"log_level\": \"NOTSET\", \"sdk_version\": \"1.0.76\"}, \"loading\": false}"
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "# use a custom Docker image\n",
+ "from azureml.core.container_registry import ContainerRegistry\n",
+ "\n",
+ "image_name = docker_image_name\n",
+ "\n",
+ "# you can also point to an image in a private ACR\n",
+ "image_registry_details = ContainerRegistry()\n",
+ "image_registry_details.address = docker_repo_name\n",
+ "image_registry_details.username = os.getenv('ACR_USERNAME')\n",
+ "image_registry_details.password = os.getenv('ACR_PASSWORD') \n",
+ "\n",
+ "# don't let the system build a new conda environment\n",
+ "user_managed_dependencies = True\n",
+ "\n",
+ "# submit to a local Docker container. if you don't have Docker engine running locally, you can set compute_target to cpu_cluster.\n",
+ "script_params = {\n",
+ " '--output_folder': 'some_folder'\n",
+ "}\n",
+ "\n",
+ "\n",
+ "# distributed_training_conf = MpiConfiguration()\n",
+ "# distributed_training_conf.process_count_per_node = 2\n",
+ "\n",
+ "est = Estimator(source_directory=script_path, \n",
+ " compute_target=gpu_cluster,#'local', #gpu_cluster, \n",
+ " entry_script=azureml_training_script_file,\n",
+ " script_params=script_params,\n",
+ " use_docker=True,\n",
+ " custom_docker_image=image_name,\n",
+ " # uncomment below line to use your private ACR\n",
+ " image_registry_details=image_registry_details, \n",
+ " user_managed=user_managed_dependencies,\n",
+ " distributed_training=None,\n",
+ " node_count=1\n",
+ " )\n",
+ "est.run_config.environment.python.interpreter_path = python_path_in_docker_image\n",
+ "\n",
+ "run = exp.submit(est)\n",
+ "RunDetails(run).show()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "One can use the above link to currrent experiment run in Azure Portal to see tracked metrics, and images and output notebooks saved by azureml_training_script_full_file_name in {run_dir}/outputs on the remote compute target that are automatically saved by AzureML in the run history Azure portal pages."
+ ]
+ },
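+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "The next cell is an illustrative sketch (assuming only the `run` object from the cell above): it prints the Azure Portal link referenced above and the metrics logged by the remote script."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Illustrative sketch: inspect the run from the SDK instead of the portal.\n",
+ "# get_portal_url() returns the Azure Portal link mentioned above;\n",
+ "# get_metrics() returns the metrics logged remotely (e.g. 'training_message01: ').\n",
+ "print(run.get_portal_url())\n",
+ "print(run.get_metrics())"
+ ]
+ },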
+ {
+ "cell_type": "code",
+ "execution_count": 18,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "response = run.wait_for_completion(show_output=False)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 19,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Final print 9, time 20.798 seconds: Counter({'Completed': 1})\r"
+ ]
+ }
+ ],
+ "source": [
+ "import time\n",
+ "from collections import Counter\n",
+ "#wait till all jobs finished\n",
+ "\n",
+ "def wait_for_run_list_to_finish(the_run_list):\n",
+ " finished_status_list = ['Completed', 'Failed']\n",
+ " printing_counter = 0\n",
+ " start_time = time.time()\n",
+ " while (not all((crt_queried_job.get_status() in finished_status_list) for crt_queried_job in the_run_list)):\n",
+ " time.sleep(2)\n",
+ " printing_counter+= 1\n",
+ " print('print {0:.0f}, time {1:.3f} seconds: {2}'.format(printing_counter, time.time() - start_time, \n",
+ " str(Counter([crt_queried_job.get_status() for crt_queried_job in the_run_list]))), end=\"\\r\")\n",
+ "# final status\n",
+ " print('Final print {0:.0f}, time {1:.3f} seconds: {2}'.format(printing_counter, time.time() - start_time, \n",
+ " str(Counter([crt_queried_job.get_status() for crt_queried_job in the_run_list]))), end=\"\\r\") \n",
+ "wait_for_run_list_to_finish([run])"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 20,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "run_duration in seconds 243.960763\n",
+ "run_duration= 4m 3.961s\n"
+ ]
+ }
+ ],
+ "source": [
+ "import datetime, math\n",
+ "def get_run_duration(azureml_exp_run):\n",
+ " run_details = azureml_exp_run.get_details()\n",
+ " run_duration = datetime.datetime.strptime(run_details['endTimeUtc'], \"%Y-%m-%dT%H:%M:%S.%fZ\") - \\\n",
+ " datetime.datetime.strptime(run_details['startTimeUtc'], \"%Y-%m-%dT%H:%M:%S.%fZ\")\n",
+ " return run_duration.total_seconds()\n",
+ "run_duration = get_run_duration(run)\n",
+ "\n",
+ "run_seconds, run_minutes = math.modf(run_duration/60)\n",
+ "print('run_duration in seconds {}'.format(run_duration))\n",
+ "print('run_duration= {0:.0f}m {1:.3f}s'.format(run_minutes, run_seconds*60))\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 21,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Showing details for run 498\n"
+ ]
+ },
+ {
+ "data": {
+ "application/vnd.jupyter.widget-view+json": {
+ "model_id": "cd44e7b0a1c447dabe98bf114f420d76",
+ "version_major": 2,
+ "version_minor": 0
+ },
+ "text/plain": [
+ "_UserRunWidget(widget_settings={'childWidgetDisplay': 'popup', 'send_telemetry': False, 'log_level': 'NOTSET',…"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "application/aml.mini.widget.v1": "{\"status\": \"Completed\", \"workbench_run_details_uri\": \"https://ml.azure.com/experiments/020_AzureMLEstimator/runs/020_AzureMLEstimator_1575683693_ddd16e31?wsid=/subscriptions/789908e0-5fc2-4c4d-b5f5-9764b0d602b3/resourcegroups/ghiordanfwirsg01/workspaces/ghiordanfwiws\", \"run_id\": \"020_AzureMLEstimator_1575683693_ddd16e31\", \"run_properties\": {\"run_id\": \"020_AzureMLEstimator_1575683693_ddd16e31\", \"created_utc\": \"2019-12-07T01:54:55.33033Z\", \"properties\": {\"_azureml.ComputeTargetType\": \"amlcompute\", \"ContentSnapshotId\": \"a5071b2a-37a7-40da-8340-69cc894091cb\", \"azureml.git.repository_uri\": \"git@github.com:georgeAccnt-GH/DeepSeismic.git\", \"mlflow.source.git.repoURL\": \"git@github.com:georgeAccnt-GH/DeepSeismic.git\", \"azureml.git.branch\": \"staging\", \"mlflow.source.git.branch\": \"staging\", \"azureml.git.commit\": \"1d3cd3340f4063508b6f707d5fc2a35f5429a07f\", \"mlflow.source.git.commit\": \"1d3cd3340f4063508b6f707d5fc2a35f5429a07f\", \"azureml.git.dirty\": \"True\", \"ProcessInfoFile\": \"azureml-logs/process_info.json\", \"ProcessStatusFile\": \"azureml-logs/process_status.json\"}, \"tags\": {}, \"script_name\": null, \"arguments\": null, \"end_time_utc\": \"2019-12-07T01:56:48.811115Z\", \"status\": \"Completed\", \"log_files\": {\"azureml-logs/55_azureml-execution-tvmps_01b47c06fd150418ce69a91b330cb6996c9e9e076f7368a183a2f9a708f17ccb_p.txt\": \"https://ghiordanstoragee145cef0b.blob.core.windows.net/azureml/ExperimentRun/dcid.020_AzureMLEstimator_1575683693_ddd16e31/azureml-logs/55_azureml-execution-tvmps_01b47c06fd150418ce69a91b330cb6996c9e9e076f7368a183a2f9a708f17ccb_p.txt?sv=2019-02-02&sr=b&sig=9mQARzuRlCW%2F%2Brv3FDzJvm%2Fsaudk6GFjNypMRkV3O8g%3D&st=2019-12-07T01%3A46%3A50Z&se=2019-12-07T09%3A56%3A50Z&sp=r\", \"azureml-logs/65_job_prep-tvmps_01b47c06fd150418ce69a91b330cb6996c9e9e076f7368a183a2f9a708f17ccb_p.txt\": \"https://ghiordanstoragee145cef0b.blob.core.windows.net/azureml/ExperimentRun/dcid.020_AzureMLEstimator_1575683693_ddd16e31/azureml-logs/65_job_prep-tvmps_01b47c06fd150418ce69a91b330cb6996c9e9e076f7368a183a2f9a708f17ccb_p.txt?sv=2019-02-02&sr=b&sig=TMxrg26ywABOyJtGYT3KVLrGP0TYIHQ9E3ePlr%2BQepg%3D&st=2019-12-07T01%3A46%3A50Z&se=2019-12-07T09%3A56%3A50Z&sp=r\", \"azureml-logs/70_driver_log.txt\": \"https://ghiordanstoragee145cef0b.blob.core.windows.net/azureml/ExperimentRun/dcid.020_AzureMLEstimator_1575683693_ddd16e31/azureml-logs/70_driver_log.txt?sv=2019-02-02&sr=b&sig=vWkErsH55%2BLhIG%2FBJbtZb8NSNHFyNAzxk5VjW4p6lcM%3D&st=2019-12-07T01%3A46%3A50Z&se=2019-12-07T09%3A56%3A50Z&sp=r\", \"azureml-logs/75_job_post-tvmps_01b47c06fd150418ce69a91b330cb6996c9e9e076f7368a183a2f9a708f17ccb_p.txt\": \"https://ghiordanstoragee145cef0b.blob.core.windows.net/azureml/ExperimentRun/dcid.020_AzureMLEstimator_1575683693_ddd16e31/azureml-logs/75_job_post-tvmps_01b47c06fd150418ce69a91b330cb6996c9e9e076f7368a183a2f9a708f17ccb_p.txt?sv=2019-02-02&sr=b&sig=cbDgvPNn4LNXDsUXZwmWCjRMj0O9PnFSqSCtuCPMTFo%3D&st=2019-12-07T01%3A46%3A50Z&se=2019-12-07T09%3A56%3A50Z&sp=r\", \"azureml-logs/process_info.json\": \"https://ghiordanstoragee145cef0b.blob.core.windows.net/azureml/ExperimentRun/dcid.020_AzureMLEstimator_1575683693_ddd16e31/azureml-logs/process_info.json?sv=2019-02-02&sr=b&sig=wvqhR%2Bnzw0uLEsCGETAxkKrdwN5eI%2FgvTeB4juQ4aUI%3D&st=2019-12-07T01%3A46%3A50Z&se=2019-12-07T09%3A56%3A50Z&sp=r\", \"azureml-logs/process_status.json\": 
\"https://ghiordanstoragee145cef0b.blob.core.windows.net/azureml/ExperimentRun/dcid.020_AzureMLEstimator_1575683693_ddd16e31/azureml-logs/process_status.json?sv=2019-02-02&sr=b&sig=kkirWrsrpjcrKndUUPxuJVeRWu0GthsVZ4cXpxbEGMg%3D&st=2019-12-07T01%3A46%3A50Z&se=2019-12-07T09%3A56%3A50Z&sp=r\", \"logs/azureml/728_azureml.log\": \"https://ghiordanstoragee145cef0b.blob.core.windows.net/azureml/ExperimentRun/dcid.020_AzureMLEstimator_1575683693_ddd16e31/logs/azureml/728_azureml.log?sv=2019-02-02&sr=b&sig=pK%2F6TBBvQEPexjuRPR1FyOq6CUPXfnNBobkTmpmaeiM%3D&st=2019-12-07T01%3A46%3A50Z&se=2019-12-07T09%3A56%3A50Z&sp=r\", \"logs/azureml/azureml.log\": \"https://ghiordanstoragee145cef0b.blob.core.windows.net/azureml/ExperimentRun/dcid.020_AzureMLEstimator_1575683693_ddd16e31/logs/azureml/azureml.log?sv=2019-02-02&sr=b&sig=o%2BPcdcJvKZyQWRA0HpaJbM%2BxhqFOkdDjgBqtxtHtoag%3D&st=2019-12-07T01%3A46%3A50Z&se=2019-12-07T09%3A56%3A50Z&sp=r\"}, \"log_groups\": [[\"azureml-logs/process_info.json\", \"azureml-logs/process_status.json\", \"logs/azureml/azureml.log\"], [\"azureml-logs/55_azureml-execution-tvmps_01b47c06fd150418ce69a91b330cb6996c9e9e076f7368a183a2f9a708f17ccb_p.txt\"], [\"azureml-logs/65_job_prep-tvmps_01b47c06fd150418ce69a91b330cb6996c9e9e076f7368a183a2f9a708f17ccb_p.txt\"], [\"azureml-logs/70_driver_log.txt\"], [\"azureml-logs/75_job_post-tvmps_01b47c06fd150418ce69a91b330cb6996c9e9e076f7368a183a2f9a708f17ccb_p.txt\"], [\"logs/azureml/728_azureml.log\"]], \"run_duration\": \"0:01:53\"}, \"child_runs\": [], \"children_metrics\": {}, \"run_metrics\": [{\"name\": \"training_message01: \", \"run_id\": \"020_AzureMLEstimator_1575683693_ddd16e31\", \"categories\": [0], \"series\": [{\"data\": [\"finished experiment\"]}]}], \"run_logs\": \"2019-12-07 01:55:16,975|azureml|DEBUG|Inputs:: kwargs: {'OutputCollection': True, 'snapshotProject': True, 'only_in_process_features': True, 'skip_track_logs_dir': True}, track_folders: None, deny_list: None, directories_to_watch: []\\n2019-12-07 01:55:16,976|azureml.history._tracking.PythonWorkingDirectory|DEBUG|Execution target type: batchai\\n2019-12-07 01:55:16,976|azureml.history._tracking.PythonWorkingDirectory|DEBUG|Failed to import pyspark with error: No module named 'pyspark'\\n2019-12-07 01:55:16,976|azureml.history._tracking.PythonWorkingDirectory.workingdir|DEBUG|Pinning working directory for filesystems: ['pyfs']\\n2019-12-07 01:55:17,242|azureml._base_sdk_common.user_agent|DEBUG|Fetching client info from /root/.azureml/clientinfo.json\\n2019-12-07 01:55:17,243|azureml._base_sdk_common.user_agent|DEBUG|Error loading client info: [Errno 2] No such file or directory: '/root/.azureml/clientinfo.json'\\n2019-12-07 01:55:17,566|azureml.core._experiment_method|DEBUG|Trying to register submit_function search, on method \\n2019-12-07 01:55:17,566|azureml.core._experiment_method|DEBUG|Registered submit_function search, on method \\n2019-12-07 01:55:17,566|azureml.core._experiment_method|DEBUG|Trying to register submit_function search, on method \\n2019-12-07 01:55:17,566|azureml.core._experiment_method|DEBUG|Registered submit_function search, on method \\n2019-12-07 01:55:17,566|azureml.core.run|DEBUG|Adding new factory for run source hyperdrive\\n2019-12-07 01:55:18,070|azureml.core.run|DEBUG|Adding new factory for run source azureml.PipelineRun\\n2019-12-07 01:55:18,075|azureml.core.run|DEBUG|Adding new factory for run source azureml.ReusedStepRun\\n2019-12-07 01:55:18,078|azureml.core.run|DEBUG|Adding new factory for run source azureml.StepRun\\n2019-12-07 
01:55:18,082|azureml.core.run|DEBUG|Adding new factory for run source azureml.scriptrun\\n2019-12-07 01:55:18,083|azureml.core.authentication.TokenRefresherDaemon|DEBUG|Starting daemon and triggering first instance\\n2019-12-07 01:55:18,088|msrest.universal_http.requests|DEBUG|Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\\n2019-12-07 01:55:18,089|azureml._restclient.clientbase|INFO|Created a worker pool for first use\\n2019-12-07 01:55:18,089|azureml.core.authentication|DEBUG|Time to expire 1814376.910384 seconds\\n2019-12-07 01:55:18,089|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-07 01:55:18,089|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-07 01:55:18,089|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-07 01:55:18,089|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-07 01:55:18,090|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-07 01:55:18,090|azureml._base_sdk_common.service_discovery|DEBUG|Constructing mms service url in from history url environment variable None, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-07 01:55:18,090|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-07 01:55:18,090|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-07 01:55:18,090|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-07 01:55:18,118|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-07 01:55:18,122|msrest.universal_http.requests|DEBUG|Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\\n2019-12-07 01:55:18,128|msrest.universal_http.requests|DEBUG|Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\\n2019-12-07 01:55:18,132|msrest.universal_http.requests|DEBUG|Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\\n2019-12-07 01:55:18,136|msrest.universal_http.requests|DEBUG|Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\\n2019-12-07 01:55:18,141|msrest.universal_http.requests|DEBUG|Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\\n2019-12-07 01:55:18,141|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.RunClient.get-async:False|DEBUG|[START]\\n2019-12-07 01:55:18,142|msrest.service_client|DEBUG|Accept header absent 
and forced to application/json\\n2019-12-07 01:55:18,142|msrest.http_logger|DEBUG|Request URL: 'https://eastus2.experiments.azureml.net/history/v1.0/subscriptions/789908e0-5fc2-4c4d-b5f5-9764b0d602b3/resourceGroups/ghiordanfwirsg01/providers/Microsoft.MachineLearningServices/workspaces/ghiordanfwiws/experiments/020_AzureMLEstimator/runs/020_AzureMLEstimator_1575683693_ddd16e31'\\n2019-12-07 01:55:18,142|msrest.http_logger|DEBUG|Request method: 'GET'\\n2019-12-07 01:55:18,142|msrest.http_logger|DEBUG|Request headers:\\n2019-12-07 01:55:18,142|msrest.http_logger|DEBUG| 'Accept': 'application/json'\\n2019-12-07 01:55:18,142|msrest.http_logger|DEBUG| 'Content-Type': 'application/json; charset=utf-8'\\n2019-12-07 01:55:18,142|msrest.http_logger|DEBUG| 'x-ms-client-request-id': '066d53de-da2b-470f-936a-ed66dab2d28c'\\n2019-12-07 01:55:18,142|msrest.http_logger|DEBUG| 'request-id': '066d53de-da2b-470f-936a-ed66dab2d28c'\\n2019-12-07 01:55:18,143|msrest.http_logger|DEBUG| 'User-Agent': 'python/3.6.9 (Linux-4.15.0-1057-azure-x86_64-with-debian-10.0) msrest/0.6.10 azureml._restclient/core.1.0.76'\\n2019-12-07 01:55:18,143|msrest.http_logger|DEBUG|Request body:\\n2019-12-07 01:55:18,143|msrest.http_logger|DEBUG|None\\n2019-12-07 01:55:18,143|msrest.universal_http|DEBUG|Configuring redirects: allow=True, max=30\\n2019-12-07 01:55:18,143|msrest.universal_http|DEBUG|Configuring request: timeout=100, verify=True, cert=None\\n2019-12-07 01:55:18,143|msrest.universal_http|DEBUG|Configuring proxies: ''\\n2019-12-07 01:55:18,143|msrest.universal_http|DEBUG|Evaluate proxies against ENV settings: True\\n2019-12-07 01:55:18,196|msrest.http_logger|DEBUG|Response status: 200\\n2019-12-07 01:55:18,196|msrest.http_logger|DEBUG|Response headers:\\n2019-12-07 01:55:18,196|msrest.http_logger|DEBUG| 'Date': 'Sat, 07 Dec 2019 01:55:18 GMT'\\n2019-12-07 01:55:18,196|msrest.http_logger|DEBUG| 'Content-Type': 'application/json; charset=utf-8'\\n2019-12-07 01:55:18,196|msrest.http_logger|DEBUG| 'Transfer-Encoding': 'chunked'\\n2019-12-07 01:55:18,197|msrest.http_logger|DEBUG| 'Connection': 'keep-alive'\\n2019-12-07 01:55:18,197|msrest.http_logger|DEBUG| 'Vary': 'Accept-Encoding'\\n2019-12-07 01:55:18,197|msrest.http_logger|DEBUG| 'Request-Context': 'appId=cid-v1:2d2e8e63-272e-4b3c-8598-4ee570a0e70d'\\n2019-12-07 01:55:18,197|msrest.http_logger|DEBUG| 'x-ms-client-request-id': '066d53de-da2b-470f-936a-ed66dab2d28c'\\n2019-12-07 01:55:18,197|msrest.http_logger|DEBUG| 'x-ms-client-session-id': ''\\n2019-12-07 01:55:18,197|msrest.http_logger|DEBUG| 'Strict-Transport-Security': 'max-age=15724800; includeSubDomains; preload'\\n2019-12-07 01:55:18,197|msrest.http_logger|DEBUG| 'X-Content-Type-Options': 'nosniff'\\n2019-12-07 01:55:18,197|msrest.http_logger|DEBUG| 'Content-Encoding': 'gzip'\\n2019-12-07 01:55:18,197|msrest.http_logger|DEBUG|Response content:\\n2019-12-07 01:55:18,197|msrest.http_logger|DEBUG|{\\n \\\"runNumber\\\": 2107,\\n \\\"rootRunId\\\": \\\"020_AzureMLEstimator_1575683693_ddd16e31\\\",\\n \\\"experimentId\\\": \\\"8d96276b-f420-4a67-86be-f933dd3d38cd\\\",\\n \\\"createdUtc\\\": \\\"2019-12-07T01:54:55.3303306+00:00\\\",\\n \\\"createdBy\\\": {\\n \\\"userObjectId\\\": \\\"b77869a0-66f2-4288-89ef-13c10accc4dc\\\",\\n \\\"userPuId\\\": \\\"1003000090A95868\\\",\\n \\\"userIdp\\\": null,\\n \\\"userAltSecId\\\": null,\\n \\\"userIss\\\": \\\"https://sts.windows.net/72f988bf-86f1-41af-91ab-2d7cd011db47/\\\",\\n \\\"userTenantId\\\": \\\"72f988bf-86f1-41af-91ab-2d7cd011db47\\\",\\n \\\"userName\\\": \\\"George 
Iordanescu\\\"\\n },\\n \\\"userId\\\": \\\"b77869a0-66f2-4288-89ef-13c10accc4dc\\\",\\n \\\"token\\\": null,\\n \\\"tokenExpiryTimeUtc\\\": null,\\n \\\"error\\\": null,\\n \\\"warnings\\\": null,\\n \\\"revision\\\": 7,\\n \\\"runId\\\": \\\"020_AzureMLEstimator_1575683693_ddd16e31\\\",\\n \\\"parentRunId\\\": null,\\n \\\"status\\\": \\\"Running\\\",\\n \\\"startTimeUtc\\\": \\\"2019-12-07T01:55:07.6378716+00:00\\\",\\n \\\"endTimeUtc\\\": null,\\n \\\"heartbeatEnabled\\\": false,\\n \\\"options\\\": {\\n \\\"generateDataContainerIdIfNotSpecified\\\": true\\n },\\n \\\"name\\\": null,\\n \\\"dataContainerId\\\": \\\"dcid.020_AzureMLEstimator_1575683693_ddd16e31\\\",\\n \\\"description\\\": null,\\n \\\"hidden\\\": false,\\n \\\"runType\\\": \\\"azureml.scriptrun\\\",\\n \\\"properties\\\": {\\n \\\"_azureml.ComputeTargetType\\\": \\\"amlcompute\\\",\\n \\\"ContentSnapshotId\\\": \\\"a5071b2a-37a7-40da-8340-69cc894091cb\\\",\\n \\\"azureml.git.repository_uri\\\": \\\"git@github.com:georgeAccnt-GH/DeepSeismic.git\\\",\\n \\\"mlflow.source.git.repoURL\\\": \\\"git@github.com:georgeAccnt-GH/DeepSeismic.git\\\",\\n \\\"azureml.git.branch\\\": \\\"staging\\\",\\n \\\"mlflow.source.git.branch\\\": \\\"staging\\\",\\n \\\"azureml.git.commit\\\": \\\"1d3cd3340f4063508b6f707d5fc2a35f5429a07f\\\",\\n \\\"mlflow.source.git.commit\\\": \\\"1d3cd3340f4063508b6f707d5fc2a35f5429a07f\\\",\\n \\\"azureml.git.dirty\\\": \\\"True\\\",\\n \\\"ProcessInfoFile\\\": \\\"azureml-logs/process_info.json\\\",\\n \\\"ProcessStatusFile\\\": \\\"azureml-logs/process_status.json\\\"\\n },\\n \\\"scriptName\\\": \\\"azureml_01_modelling.py\\\",\\n \\\"target\\\": \\\"gpuclstfwi08\\\",\\n \\\"tags\\\": {},\\n \\\"inputDatasets\\\": [],\\n \\\"runDefinition\\\": null,\\n \\\"createdFrom\\\": {\\n \\\"type\\\": \\\"Notebook\\\",\\n \\\"locationType\\\": \\\"ArtifactId\\\",\\n \\\"location\\\": \\\"LocalUpload/020_AzureMLEstimator_1575683693_ddd16e31/030_ScaleJobsUsingAzuremL_GeophysicsTutorial_FWI_Azure_devito.ipynb\\\"\\n },\\n \\\"cancelUri\\\": \\\"https://eastus2.experiments.azureml.net/execution/v1.0/subscriptions/789908e0-5fc2-4c4d-b5f5-9764b0d602b3/resourceGroups/ghiordanfwirsg01/providers/Microsoft.MachineLearningServices/workspaces/ghiordanfwiws/experiments/020_AzureMLEstimator/runId/020_AzureMLEstimator_1575683693_ddd16e31/cancel\\\",\\n \\\"completeUri\\\": null,\\n \\\"diagnosticsUri\\\": \\\"https://eastus2.experiments.azureml.net/execution/v1.0/subscriptions/789908e0-5fc2-4c4d-b5f5-9764b0d602b3/resourceGroups/ghiordanfwirsg01/providers/Microsoft.MachineLearningServices/workspaces/ghiordanfwiws/experiments/020_AzureMLEstimator/runId/020_AzureMLEstimator_1575683693_ddd16e31/diagnostics\\\",\\n \\\"computeRequest\\\": {\\n \\\"nodeCount\\\": 1\\n },\\n \\\"retainForLifetimeOfWorkspace\\\": false\\n}\\n2019-12-07 01:55:18,202|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.RunClient.get-async:False|DEBUG|[STOP]\\n2019-12-07 01:55:18,202|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31|DEBUG|Constructing run from dto. 
type: azureml.scriptrun, source: None, props: {'_azureml.ComputeTargetType': 'amlcompute', 'ContentSnapshotId': 'a5071b2a-37a7-40da-8340-69cc894091cb', 'azureml.git.repository_uri': 'git@github.com:georgeAccnt-GH/DeepSeismic.git', 'mlflow.source.git.repoURL': 'git@github.com:georgeAccnt-GH/DeepSeismic.git', 'azureml.git.branch': 'staging', 'mlflow.source.git.branch': 'staging', 'azureml.git.commit': '1d3cd3340f4063508b6f707d5fc2a35f5429a07f', 'mlflow.source.git.commit': '1d3cd3340f4063508b6f707d5fc2a35f5429a07f', 'azureml.git.dirty': 'True', 'ProcessInfoFile': 'azureml-logs/process_info.json', 'ProcessStatusFile': 'azureml-logs/process_status.json'}\\n2019-12-07 01:55:18,202|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunContextManager|DEBUG|Valid logs dir, setting up content loader\\n2019-12-07 01:55:18,202|azureml|WARNING|Could not import azureml.mlflow or azureml.contrib.mlflow mlflow APIs will not run against AzureML services. Add azureml-mlflow as a conda dependency for the run if this behavior is desired\\n2019-12-07 01:55:18,203|azureml.WorkerPool|DEBUG|[START]\\n2019-12-07 01:55:18,203|azureml.SendRunKillSignal|DEBUG|[START]\\n2019-12-07 01:55:18,203|azureml.RunStatusContext|DEBUG|[START]\\n2019-12-07 01:55:18,203|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunContextManager.RunStatusContext|DEBUG|[START]\\n2019-12-07 01:55:18,203|azureml.WorkingDirectoryCM|DEBUG|[START]\\n2019-12-07 01:55:18,203|azureml.history._tracking.PythonWorkingDirectory.workingdir|DEBUG|[START]\\n2019-12-07 01:55:18,203|azureml.history._tracking.PythonWorkingDirectory|INFO|Current working dir: /mnt/batch/tasks/shared/LS_root/jobs/ghiordanfwiws/azureml/020_azuremlestimator_1575683693_ddd16e31/mounts/workspaceblobstore/azureml/020_AzureMLEstimator_1575683693_ddd16e31\\n2019-12-07 01:55:18,203|azureml.history._tracking.PythonWorkingDirectory.workingdir|DEBUG|Calling pyfs\\n2019-12-07 01:55:18,203|azureml.history._tracking.PythonWorkingDirectory.workingdir|DEBUG|Storing working dir for pyfs as /mnt/batch/tasks/shared/LS_root/jobs/ghiordanfwiws/azureml/020_azuremlestimator_1575683693_ddd16e31/mounts/workspaceblobstore/azureml/020_AzureMLEstimator_1575683693_ddd16e31\\n2019-12-07 01:55:20,151|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-07 01:55:20,151|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-07 01:55:20,151|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-07 01:55:20,151|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-07 01:55:20,152|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-07 01:55:20,152|azureml._base_sdk_common.service_discovery|DEBUG|Constructing mms service url in from history url environment variable None, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-07 
01:55:20,152|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-07 01:55:20,152|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-07 01:55:20,152|azureml._base_sdk_common.service_discovery|DEBUG|Found history service url in environment variable AZUREML_SERVICE_ENDPOINT, history service url: https://eastus2.experiments.azureml.net.\\n2019-12-07 01:55:20,157|msrest.universal_http.requests|DEBUG|Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\\n2019-12-07 01:55:20,158|azureml._run_impl.run_history_facade|DEBUG|Created a static thread pool for RunHistoryFacade class\\n2019-12-07 01:55:20,162|msrest.universal_http.requests|DEBUG|Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\\n2019-12-07 01:55:20,166|msrest.universal_http.requests|DEBUG|Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\\n2019-12-07 01:55:20,170|msrest.universal_http.requests|DEBUG|Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\\n2019-12-07 01:55:20,175|msrest.universal_http.requests|DEBUG|Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\\n2019-12-07 01:55:20,175|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.RunClient.get-async:False|DEBUG|[START]\\n2019-12-07 01:55:20,175|msrest.service_client|DEBUG|Accept header absent and forced to application/json\\n2019-12-07 01:55:20,175|msrest.http_logger|DEBUG|Request URL: 'https://eastus2.experiments.azureml.net/history/v1.0/subscriptions/789908e0-5fc2-4c4d-b5f5-9764b0d602b3/resourceGroups/ghiordanfwirsg01/providers/Microsoft.MachineLearningServices/workspaces/ghiordanfwiws/experiments/020_AzureMLEstimator/runs/020_AzureMLEstimator_1575683693_ddd16e31'\\n2019-12-07 01:55:20,175|msrest.http_logger|DEBUG|Request method: 'GET'\\n2019-12-07 01:55:20,175|msrest.http_logger|DEBUG|Request headers:\\n2019-12-07 01:55:20,176|msrest.http_logger|DEBUG| 'Accept': 'application/json'\\n2019-12-07 01:55:20,176|msrest.http_logger|DEBUG| 'Content-Type': 'application/json; charset=utf-8'\\n2019-12-07 01:55:20,176|msrest.http_logger|DEBUG| 'x-ms-client-request-id': 'b087e081-4f44-4f48-8adf-8c816a59faae'\\n2019-12-07 01:55:20,176|msrest.http_logger|DEBUG| 'request-id': 'b087e081-4f44-4f48-8adf-8c816a59faae'\\n2019-12-07 01:55:20,176|msrest.http_logger|DEBUG| 'User-Agent': 'python/3.6.9 (Linux-4.15.0-1057-azure-x86_64-with-debian-10.0) msrest/0.6.10 azureml._restclient/core.1.0.76'\\n2019-12-07 01:55:20,176|msrest.http_logger|DEBUG|Request body:\\n2019-12-07 01:55:20,176|msrest.http_logger|DEBUG|None\\n2019-12-07 01:55:20,176|msrest.universal_http|DEBUG|Configuring redirects: allow=True, max=30\\n2019-12-07 01:55:20,176|msrest.universal_http|DEBUG|Configuring request: timeout=100, verify=True, cert=None\\n2019-12-07 01:55:20,176|msrest.universal_http|DEBUG|Configuring proxies: ''\\n2019-12-07 01:55:20,176|msrest.universal_http|DEBUG|Evaluate proxies against ENV settings: True\\n2019-12-07 01:55:20,259|msrest.http_logger|DEBUG|Response status: 200\\n2019-12-07 01:55:20,259|msrest.http_logger|DEBUG|Response headers:\\n2019-12-07 01:55:20,259|msrest.http_logger|DEBUG| 'Date': 'Sat, 07 Dec 2019 01:55:20 GMT'\\n2019-12-07 01:55:20,259|msrest.http_logger|DEBUG| 'Content-Type': 
'application/json; charset=utf-8'\\n2019-12-07 01:55:20,260|msrest.http_logger|DEBUG| 'Transfer-Encoding': 'chunked'\\n2019-12-07 01:55:20,260|msrest.http_logger|DEBUG| 'Connection': 'keep-alive'\\n2019-12-07 01:55:20,260|msrest.http_logger|DEBUG| 'Vary': 'Accept-Encoding'\\n2019-12-07 01:55:20,260|msrest.http_logger|DEBUG| 'Request-Context': 'appId=cid-v1:2d2e8e63-272e-4b3c-8598-4ee570a0e70d'\\n2019-12-07 01:55:20,260|msrest.http_logger|DEBUG| 'x-ms-client-request-id': 'b087e081-4f44-4f48-8adf-8c816a59faae'\\n2019-12-07 01:55:20,260|msrest.http_logger|DEBUG| 'x-ms-client-session-id': ''\\n2019-12-07 01:55:20,260|msrest.http_logger|DEBUG| 'Strict-Transport-Security': 'max-age=15724800; includeSubDomains; preload'\\n2019-12-07 01:55:20,260|msrest.http_logger|DEBUG| 'X-Content-Type-Options': 'nosniff'\\n2019-12-07 01:55:20,260|msrest.http_logger|DEBUG| 'Content-Encoding': 'gzip'\\n2019-12-07 01:55:20,260|msrest.http_logger|DEBUG|Response content:\\n2019-12-07 01:55:20,260|msrest.http_logger|DEBUG|{\\n \\\"runNumber\\\": 2107,\\n \\\"rootRunId\\\": \\\"020_AzureMLEstimator_1575683693_ddd16e31\\\",\\n \\\"experimentId\\\": \\\"8d96276b-f420-4a67-86be-f933dd3d38cd\\\",\\n \\\"createdUtc\\\": \\\"2019-12-07T01:54:55.3303306+00:00\\\",\\n \\\"createdBy\\\": {\\n \\\"userObjectId\\\": \\\"b77869a0-66f2-4288-89ef-13c10accc4dc\\\",\\n \\\"userPuId\\\": \\\"1003000090A95868\\\",\\n \\\"userIdp\\\": null,\\n \\\"userAltSecId\\\": null,\\n \\\"userIss\\\": \\\"https://sts.windows.net/72f988bf-86f1-41af-91ab-2d7cd011db47/\\\",\\n \\\"userTenantId\\\": \\\"72f988bf-86f1-41af-91ab-2d7cd011db47\\\",\\n \\\"userName\\\": \\\"George Iordanescu\\\"\\n },\\n \\\"userId\\\": \\\"b77869a0-66f2-4288-89ef-13c10accc4dc\\\",\\n \\\"token\\\": null,\\n \\\"tokenExpiryTimeUtc\\\": null,\\n \\\"error\\\": null,\\n \\\"warnings\\\": null,\\n \\\"revision\\\": 7,\\n \\\"runId\\\": \\\"020_AzureMLEstimator_1575683693_ddd16e31\\\",\\n \\\"parentRunId\\\": null,\\n \\\"status\\\": \\\"Running\\\",\\n \\\"startTimeUtc\\\": \\\"2019-12-07T01:55:07.6378716+00:00\\\",\\n \\\"endTimeUtc\\\": null,\\n \\\"heartbeatEnabled\\\": false,\\n \\\"options\\\": {\\n \\\"generateDataContainerIdIfNotSpecified\\\": true\\n },\\n \\\"name\\\": null,\\n \\\"dataContainerId\\\": \\\"dcid.020_AzureMLEstimator_1575683693_ddd16e31\\\",\\n \\\"description\\\": null,\\n \\\"hidden\\\": false,\\n \\\"runType\\\": \\\"azureml.scriptrun\\\",\\n \\\"properties\\\": {\\n \\\"_azureml.ComputeTargetType\\\": \\\"amlcompute\\\",\\n \\\"ContentSnapshotId\\\": \\\"a5071b2a-37a7-40da-8340-69cc894091cb\\\",\\n \\\"azureml.git.repository_uri\\\": \\\"git@github.com:georgeAccnt-GH/DeepSeismic.git\\\",\\n \\\"mlflow.source.git.repoURL\\\": \\\"git@github.com:georgeAccnt-GH/DeepSeismic.git\\\",\\n \\\"azureml.git.branch\\\": \\\"staging\\\",\\n \\\"mlflow.source.git.branch\\\": \\\"staging\\\",\\n \\\"azureml.git.commit\\\": \\\"1d3cd3340f4063508b6f707d5fc2a35f5429a07f\\\",\\n \\\"mlflow.source.git.commit\\\": \\\"1d3cd3340f4063508b6f707d5fc2a35f5429a07f\\\",\\n \\\"azureml.git.dirty\\\": \\\"True\\\",\\n \\\"ProcessInfoFile\\\": \\\"azureml-logs/process_info.json\\\",\\n \\\"ProcessStatusFile\\\": \\\"azureml-logs/process_status.json\\\"\\n },\\n \\\"scriptName\\\": \\\"azureml_01_modelling.py\\\",\\n \\\"target\\\": \\\"gpuclstfwi08\\\",\\n \\\"tags\\\": {},\\n \\\"inputDatasets\\\": [],\\n \\\"runDefinition\\\": null,\\n \\\"createdFrom\\\": {\\n \\\"type\\\": \\\"Notebook\\\",\\n \\\"locationType\\\": \\\"ArtifactId\\\",\\n \\\"location\\\": 
\\\"LocalUpload/020_AzureMLEstimator_1575683693_ddd16e31/030_ScaleJobsUsingAzuremL_GeophysicsTutorial_FWI_Azure_devito.ipynb\\\"\\n },\\n \\\"cancelUri\\\": \\\"https://eastus2.experiments.azureml.net/execution/v1.0/subscriptions/789908e0-5fc2-4c4d-b5f5-9764b0d602b3/resourceGroups/ghiordanfwirsg01/providers/Microsoft.MachineLearningServices/workspaces/ghiordanfwiws/experiments/020_AzureMLEstimator/runId/020_AzureMLEstimator_1575683693_ddd16e31/cancel\\\",\\n \\\"completeUri\\\": null,\\n \\\"diagnosticsUri\\\": \\\"https://eastus2.experiments.azureml.net/execution/v1.0/subscriptions/789908e0-5fc2-4c4d-b5f5-9764b0d602b3/resourceGroups/ghiordanfwirsg01/providers/Microsoft.MachineLearningServices/workspaces/ghiordanfwiws/experiments/020_AzureMLEstimator/runId/020_AzureMLEstimator_1575683693_ddd16e31/diagnostics\\\",\\n \\\"computeRequest\\\": {\\n \\\"nodeCount\\\": 1\\n },\\n \\\"retainForLifetimeOfWorkspace\\\": false\\n}\\n2019-12-07 01:55:20,262|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.RunClient.get-async:False|DEBUG|[STOP]\\n2019-12-07 01:55:20,262|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31|DEBUG|Constructing run from dto. type: azureml.scriptrun, source: None, props: {'_azureml.ComputeTargetType': 'amlcompute', 'ContentSnapshotId': 'a5071b2a-37a7-40da-8340-69cc894091cb', 'azureml.git.repository_uri': 'git@github.com:georgeAccnt-GH/DeepSeismic.git', 'mlflow.source.git.repoURL': 'git@github.com:georgeAccnt-GH/DeepSeismic.git', 'azureml.git.branch': 'staging', 'mlflow.source.git.branch': 'staging', 'azureml.git.commit': '1d3cd3340f4063508b6f707d5fc2a35f5429a07f', 'mlflow.source.git.commit': '1d3cd3340f4063508b6f707d5fc2a35f5429a07f', 'azureml.git.dirty': 'True', 'ProcessInfoFile': 'azureml-logs/process_info.json', 'ProcessStatusFile': 'azureml-logs/process_status.json'}\\n2019-12-07 01:55:20,262|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunContextManager|DEBUG|Valid logs dir, setting up content loader\\n2019-12-07 01:55:48,084|azureml.core.authentication|DEBUG|Time to expire 1814346.915499 seconds\\n2019-12-07 01:56:18,084|azureml.core.authentication|DEBUG|Time to expire 1814316.915133 seconds\\n2019-12-07 01:56:25,858|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient|DEBUG|Overrides: Max batch size: 50, batch cushion: 5, Interval: 1.\\n2019-12-07 01:56:25,858|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.PostMetricsBatch.PostMetricsBatchDaemon|DEBUG|Starting daemon and triggering first instance\\n2019-12-07 01:56:25,859|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient|DEBUG|Used for use_batch=True.\\n2019-12-07 01:56:25,924|azureml.history._tracking.PythonWorkingDirectory.workingdir|DEBUG|Calling pyfs\\n2019-12-07 01:56:25,924|azureml.history._tracking.PythonWorkingDirectory|INFO|Current working dir: /devito\\n2019-12-07 01:56:25,924|azureml.history._tracking.PythonWorkingDirectory.workingdir|DEBUG|pyfs has path /devito\\n2019-12-07 01:56:25,925|azureml.history._tracking.PythonWorkingDirectory.workingdir|DEBUG|Reverting working dir from /devito to /mnt/batch/tasks/shared/LS_root/jobs/ghiordanfwiws/azureml/020_azuremlestimator_1575683693_ddd16e31/mounts/workspaceblobstore/azureml/020_AzureMLEstimator_1575683693_ddd16e31\\n2019-12-07 01:56:25,925|azureml.history._tracking.PythonWorkingDirectory|INFO|Setting working dir to 
/mnt/batch/tasks/shared/LS_root/jobs/ghiordanfwiws/azureml/020_azuremlestimator_1575683693_ddd16e31/mounts/workspaceblobstore/azureml/020_AzureMLEstimator_1575683693_ddd16e31\\n2019-12-07 01:56:25,925|azureml.history._tracking.PythonWorkingDirectory.workingdir|DEBUG|[STOP]\\n2019-12-07 01:56:25,925|azureml.WorkingDirectoryCM|DEBUG|[STOP]\\n2019-12-07 01:56:25,925|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31|INFO|complete is not setting status for submitted runs.\\n2019-12-07 01:56:25,925|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.FlushingMetricsClient|DEBUG|[START]\\n2019-12-07 01:56:25,925|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient|DEBUG|Overrides: Max batch size: 50, batch cushion: 5, Interval: 1.\\n2019-12-07 01:56:25,925|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.PostMetricsBatch.PostMetricsBatchDaemon|DEBUG|Starting daemon and triggering first instance\\n2019-12-07 01:56:25,925|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient|DEBUG|Used for use_batch=True.\\n2019-12-07 01:56:25,925|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.PostMetricsBatch.WaitFlushSource:MetricsClient|DEBUG|[START]\\n2019-12-07 01:56:25,925|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.PostMetricsBatch.WaitFlushSource:MetricsClient|DEBUG|flush timeout 300 is different from task queue timeout 120, using flush timeout\\n2019-12-07 01:56:25,926|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.PostMetricsBatch.WaitFlushSource:MetricsClient|DEBUG|Waiting 300 seconds on tasks: [].\\n2019-12-07 01:56:25,926|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.PostMetricsBatch|DEBUG|\\n2019-12-07 01:56:25,926|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.PostMetricsBatch.WaitFlushSource:MetricsClient|DEBUG|[STOP]\\n2019-12-07 01:56:25,926|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.FlushingMetricsClient|DEBUG|[STOP]\\n2019-12-07 01:56:25,926|azureml.RunStatusContext|DEBUG|[STOP]\\n2019-12-07 01:56:25,926|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.FlushingMetricsClient|DEBUG|[START]\\n2019-12-07 01:56:25,926|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.PostMetricsBatch.WaitFlushSource:MetricsClient|DEBUG|[START]\\n2019-12-07 01:56:25,926|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.PostMetricsBatch.WaitFlushSource:MetricsClient|DEBUG|flush timeout 300.0 is different from task queue timeout 120, using flush timeout\\n2019-12-07 01:56:25,926|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.PostMetricsBatch.WaitFlushSource:MetricsClient|DEBUG|Waiting 300.0 seconds on tasks: [].\\n2019-12-07 01:56:25,926|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.PostMetricsBatch|DEBUG|\\n2019-12-07 01:56:25,926|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.PostMetricsBatch.WaitFlushSource:MetricsClient|DEBUG|[STOP]\\n2019-12-07 
01:56:25,926|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.FlushingMetricsClient|DEBUG|[STOP]\\n2019-12-07 01:56:25,926|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.FlushingMetricsClient|DEBUG|[START]\\n2019-12-07 01:56:25,927|azureml.BatchTaskQueueAdd_1_Batches|DEBUG|[Start]\\n2019-12-07 01:56:25,927|azureml.BatchTaskQueueAdd_1_Batches.WorkerPool|DEBUG|submitting future: _handle_batch\\n2019-12-07 01:56:25,927|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.PostMetricsBatch|DEBUG|Batch size 1.\\n2019-12-07 01:56:25,927|azureml.BatchTaskQueueAdd_1_Batches.0__handle_batch|DEBUG|Using basic handler - no exception handling\\n2019-12-07 01:56:25,927|azureml._restclient.clientbase.WorkerPool|DEBUG|submitting future: _log_batch\\n2019-12-07 01:56:25,927|azureml.BatchTaskQueueAdd_1_Batches|DEBUG|Adding task 0__handle_batch to queue of approximate size: 0\\n2019-12-07 01:56:25,928|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.post_batch-async:False|DEBUG|[START]\\n2019-12-07 01:56:25,928|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.PostMetricsBatch.0__log_batch|DEBUG|Using basic handler - no exception handling\\n2019-12-07 01:56:25,928|azureml.BatchTaskQueueAdd_1_Batches|DEBUG|[Stop] - waiting default timeout\\n2019-12-07 01:56:25,929|msrest.service_client|DEBUG|Accept header absent and forced to application/json\\n2019-12-07 01:56:25,929|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.PostMetricsBatch|DEBUG|Adding task 0__log_batch to queue of approximate size: 0\\n2019-12-07 01:56:25,929|azureml.BatchTaskQueueAdd_1_Batches.WaitFlushSource:BatchTaskQueueAdd_1_Batches|DEBUG|[START]\\n2019-12-07 01:56:25,929|msrest.universal_http.requests|DEBUG|Configuring retry: max_retries=3, backoff_factor=0.8, max_backoff=90\\n2019-12-07 01:56:25,930|azureml.BatchTaskQueueAdd_1_Batches.WaitFlushSource:BatchTaskQueueAdd_1_Batches|DEBUG|Overriding default flush timeout from None to 120\\n2019-12-07 01:56:25,930|msrest.http_logger|DEBUG|Request URL: 'https://eastus2.experiments.azureml.net/history/v1.0/subscriptions/789908e0-5fc2-4c4d-b5f5-9764b0d602b3/resourceGroups/ghiordanfwirsg01/providers/Microsoft.MachineLearningServices/workspaces/ghiordanfwiws/experiments/020_AzureMLEstimator/runs/020_AzureMLEstimator_1575683693_ddd16e31/batch/metrics'\\n2019-12-07 01:56:25,930|azureml.BatchTaskQueueAdd_1_Batches.WaitFlushSource:BatchTaskQueueAdd_1_Batches|DEBUG|Waiting 120 seconds on tasks: [AsyncTask(0__handle_batch)].\\n2019-12-07 01:56:25,930|msrest.http_logger|DEBUG|Request method: 'POST'\\n2019-12-07 01:56:25,930|azureml.BatchTaskQueueAdd_1_Batches.0__handle_batch.WaitingTask|DEBUG|[START]\\n2019-12-07 01:56:25,930|msrest.http_logger|DEBUG|Request headers:\\n2019-12-07 01:56:25,930|azureml.BatchTaskQueueAdd_1_Batches.0__handle_batch.WaitingTask|DEBUG|Awaiter is BatchTaskQueueAdd_1_Batches\\n2019-12-07 01:56:25,931|msrest.http_logger|DEBUG| 'Accept': 'application/json'\\n2019-12-07 01:56:25,931|azureml.BatchTaskQueueAdd_1_Batches.0__handle_batch.WaitingTask|DEBUG|[STOP]\\n2019-12-07 01:56:25,931|msrest.http_logger|DEBUG| 'Content-Type': 'application/json-patch+json; charset=utf-8'\\n2019-12-07 01:56:25,931|azureml.BatchTaskQueueAdd_1_Batches|DEBUG|\\n2019-12-07 01:56:25,931|msrest.http_logger|DEBUG| 'x-ms-client-request-id': 
'18a01463-68a6-4c03-bc10-c9e912702ee6'\\n2019-12-07 01:56:25,931|azureml.BatchTaskQueueAdd_1_Batches.WaitFlushSource:BatchTaskQueueAdd_1_Batches|DEBUG|[STOP]\\n2019-12-07 01:56:25,931|msrest.http_logger|DEBUG| 'request-id': '18a01463-68a6-4c03-bc10-c9e912702ee6'\\n2019-12-07 01:56:25,931|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.PostMetricsBatch.WaitFlushSource:MetricsClient|DEBUG|[START]\\n2019-12-07 01:56:25,931|msrest.http_logger|DEBUG| 'Content-Length': '410'\\n2019-12-07 01:56:25,932|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.PostMetricsBatch.WaitFlushSource:MetricsClient|DEBUG|flush timeout 300.0 is different from task queue timeout 120, using flush timeout\\n2019-12-07 01:56:25,932|msrest.http_logger|DEBUG| 'User-Agent': 'python/3.6.9 (Linux-4.15.0-1057-azure-x86_64-with-debian-10.0) msrest/0.6.10 azureml._restclient/core.1.0.76 sdk_run'\\n2019-12-07 01:56:25,932|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.PostMetricsBatch.WaitFlushSource:MetricsClient|DEBUG|Waiting 300.0 seconds on tasks: [AsyncTask(0__log_batch)].\\n2019-12-07 01:56:25,932|msrest.http_logger|DEBUG|Request body:\\n2019-12-07 01:56:25,932|msrest.http_logger|DEBUG|{\\\"values\\\": [{\\\"metricId\\\": \\\"1a8ad3d8-accf-42da-a07d-fd00ef5ee1e6\\\", \\\"metricType\\\": \\\"azureml.v1.scalar\\\", \\\"createdUtc\\\": \\\"2019-12-07T01:56:25.858188Z\\\", \\\"name\\\": \\\"training_message01: \\\", \\\"description\\\": \\\"\\\", \\\"numCells\\\": 1, \\\"cells\\\": [{\\\"training_message01: \\\": \\\"finished experiment\\\"}], \\\"schema\\\": {\\\"numProperties\\\": 1, \\\"properties\\\": [{\\\"propertyId\\\": \\\"training_message01: \\\", \\\"name\\\": \\\"training_message01: \\\", \\\"type\\\": \\\"string\\\"}]}}]}\\n2019-12-07 01:56:25,932|msrest.universal_http|DEBUG|Configuring redirects: allow=True, max=30\\n2019-12-07 01:56:25,932|msrest.universal_http|DEBUG|Configuring request: timeout=100, verify=True, cert=None\\n2019-12-07 01:56:25,932|msrest.universal_http|DEBUG|Configuring proxies: ''\\n2019-12-07 01:56:25,932|msrest.universal_http|DEBUG|Evaluate proxies against ENV settings: True\\n2019-12-07 01:56:26,050|msrest.http_logger|DEBUG|Response status: 200\\n2019-12-07 01:56:26,051|msrest.http_logger|DEBUG|Response headers:\\n2019-12-07 01:56:26,051|msrest.http_logger|DEBUG| 'Date': 'Sat, 07 Dec 2019 01:56:26 GMT'\\n2019-12-07 01:56:26,051|msrest.http_logger|DEBUG| 'Content-Length': '0'\\n2019-12-07 01:56:26,051|msrest.http_logger|DEBUG| 'Connection': 'keep-alive'\\n2019-12-07 01:56:26,051|msrest.http_logger|DEBUG| 'Request-Context': 'appId=cid-v1:2d2e8e63-272e-4b3c-8598-4ee570a0e70d'\\n2019-12-07 01:56:26,051|msrest.http_logger|DEBUG| 'x-ms-client-request-id': '18a01463-68a6-4c03-bc10-c9e912702ee6'\\n2019-12-07 01:56:26,051|msrest.http_logger|DEBUG| 'x-ms-client-session-id': ''\\n2019-12-07 01:56:26,051|msrest.http_logger|DEBUG| 'Strict-Transport-Security': 'max-age=15724800; includeSubDomains; preload'\\n2019-12-07 01:56:26,051|msrest.http_logger|DEBUG| 'X-Content-Type-Options': 'nosniff'\\n2019-12-07 01:56:26,051|msrest.http_logger|DEBUG|Response content:\\n2019-12-07 01:56:26,051|msrest.http_logger|DEBUG|\\n2019-12-07 01:56:26,052|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.post_batch-async:False|DEBUG|[STOP]\\n2019-12-07 
01:56:26,182|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.PostMetricsBatch.0__log_batch.WaitingTask|DEBUG|[START]\\n2019-12-07 01:56:26,182|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.PostMetricsBatch.0__log_batch.WaitingTask|DEBUG|Awaiter is PostMetricsBatch\\n2019-12-07 01:56:26,183|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.PostMetricsBatch.0__log_batch.WaitingTask|DEBUG|[STOP]\\n2019-12-07 01:56:26,183|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.PostMetricsBatch|DEBUG|Waiting on task: 0__log_batch.\\n1 tasks left. Current duration of flush 0.0002186298370361328 seconds.\\n\\n2019-12-07 01:56:26,183|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.PostMetricsBatch.WaitFlushSource:MetricsClient|DEBUG|[STOP]\\n2019-12-07 01:56:26,183|azureml._SubmittedRun#020_AzureMLEstimator_1575683693_ddd16e31.RunHistoryFacade.MetricsClient.FlushingMetricsClient|DEBUG|[STOP]\\n2019-12-07 01:56:26,183|azureml.SendRunKillSignal|DEBUG|[STOP]\\n2019-12-07 01:56:26,183|azureml.HistoryTrackingWorkerPool.WorkerPoolShutdown|DEBUG|[START]\\n2019-12-07 01:56:26,183|azureml.HistoryTrackingWorkerPool.WorkerPoolShutdown|DEBUG|[STOP]\\n2019-12-07 01:56:26,183|azureml.WorkerPool|DEBUG|[STOP]\\n\\nRun is completed.\", \"graph\": {}, \"widget_settings\": {\"childWidgetDisplay\": \"popup\", \"send_telemetry\": false, \"log_level\": \"NOTSET\", \"sdk_version\": \"1.0.76\"}, \"loading\": false}"
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Counter499: submission of job 499 on 400 nodes took 9.16640019416809 seconds \n",
+ "run list length 499\n"
+ ]
+ }
+ ],
+ "source": [
+ "import time\n",
+ "from IPython.display import clear_output\n",
+ "\n",
+ "no_of_jobs = 500\n",
+ "no_of_nodes = 400\n",
+ "\n",
+ "job_counter = 0\n",
+ "print_cycle = 20\n",
+ "run_list = []\n",
+ "submit_time_list = []\n",
+ "for crt_nodes in range(no_of_nodes, (no_of_nodes+1)):\n",
+ " gpu_cluster.update(min_nodes=0, max_nodes=crt_nodes, idle_seconds_before_scaledown=1200)\n",
+ " clust_start_time = time.time()\n",
+ " for crt_job in range(1, no_of_jobs):\n",
+ " job_counter+= 1\n",
+ " start_time = time.time()\n",
+ " run = exp.submit(est)\n",
+ " end_time = time.time()\n",
+ " run_time = end_time - start_time\n",
+ " run_list.append(run)\n",
+ " submit_time_list.append(run_time)\n",
+ " print('Counter{}: submission of job {} on {} nodes took {} seconds '.format(job_counter, crt_job, crt_nodes, run_time))\n",
+ " print('run list length {}'.format(len(run_list)))\n",
+ " if ((job_counter-1) % print_cycle) == 0:\n",
+ " clear_output()\n",
+ " print('Showing details for run {}'.format(job_counter))\n",
+ " RunDetails(run).show()\n",
+ "# [all_jobs_done = True if (('Completed'==crt_queried_job.get_status()) for crt_queried_job in run_list)]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 22,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "array([10.16889381, 10.52522182, 8.67223501, 7.76976609, 8.98659873,\n",
+ " 9.54043746, 7.56379271, 7.95067477, 10.98772812, 8.58469343,\n",
+ " 9.19690919, 8.37747335, 8.49322033, 8.96249437, 11.00566387,\n",
+ " 10.18721223, 8.70340395, 9.07873917, 8.83641577, 9.93886757,\n",
+ " 8.43751788, 8.88584614, 8.46158338, 8.10118651, 7.95576859,\n",
+ " 8.02682757, 8.59585524, 11.43893504, 8.21132302, 7.56929898,\n",
+ " 9.16166759, 7.96446443, 8.20211887, 8.0066514 , 8.16604567,\n",
+ " 9.03855515, 9.27646971, 7.88356876, 8.6105082 , 8.63279152,\n",
+ " 9.63798594, 7.88380122, 11.83064437, 7.67609763, 8.36450744,\n",
+ " 10.36203027, 8.20605659, 8.27934074, 8.71854138, 7.48072934,\n",
+ " 7.98534775, 7.88993239, 9.49783468, 8.20365477, 8.31964707,\n",
+ " 8.24653029, 9.14784336, 8.39632297, 8.88221884, 10.17075896,\n",
+ " 7.93166018, 8.50952411, 8.35107565, 8.62145162, 9.1473949 ,\n",
+ " 10.16314006, 9.48931861, 9.52163553, 10.48561263, 8.70149064,\n",
+ " 8.83968425, 8.77899456, 8.19752908, 8.23720503, 8.44300842,\n",
+ " 10.4865036 , 9.38597918, 8.16601682, 10.31557417, 9.39266205,\n",
+ " 9.3517375 , 8.26235414, 9.90602231, 8.08361053, 9.55309701,\n",
+ " 8.37694287, 8.2842195 , 9.27187061, 8.05741239, 9.81221128,\n",
+ " 8.67282987, 7.50111246, 8.84159875, 7.5928266 , 8.2180264 ,\n",
+ " 11.30247498, 8.97954369, 9.08557224, 8.62394547, 27.931288 ,\n",
+ " 11.31702137, 9.03355598, 9.82408452, 10.98696327, 8.15972924,\n",
+ " 8.10580516, 8.6766634 , 9.18826079, 9.91399217, 9.63535714,\n",
+ " 8.84899211, 8.59690166, 9.08935356, 7.87525439, 9.04824638,\n",
+ " 10.58436322, 8.05351543, 8.0442934 , 8.51687765, 8.23182964,\n",
+ " 7.90365982, 9.41734576, 7.82690763, 7.86053801, 8.81060672,\n",
+ " 15.63083076, 9.12365007, 8.4692018 , 8.38626456, 9.1455934 ,\n",
+ " 7.9579742 , 8.32254815, 9.60984373, 7.72059083, 9.80256414,\n",
+ " 8.03569841, 8.56897283, 9.88993764, 9.825032 , 9.10494757,\n",
+ " 7.96795917, 8.83923078, 8.12920213, 9.14702606, 10.44252062,\n",
+ " 8.11435223, 11.10698366, 8.54753256, 11.07914209, 8.0072608 ,\n",
+ " 8.64252162, 7.86998582, 8.16502595, 9.72599697, 8.01553535,\n",
+ " 8.05236411, 9.4306016 , 8.3510747 , 8.15123487, 7.73660946,\n",
+ " 8.78807712, 8.42650437, 9.09502602, 67.75333071, 14.179214 ,\n",
+ " 13.08692336, 14.52568007, 12.39239168, 8.40634942, 8.3893857 ,\n",
+ " 7.80925822, 8.04524732, 10.61561441, 9.33992386, 8.05361605,\n",
+ " 8.71911073, 8.13864756, 8.18779135, 8.03402972, 8.20232296,\n",
+ " 10.52845287, 8.21701574, 9.63750052, 8.16265893, 7.95386362,\n",
+ " 7.85334754, 7.96290469, 8.1984942 , 8.32950211, 17.0101552 ,\n",
+ " 14.20266891, 13.09765553, 14.32137418, 8.90045214, 9.79849219,\n",
+ " 7.7378149 , 8.17814636, 8.0692122 , 8.02391315, 7.73337412,\n",
+ " 8.24749708, 8.21430159, 8.42469835, 7.93915629, 8.17162681,\n",
+ " 9.29439068, 8.39062524, 8.05844831, 12.62865376, 8.03868556,\n",
+ " 8.03020358, 8.72658324, 7.98921943, 10.13008642, 8.36204886,\n",
+ " 9.8618927 , 8.84138846, 8.26497674, 8.53586483, 11.22441888,\n",
+ " 8.60046291, 9.52709126, 8.1862669 , 8.47402501, 8.08845234,\n",
+ " 8.0216496 , 8.25297642, 9.52822161, 8.53732967, 9.20458651,\n",
+ " 7.84344959, 8.76693869, 9.55830622, 9.32047439, 9.61785316,\n",
+ " 14.20765901, 13.20616293, 12.79950929, 13.23175693, 10.48755121,\n",
+ " 7.89634991, 8.62207508, 10.17518067, 9.5078795 , 8.16943836,\n",
+ " 11.88958383, 8.53581595, 8.78866196, 9.86849713, 8.38485384,\n",
+ " 7.80456519, 8.7930553 , 8.67091751, 11.64525867, 10.70969439,\n",
+ " 9.57600379, 7.88863015, 9.16765165, 8.10214615, 8.1002388 ,\n",
+ " 7.79884577, 7.84607792, 10.70999765, 8.32228923, 8.15903163,\n",
+ " 8.16516185, 11.13710332, 8.67460465, 8.04933095, 7.92010641,\n",
+ " 9.71926355, 7.96389985, 8.50223684, 7.80719972, 7.94503832,\n",
+ " 9.14503789, 8.74866915, 8.32825327, 9.38176489, 8.7043674 ,\n",
+ " 8.11469626, 8.39300489, 8.52375507, 9.48120856, 9.30481339,\n",
+ " 11.00180173, 8.00356221, 9.36562443, 11.26503015, 8.29429078,\n",
+ " 10.5787971 , 8.23888326, 8.25085521, 9.65488529, 10.22367787,\n",
+ " 8.86958766, 8.67924905, 9.8065629 , 9.98437238, 10.44085979,\n",
+ " 8.48997521, 13.41537356, 8.53429914, 9.41697288, 8.75000739,\n",
+ " 8.67022324, 10.65776849, 8.78767824, 29.17240787, 8.29843664,\n",
+ " 10.48030996, 8.60965252, 9.05648637, 11.23915553, 7.71198177,\n",
+ " 8.58811665, 11.27894258, 11.26059055, 8.08691239, 9.09145069,\n",
+ " 8.37398744, 9.33932018, 9.50723815, 14.62887979, 8.08766961,\n",
+ " 8.1010766 , 8.15962887, 7.86279893, 7.81253982, 8.72090292,\n",
+ " 28.51810336, 8.20156765, 8.10436082, 9.35736108, 10.11271501,\n",
+ " 8.28001332, 8.10338402, 7.82260585, 7.74735689, 9.37371802,\n",
+ " 7.83298874, 8.09861684, 11.44845009, 13.80942464, 13.86787438,\n",
+ " 12.95256805, 13.5946703 , 9.04438519, 8.42931032, 7.69650388,\n",
+ " 8.3203001 , 8.93009233, 8.99896145, 10.261621 , 9.76696181,\n",
+ " 8.42695355, 9.45543766, 8.35829163, 8.19327784, 8.54582119,\n",
+ " 10.28408813, 9.96855664, 9.4126513 , 8.85548735, 8.37564468,\n",
+ " 7.85812593, 11.26866746, 11.99777699, 8.90290856, 9.73011518,\n",
+ " 11.37953544, 9.56070495, 13.08286595, 7.91717887, 8.70709944,\n",
+ " 8.89286566, 9.43534017, 9.63375568, 9.45693254, 9.41722798,\n",
+ " 8.95478702, 10.59636545, 9.07217526, 8.91465688, 8.43598938,\n",
+ " 10.09872103, 8.53826594, 10.51633263, 8.16474724, 9.60920191,\n",
+ " 8.79985189, 11.08250904, 15.82575488, 13.72388315, 13.76962495,\n",
+ " 15.5107224 , 12.99527621, 9.55358648, 11.27318692, 10.64224267,\n",
+ " 9.28194666, 8.15835619, 10.34727526, 9.13943338, 8.47959018,\n",
+ " 12.95671797, 8.67874169, 9.48093748, 11.13487458, 11.16393185,\n",
+ " 9.45039058, 9.26687908, 10.83345985, 10.013412 , 12.88114643,\n",
+ " 8.90868664, 9.11424375, 10.62471223, 10.37447572, 8.56728458,\n",
+ " 11.44042325, 8.61506176, 14.37763166, 9.26899981, 9.01356244,\n",
+ " 12.6770153 , 7.95549965, 8.69824529, 8.16541219, 10.80149889,\n",
+ " 9.85532331, 9.16404986, 11.05029202, 8.95759201, 9.60003638,\n",
+ " 8.64066339, 11.99474025, 10.88645577, 9.82658648, 8.38357234,\n",
+ " 8.1931479 , 8.36809587, 8.34779596, 9.29737759, 7.71148348,\n",
+ " 8.34155583, 8.46944427, 9.46755242, 8.39070392, 9.67334032,\n",
+ " 9.42819619, 8.90718842, 8.95999622, 17.03638124, 14.13874507,\n",
+ " 14.17324162, 14.82433629, 10.27358413, 7.75390744, 10.63386297,\n",
+ " 10.74013877, 9.25264263, 8.88592076, 15.62230277, 8.68499494,\n",
+ " 7.90613437, 10.8253715 , 9.28829837, 9.96133757, 8.82941794,\n",
+ " 11.07499003, 9.08565426, 8.76584291, 11.91541052, 9.45269704,\n",
+ " 9.68554997, 9.76184082, 10.95884109, 9.22084093, 9.07609534,\n",
+ " 9.72482204, 8.66262245, 8.85580897, 12.12771249, 9.1096139 ,\n",
+ " 9.55135322, 9.73613167, 12.00068331, 9.63835907, 8.8003633 ,\n",
+ " 10.78142428, 10.36234426, 8.7075491 , 8.79299307, 10.6836946 ,\n",
+ " 8.24508142, 9.70224071, 8.64105797, 9.16640019])"
+ ]
+ },
+ "execution_count": 22,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "text/plain": [
+ "(array([ 0, 0, 0, 16, 105, 85, 75, 61, 40]),\n",
+ " array([ 6. , 6.44444444, 6.88888889, 7.33333333, 7.77777778,\n",
+ " 8.22222222, 8.66666667, 9.11111111, 9.55555556, 10. ]))"
+ ]
+ },
+ "execution_count": 22,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "import numpy as np\n",
+ "np.asarray(submit_time_list)\n",
+ "np.histogram(np.asarray(submit_time_list), bins=np.linspace(6.0, 10.0, num=10), density=False)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 23,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Final print 24, time 107.859 seconds: Counter({'Completed': 478, 'Failed': 21})izing': 1})Running': 1})\r"
+ ]
+ }
+ ],
+ "source": [
+ "def wait_for_run_list_to_finish(the_run_list, plot_results=True):\n",
+ " finished_status_list = ['Completed', 'Failed']\n",
+ " printing_counter = 0\n",
+ " start_time = time.time()\n",
+ " while (not all((crt_queried_job.get_status() in finished_status_list) for crt_queried_job in the_run_list)):\n",
+ " time.sleep(2)\n",
+ " printing_counter+= 1\n",
+ " crt_status = Counter([crt_queried_job.get_status() for crt_queried_job in the_run_list])\n",
+ " print('print {0:.0f}, time {1:.3f} seconds: {2}'.format(printing_counter, time.time() - start_time, \n",
+ " str(crt_status)), end=\"\\r\")\n",
+ " if plot_results:\n",
+ "# import numpy as np\n",
+ " import matplotlib.pyplot as plt\n",
+ " plt.bar(crt_status.keys(), crt_status.values())\n",
+ " plt.show()\n",
+ " \n",
+ "# indexes = np.arange(len(labels))\n",
+ "# width = 1\n",
+ "\n",
+ "# plt.bar(indexes, values, width)\n",
+ "# plt.xticks(indexes + width * 0.5, labels)\n",
+ "# plt.show()\n",
+ "\n",
+ "# from pandas import Series\n",
+ "# crt_status = Series([crt_queried_job.get_status() for crt_queried_job in the_run_list])\n",
+ "# status_counts = crt_status.value_counts().sort_index()\n",
+ "# print('print {0:.0f}, time {1:.3f} seconds: {2}'.format(printing_counter, time.time() - start_time, \n",
+ "# str(status_counts)), end=\"\\r\")\n",
+ "# final status\n",
+ " print('Final print {0:.0f}, time {1:.3f} seconds: {2}'.format(printing_counter, time.time() - start_time, \n",
+ " str(Counter([crt_queried_job.get_status() for crt_queried_job in the_run_list]))), end=\"\\r\") \n",
+ "\n",
+ "wait_for_run_list_to_finish(run_list, plot_results=False)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 24,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "run_durations = [get_run_duration(crt_queried_job) for crt_queried_job in run_list]\n",
+ "run_statuses = [crt_queried_job.get_status() for crt_queried_job in run_list]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 25,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "array([28, 33, 15, 45, 18, 43, 30, 31, 65, 6, 42, 16, 11, 41, 19, 8, 5,\n",
+ " 2, 64, 34])"
+ ]
+ },
+ "execution_count": 25,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+
+ "[244.173832 244.510378 245.027595 245.540781 247.395535 247.411761\n",
+ " 247.933416 248.256958 248.468753 249.724234 249.874347 250.013758\n",
+ " 250.53221 251.10704 251.400594 253.192625 253.421425 253.968411\n",
+ " 256.888013 260.331917]\n",
+ "['Completed' 'Completed' 'Completed' 'Completed' 'Completed' 'Completed'\n",
+ " 'Completed' 'Failed' 'Completed' 'Completed' 'Completed' 'Completed'\n",
+ " 'Failed' 'Completed' 'Completed' 'Completed' 'Completed' 'Completed'\n",
+ " 'Failed' 'Completed']\n"
+ ]
+ },
+ {
+ "data": {
+ "text/plain": [
+ "array([232, 54, 195, 214, 250, 48, 490, 261, 329, 140, 336, 129, 311,\n",
+ " 223, 226, 370, 319, 254, 197, 85])"
+ ]
+ },
+ "execution_count": 25,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[92.52469 92.854187 93.127771 93.19945 93.319895 93.372538 93.557287\n",
+ " 93.579393 93.646901 93.681486 93.890417 94.05724 94.162242 94.165297\n",
+ " 94.182998 94.263456 94.316783 94.400242 94.406081 94.583321]\n",
+ "['Completed' 'Completed' 'Completed' 'Completed' 'Failed' 'Completed'\n",
+ " 'Failed' 'Failed' 'Completed' 'Completed' 'Completed' 'Completed'\n",
+ " 'Completed' 'Completed' 'Completed' 'Failed' 'Completed' 'Completed'\n",
+ " 'Failed' 'Completed']\n"
+ ]
+ },
+ {
+ "data": {
+ "text/plain": [
+ "(array([ 0, 0, 128, 320, 8, 1, 3, 3, 0]),\n",
+ " array([ 50. , 66.66666667, 83.33333333, 100. ,\n",
+ " 116.66666667, 133.33333333, 150. , 166.66666667,\n",
+ " 183.33333333, 200. ]))"
+ ]
+ },
+ "execution_count": 25,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "run_durations = np.asarray(run_durations)\n",
+ "run_statuses = np.asarray(run_statuses)\n",
+ "\n",
+ "extreme_k = 20\n",
+ "#longest runs\n",
+ "indices = np.argsort(run_durations)[-extreme_k:]\n",
+ "indices\n",
+ "print(run_durations[indices])\n",
+ "print(run_statuses[indices])\n",
+ "#shortest runs\n",
+ "indices = np.argsort(run_durations)[0:extreme_k]\n",
+ "indices\n",
+ "print(run_durations[indices])\n",
+ "print(run_statuses[indices])\n",
+ "\n",
+ "#run_durations histogram - counts and bins\n",
+ "np.histogram(run_durations, bins=np.linspace(50, 200, num=10), density=False)\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 26,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Finished running 030_ScaleJobsUsingAzuremL_GeophysicsTutorial_FWI_Azure_devito!\n"
+ ]
+ }
+ ],
+ "source": [
+ "print('Finished running 030_ScaleJobsUsingAzuremL_GeophysicsTutorial_FWI_Azure_devito!')"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "fwi_dev_conda_environment Python",
+ "language": "python",
+ "name": "fwi_dev_conda_environment"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.7.5"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/contrib/scripts/README.md b/contrib/scripts/README.md
new file mode 100644
index 00000000..836a05dc
--- /dev/null
+++ b/contrib/scripts/README.md
@@ -0,0 +1,6 @@
+This folder contains a variety of utility scripts that may be useful when working with this repository.
+
+# Ablation Study
+
+The `ablation.sh` script demonstrates training the HRNet model with a range of patch sizes (100, 150, 200 and 250); a sample invocation is shown below.
+
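+A minimal invocation sketch: it assumes the `seismic-interpretation` conda environment exists, that the Dutch F3 data lives under `/mnt/dutch` (the path hard-coded in the script), and that it is run from the directory containing `train.py` and `scripts/prepare_dutchf3.py`; `<path-to-repo>` is a placeholder for your checkout location.
+
+```bash
+# run the full patch-size ablation (patch sizes 100, 150, 200 and 250)
+bash <path-to-repo>/contrib/scripts/ablation.sh
+```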
diff --git a/contrib/scripts/ablation.sh b/contrib/scripts/ablation.sh
new file mode 100755
index 00000000..81fcdaa6
--- /dev/null
+++ b/contrib/scripts/ablation.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+
+source activate seismic-interpretation
+
+# Patch_Size 100: Patch vs Section Depth
+python scripts/prepare_dutchf3.py split_train_val patch --data-dir=/mnt/dutch --stride=50 --patch=100
+python train.py OUTPUT_DIR /data/output/hrnet_patch TRAIN.DEPTH patch TRAIN.PATCH_SIZE 100 --cfg 'configs/hrnet.yaml'
+python train.py OUTPUT_DIR /data/output/hrnet_section TRAIN.DEPTH section TRAIN.PATCH_SIZE 100 --cfg 'configs/hrnet.yaml'
+
+# Patch_Size 150: Patch vs Section Depth
+python scripts/prepare_dutchf3.py split_train_val patch --data-dir=/mnt/dutch --stride=50 --patch=150
+python train.py OUTPUT_DIR /data/output/hrnet_patch TRAIN.DEPTH patch TRAIN.PATCH_SIZE 150 --cfg 'configs/hrnet.yaml'
+python train.py OUTPUT_DIR /data/output/hrnet_section TRAIN.DEPTH section TRAIN.PATCH_SIZE 150 --cfg 'configs/hrnet.yaml'
+
+# Patch_Size 200: Patch vs Section Depth
+python scripts/prepare_dutchf3.py split_train_val patch --data-dir=/mnt/dutch --stride=50 --patch=200
+python train.py OUTPUT_DIR /data/output/hrnet_patch TRAIN.DEPTH patch TRAIN.PATCH_SIZE 200 --cfg 'configs/hrnet.yaml'
+python train.py OUTPUT_DIR /data/output/hrnet_section TRAIN.DEPTH section TRAIN.PATCH_SIZE 200 --cfg 'configs/hrnet.yaml'
+
+# Patch_Size 250: Patch vs Section Depth
+python scripts/prepare_dutchf3.py split_train_val patch --data-dir=/mnt/dutch --stride=50 --patch=250
+python train.py OUTPUT_DIR /data/output/hrnet_patch TRAIN.DEPTH patch TRAIN.PATCH_SIZE 250 TRAIN.AUGMENTATIONS.RESIZE.HEIGHT 250 TRAIN.AUGMENTATIONS.RESIZE.WIDTH 250 --cfg 'configs/hrnet.yaml'
+python train.py OUTPUT_DIR /data/output/hrnet_section TRAIN.DEPTH section TRAIN.PATCH_SIZE 250 TRAIN.AUGMENTATIONS.RESIZE.HEIGHT 250 TRAIN.AUGMENTATIONS.RESIZE.WIDTH 250 --cfg 'configs/hrnet.yaml'
+
diff --git a/contrib/scripts/download_hrnet.sh b/contrib/scripts/download_hrnet.sh
new file mode 100755
index 00000000..157f20c3
--- /dev/null
+++ b/contrib/scripts/download_hrnet.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+#
+# Example:
+# download_hrnet.sh /data/models hrnet.pth
+#
+
+echo Using "$1" as the download directory
+
+if [ ! -d "$1" ]
+then
+ echo "Directory does not exist - creating..."
+ mkdir -p "$1"
+fi
+
+full_path=$1/$2
+
+echo "Downloading to ${full_path}"
+
+wget --header 'Host: optgaw.dm.files.1drv.com' \
+ --user-agent 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:70.0) Gecko/20100101 Firefox/70.0' \
+ --header 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8' \
+ --header 'Accept-Language: en-GB,en;q=0.5' \
+ --referer 'https://onedrive.live.com/' \
+ --header 'Upgrade-Insecure-Requests: 1' 'https://optgaw.dm.files.1drv.com/y4m14W1OEuoniQMCT4m64UV8CSQT-dFe2ZRhU0LAZSal80V4phgVIlTYxI2tUi6BPVOy7l5rK8MKpZNywVvtz-NKL2ZWq-UYRL6MAjbLgdFA6zyW8RRrKBe_FcqcWr4YTXeJ18xfVqco6CdGZHFfORBE6EtFxEIrHWNjM032dWZLdqZ0eXd7RZTrHs1KKYa92zcs0Rj91CAyIK4hIaOomzEWA/hrnetv2_w48_imagenet_pretrained.pth?download&psid=1' \
+ --output-document ${full_path}
\ No newline at end of file
diff --git a/contrib/scripts/get_F3_voxel.sh b/contrib/scripts/get_F3_voxel.sh
new file mode 100755
index 00000000..850d73f8
--- /dev/null
+++ b/contrib/scripts/get_F3_voxel.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+echo "Make sure you also download Dutch F3 data from https://github.com/bolgebrygg/MalenoV"
+# fetch Dutch F3 from Malenov project.
+# wget https://drive.google.com/open?id=0B7brcf-eGK8CUUZKLXJURFNYeXM -O interpretation/voxel2pixel/F3/data.segy
+
+if [ $# -eq 0 ]
+then
+ downdirtrain='experiments/interpretation/voxel2pixel/F3/train'
+ downdirval='experiments/interpretation/voxel2pixel/F3/val'
+else
+ downdirtrain=$1
+ downdirval=$1
+fi
+
+mkdir -p ${downdirtrain}
+mkdir -p ${downdirval}
+
+echo "Downloading train label to $downdirtrain and validation label to $downdirval"
+wget https://github.com/waldeland/CNN-for-ASI/raw/master/F3/train/inline_339.png -O ${downdirtrain}/inline_339.png
+wget https://github.com/waldeland/CNN-for-ASI/raw/master/F3/val/inline_405.png -O ${downdirval}/inline_405.png
+echo "Download complete"
diff --git a/cv_lib/AUTHORS.md b/cv_lib/AUTHORS.md
new file mode 100644
index 00000000..173bb039
--- /dev/null
+++ b/cv_lib/AUTHORS.md
@@ -0,0 +1 @@
+[Mathew Salvaris] [@msalvaris](http://github.com/msalvaris/)
diff --git a/cv_lib/README.md b/cv_lib/README.md
new file mode 100644
index 00000000..7ff14865
--- /dev/null
+++ b/cv_lib/README.md
@@ -0,0 +1,11 @@
+# CVLib
+
+A set of utility functions for computer vision.
+
+## Install
+
+```bash
+pip install -e .
+```
+
+This will install the `cv_lib` package.
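+
+## Usage
+
+A quick sanity-check sketch after installation (the imported modules are part of this package; the dummy engine and data are purely illustrative):
+
+```python
+from ignite.engine import Engine, Events
+from cv_lib.event_handlers.logging_handlers import log_training_output
+
+# dummy ignite engine whose output mimics a training step returning a loss
+trainer = Engine(lambda engine, batch: {"loss": 0.0})
+# log the loss every 100 iterations using the curried handler from cv_lib
+trainer.add_event_handler(Events.ITERATION_COMPLETED, log_training_output(log_interval=100))
+trainer.run([0] * 200, max_epochs=1)
+```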
diff --git a/cv_lib/cv_lib/__init__.py b/cv_lib/cv_lib/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/cv_lib/cv_lib/__version__.py b/cv_lib/cv_lib/__version__.py
new file mode 100644
index 00000000..97b8b400
--- /dev/null
+++ b/cv_lib/cv_lib/__version__.py
@@ -0,0 +1,4 @@
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+
+__version__ = "0.0.1"
diff --git a/cv_lib/cv_lib/event_handlers/__init__.py b/cv_lib/cv_lib/event_handlers/__init__.py
new file mode 100644
index 00000000..589bbd86
--- /dev/null
+++ b/cv_lib/cv_lib/event_handlers/__init__.py
@@ -0,0 +1,42 @@
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+
+from ignite.handlers import ModelCheckpoint
+import glob
+import os
+from shutil import copyfile
+
+
+class SnapshotHandler:
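+    """Checkpoint the running model and optionally promote it to a numbered snapshot.
+
+    Wraps ignite's ModelCheckpoint: ``score_function`` decides which running checkpoint
+    is kept, and whenever ``snapshot_function()`` returns True the current running
+    checkpoint is copied to a ``<filename_prefix>_snapshot<N>...`` file and the
+    checkpoint handler is reset.
+    """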
+ def __init__(self, dir_name, filename_prefix, score_function, snapshot_function):
+ self._model_save_location = dir_name
+ self._running_model_prefix = filename_prefix + "_running"
+ self._snapshot_prefix = filename_prefix + "_snapshot"
+ self._snapshot_function = snapshot_function
+ self._snapshot_num = 1
+ self._score_function = score_function
+ self._checkpoint_handler = self._create_checkpoint_handler()
+
+ def _create_checkpoint_handler(self):
+ return ModelCheckpoint(
+ self._model_save_location,
+ self._running_model_prefix,
+ score_function=self._score_function,
+ n_saved=1,
+ create_dir=True,
+ save_as_state_dict=True,
+ require_empty=False,
+ )
+
+ def __call__(self, engine, to_save):
+ self._checkpoint_handler(engine, to_save)
+ if self._snapshot_function():
+ files = glob.glob(os.path.join(self._model_save_location, self._running_model_prefix + "*"))
+ print(files)
+ name_postfix = os.path.basename(files[0]).lstrip(self._running_model_prefix)
+ copyfile(
+ files[0],
+ os.path.join(self._model_save_location, f"{self._snapshot_prefix}{self._snapshot_num}{name_postfix}",),
+ )
+ self._checkpoint_handler = self._create_checkpoint_handler() # Reset the checkpoint handler
+ self._snapshot_num += 1
diff --git a/cv_lib/cv_lib/event_handlers/azureml_handlers.py b/cv_lib/cv_lib/event_handlers/azureml_handlers.py
new file mode 100644
index 00000000..e69de29b
diff --git a/cv_lib/cv_lib/event_handlers/logging_handlers.py b/cv_lib/cv_lib/event_handlers/logging_handlers.py
new file mode 100644
index 00000000..b7c41651
--- /dev/null
+++ b/cv_lib/cv_lib/event_handlers/logging_handlers.py
@@ -0,0 +1,90 @@
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+
+import logging
+import logging.config
+from toolz import curry
+
+import numpy as np
+
+np.set_printoptions(precision=3)
+
+
+@curry
+def log_training_output(engine, log_interval=100):
+ logger = logging.getLogger(__name__)
+
+ if engine.state.iteration % log_interval == 0:
+ logger.info(f"Epoch: {engine.state.epoch} Iter: {engine.state.iteration} loss {engine.state.output['loss']}")
+
+
+@curry
+def log_lr(optimizer, engine):
+ logger = logging.getLogger(__name__)
+ lr = [param_group["lr"] for param_group in optimizer.param_groups]
+ logger.info(f"lr - {lr}")
+
+
+_DEFAULT_METRICS = {"pixacc": "Avg accuracy :", "nll": "Avg loss :"}
+
+
+@curry
+def log_metrics(log_msg, engine, metrics_dict=_DEFAULT_METRICS):
+ logger = logging.getLogger(__name__)
+ metrics = engine.state.metrics
+ metrics_msg = " ".join([f"{metrics_dict[k]} {metrics[k]:.2f}" for k in metrics_dict])
+ logger.info(f"{log_msg} - Epoch {engine.state.epoch} [{engine.state.max_epochs}] " + metrics_msg)
+
+
+@curry
+def log_class_metrics(log_msg, engine, metrics_dict):
+ logger = logging.getLogger(__name__)
+ metrics = engine.state.metrics
+ metrics_msg = "\n".join(f"{metrics_dict[k]} {metrics[k].numpy()}" for k in metrics_dict)
+ logger.info(f"{log_msg} - Epoch {engine.state.epoch} [{engine.state.max_epochs}]\n" + metrics_msg)
+
+
+class Evaluator:
+ def __init__(self, evaluation_engine, data_loader):
+ self._evaluation_engine = evaluation_engine
+ self._data_loader = data_loader
+
+ def __call__(self, engine):
+ self._evaluation_engine.run(self._data_loader)
+
+
+class HorovodLRScheduler:
+ """
+ Horovod: using `lr = base_lr * hvd.size()` from the very beginning leads to worse final
+    accuracy. Scale the learning rate from `lr = base_lr` to `lr = base_lr * hvd.size()` over
+    the first ``warmup_epochs`` epochs. See https://arxiv.org/abs/1706.02677 for details.
+    After the warm-up, reduce the learning rate by a factor of 10 at the 30th, 60th and 80th epochs.
+ """
+
+ def __init__(
+ self, base_lr, warmup_epochs, cluster_size, data_loader, optimizer, batches_per_allreduce,
+ ):
+ self._warmup_epochs = warmup_epochs
+ self._cluster_size = cluster_size
+ self._data_loader = data_loader
+ self._optimizer = optimizer
+ self._base_lr = base_lr
+ self._batches_per_allreduce = batches_per_allreduce
+ self._logger = logging.getLogger(__name__)
+
+ def __call__(self, engine):
+ epoch = engine.state.epoch
+ if epoch < self._warmup_epochs:
+ epoch += float(engine.state.iteration + 1) / len(self._data_loader)
+ lr_adj = 1.0 / self._cluster_size * (epoch * (self._cluster_size - 1) / self._warmup_epochs + 1)
+ elif epoch < 30:
+ lr_adj = 1.0
+ elif epoch < 60:
+ lr_adj = 1e-1
+ elif epoch < 80:
+ lr_adj = 1e-2
+ else:
+ lr_adj = 1e-3
+ for param_group in self._optimizer.param_groups:
+ param_group["lr"] = self._base_lr * self._cluster_size * self._batches_per_allreduce * lr_adj
+ self._logger.debug(f"Adjust learning rate {param_group['lr']}")
diff --git a/cv_lib/cv_lib/event_handlers/tensorboard_handlers.py b/cv_lib/cv_lib/event_handlers/tensorboard_handlers.py
new file mode 100644
index 00000000..654c9b4d
--- /dev/null
+++ b/cv_lib/cv_lib/event_handlers/tensorboard_handlers.py
@@ -0,0 +1,69 @@
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+
+from toolz import curry
+import torchvision
+import logging
+import logging.config
+
+try:
+ from tensorboardX import SummaryWriter
+except ImportError:
+ raise RuntimeError("No tensorboardX package is found. Please install with the command: \npip install tensorboardX")
+
+
+def create_summary_writer(log_dir):
+ writer = SummaryWriter(logdir=log_dir)
+ return writer
+
+
+def _log_model_output(log_label, summary_writer, engine):
+ summary_writer.add_scalar(log_label, engine.state.output["loss"], engine.state.iteration)
+
+
+@curry
+def log_training_output(summary_writer, engine):
+ _log_model_output("training/loss", summary_writer, engine)
+
+
+@curry
+def log_validation_output(summary_writer, engine):
+ _log_model_output("validation/loss", summary_writer, engine)
+
+
+@curry
+def log_lr(summary_writer, optimizer, log_interval, engine):
+ """[summary]
+
+ Args:
+ optimizer ([type]): [description]
+ log_interval ([type]): iteration or epoch
+ summary_writer ([type]): [description]
+ engine ([type]): [description]
+ """
+ lr = [param_group["lr"] for param_group in optimizer.param_groups]
+ summary_writer.add_scalar("lr", lr[0], getattr(engine.state, log_interval))
+
+
+_DEFAULT_METRICS = {"accuracy": "Avg accuracy :", "nll": "Avg loss :"}
+
+
+@curry
+def log_metrics(summary_writer, train_engine, log_interval, engine, metrics_dict=_DEFAULT_METRICS):
+ metrics = engine.state.metrics
+ for m in metrics_dict:
+ summary_writer.add_scalar(metrics_dict[m], metrics[m], getattr(train_engine.state, log_interval))
+
+
+def create_image_writer(summary_writer, label, output_variable, normalize=False, transform_func=lambda x: x):
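+    """Return an ignite handler that writes ``engine.state.output[output_variable]``
+    as an image grid to TensorBoard under ``label`` (logged against the epoch number)."""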
+ logger = logging.getLogger(__name__)
+
+ def write_to(engine):
+ try:
+ data_tensor = transform_func(engine.state.output[output_variable])
+ image_grid = torchvision.utils.make_grid(data_tensor, normalize=normalize, scale_each=True)
+ summary_writer.add_image(label, image_grid, engine.state.epoch)
+ except KeyError:
+ logger.warning("Predictions and or ground truth labels not available to report")
+
+ return write_to
diff --git a/cv_lib/cv_lib/segmentation/__init__.py b/cv_lib/cv_lib/segmentation/__init__.py
new file mode 100644
index 00000000..4306a4e0
--- /dev/null
+++ b/cv_lib/cv_lib/segmentation/__init__.py
@@ -0,0 +1,17 @@
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+
+from toolz import curry
+import torch.nn.functional as F
+
+
+@curry
+def extract_metric_from(metric, engine):
+ metrics = engine.state.metrics
+ return metrics[metric]
+
+
+@curry
+def padded_val_transform(pad_left, fine_size, x, y, y_pred):
+ y_pred = y_pred[:, :, pad_left : pad_left + fine_size, pad_left : pad_left + fine_size].contiguous()
+ return {"image": x, "y_pred": F.sigmoid(y_pred).detach(), "mask": y.detach()}
diff --git a/cv_lib/cv_lib/segmentation/dutchf3/augmentations.py b/cv_lib/cv_lib/segmentation/dutchf3/augmentations.py
new file mode 100644
index 00000000..e4df608f
--- /dev/null
+++ b/cv_lib/cv_lib/segmentation/dutchf3/augmentations.py
@@ -0,0 +1,221 @@
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+
+import math
+import numbers
+import random
+import numpy as np
+
+from PIL import Image, ImageOps
+
+
+class Compose(object):
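+    """Apply a sequence of paired image/mask augmentations.
+
+    Converts the incoming numpy arrays to PIL images, applies each augmentation to
+    both image and mask, and converts the results back to numpy arrays.
+    """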
+ def __init__(self, augmentations):
+ self.augmentations = augmentations
+
+ def __call__(self, img, mask):
+
+ img, mask = Image.fromarray(img, mode=None), Image.fromarray(mask, mode="L")
+ assert img.size == mask.size
+
+ for a in self.augmentations:
+ img, mask = a(img, mask)
+ return np.array(img), np.array(mask, dtype=np.uint8)
+
+
+class AddNoise(object):
+ def __call__(self, img, mask):
+ noise = np.random.normal(loc=0, scale=0.02, size=(img.size[1], img.size[0]))
+ return img + noise, mask
+
+
+class RandomCrop(object):
+ def __init__(self, size, padding=0):
+ if isinstance(size, numbers.Number):
+ self.size = (int(size), int(size))
+ else:
+ self.size = size
+ self.padding = padding
+
+ def __call__(self, img, mask):
+ if self.padding > 0:
+ img = ImageOps.expand(img, border=self.padding, fill=0)
+ mask = ImageOps.expand(mask, border=self.padding, fill=0)
+
+ assert img.size == mask.size
+ w, h = img.size
+ th, tw = self.size
+ if w == tw and h == th:
+ return img, mask
+ if w < tw or h < th:
+ return (
+ img.resize((tw, th), Image.BILINEAR),
+ mask.resize((tw, th), Image.NEAREST),
+ )
+
+ x1 = random.randint(0, w - tw)
+ y1 = random.randint(0, h - th)
+ return (
+ img.crop((x1, y1, x1 + tw, y1 + th)),
+ mask.crop((x1, y1, x1 + tw, y1 + th)),
+ )
+
+
+class CenterCrop(object):
+ def __init__(self, size):
+ if isinstance(size, numbers.Number):
+ self.size = (int(size), int(size))
+ else:
+ self.size = size
+
+ def __call__(self, img, mask):
+ assert img.size == mask.size
+ w, h = img.size
+ th, tw = self.size
+ x1 = int(round((w - tw) / 2.0))
+ y1 = int(round((h - th) / 2.0))
+ return (
+ img.crop((x1, y1, x1 + tw, y1 + th)),
+ mask.crop((x1, y1, x1 + tw, y1 + th)),
+ )
+
+
+class RandomHorizontallyFlip(object):
+ def __call__(self, img, mask):
+ if random.random() < 0.5:
+            # Note: we use FLIP_TOP_BOTTOM here intentionally. Due to the dimensions of the image,
+ # it ends up being a horizontal flip.
+ return (
+ img.transpose(Image.FLIP_TOP_BOTTOM),
+ mask.transpose(Image.FLIP_TOP_BOTTOM),
+ )
+ return img, mask
+
+
+class RandomVerticallyFlip(object):
+ def __call__(self, img, mask):
+ if random.random() < 0.5:
+ return (
+ img.transpose(Image.FLIP_LEFT_RIGHT),
+ mask.transpose(Image.FLIP_LEFT_RIGHT),
+ )
+ return img, mask
+
+
+class FreeScale(object):
+ def __init__(self, size):
+ self.size = tuple(reversed(size)) # size: (h, w)
+
+ def __call__(self, img, mask):
+ assert img.size == mask.size
+ return (
+ img.resize(self.size, Image.BILINEAR),
+ mask.resize(self.size, Image.NEAREST),
+ )
+
+
+class Scale(object):
+ def __init__(self, size):
+ self.size = size
+
+ def __call__(self, img, mask):
+ assert img.size == mask.size
+ w, h = img.size
+ if (w >= h and w == self.size) or (h >= w and h == self.size):
+ return img, mask
+ if w > h:
+ ow = self.size
+ oh = int(self.size * h / w)
+ return (
+ img.resize((ow, oh), Image.BILINEAR),
+ mask.resize((ow, oh), Image.NEAREST),
+ )
+ else:
+ oh = self.size
+ ow = int(self.size * w / h)
+ return (
+ img.resize((ow, oh), Image.BILINEAR),
+ mask.resize((ow, oh), Image.NEAREST),
+ )
+
+
+class RandomSizedCrop(object):
+ def __init__(self, size):
+ self.size = size
+
+ def __call__(self, img, mask):
+ assert img.size == mask.size
+ for attempt in range(10):
+ area = img.size[0] * img.size[1]
+ target_area = random.uniform(0.45, 1.0) * area
+ aspect_ratio = random.uniform(0.5, 2)
+
+ w = int(round(math.sqrt(target_area * aspect_ratio)))
+ h = int(round(math.sqrt(target_area / aspect_ratio)))
+
+ if random.random() < 0.5:
+ w, h = h, w
+
+ if w <= img.size[0] and h <= img.size[1]:
+ x1 = random.randint(0, img.size[0] - w)
+ y1 = random.randint(0, img.size[1] - h)
+
+ img = img.crop((x1, y1, x1 + w, y1 + h))
+ mask = mask.crop((x1, y1, x1 + w, y1 + h))
+ assert img.size == (w, h)
+
+ return (
+ img.resize((self.size, self.size), Image.BILINEAR),
+ mask.resize((self.size, self.size), Image.NEAREST),
+ )
+
+ # Fallback
+ scale = Scale(self.size)
+ crop = CenterCrop(self.size)
+ return crop(*scale(img, mask))
+
+
+class RandomRotate(object):
+ def __init__(self, degree):
+ self.degree = degree
+
+ def __call__(self, img, mask):
+ """
+        PIL pads the borders of rotated images with zeros. To compensate, the code below
+        sets every label (mask) position that falls outside the rotated image to
+        255 (the value used for ignore_index).
+ """
+ rotate_degree = random.random() * 2 * self.degree - self.degree
+
+ img = img.rotate(rotate_degree, Image.BILINEAR)
+ mask = mask.rotate(rotate_degree, Image.NEAREST)
+
+ binary_mask = Image.fromarray(np.ones([mask.size[1], mask.size[0]]))
+ binary_mask = binary_mask.rotate(rotate_degree, Image.NEAREST)
+ binary_mask = np.array(binary_mask)
+
+ mask_arr = np.array(mask)
+ mask_arr[binary_mask == 0] = 255
+ mask = Image.fromarray(mask_arr)
+
+ return img, mask
+
+
+class RandomSized(object):
+ def __init__(self, size):
+ self.size = size
+ self.scale = Scale(self.size)
+ self.crop = RandomCrop(self.size)
+
+ def __call__(self, img, mask):
+ assert img.size == mask.size
+
+ w = int(random.uniform(0.5, 2) * img.size[0])
+ h = int(random.uniform(0.5, 2) * img.size[1])
+
+ img, mask = (
+ img.resize((w, h), Image.BILINEAR),
+ mask.resize((w, h), Image.NEAREST),
+ )
+
+ return self.crop(*self.scale(img, mask))
diff --git a/cv_lib/cv_lib/segmentation/dutchf3/engine.py b/cv_lib/cv_lib/segmentation/dutchf3/engine.py
new file mode 100644
index 00000000..c137af5c
--- /dev/null
+++ b/cv_lib/cv_lib/segmentation/dutchf3/engine.py
@@ -0,0 +1,130 @@
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+
+import torch
+
+from ignite.engine.engine import Engine, State, Events
+from ignite.utils import convert_tensor
+import torch.nn.functional as F
+from toolz import curry
+import numpy as np
+
+
+def _upscale_model_output(y_pred, y):
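+    # Bilinearly upsample the prediction to the reference tensor's spatial size when they
+    # differ (e.g. for models whose output stride is larger than 1).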
+ ph, pw = y_pred.size(2), y_pred.size(3)
+ h, w = y.size(2), y.size(3)
+ if ph != h or pw != w:
+ y_pred = F.upsample(input=y_pred, size=(h, w), mode="bilinear")
+ return y_pred
+
+
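+# Illustrative usage (model, optimizer, criterion, prepare_batch and train_loader are
+# assumed to come from the calling training script):
+#   trainer = create_supervised_trainer(model, optimizer, criterion, prepare_batch, device="cuda")
+#   trainer.run(train_loader, max_epochs=300)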
+def create_supervised_trainer(
+ model,
+ optimizer,
+ loss_fn,
+ prepare_batch,
+ device=None,
+ non_blocking=False,
+ output_transform=lambda x, y, y_pred, loss: {"loss": loss.item()},
+):
+ if device:
+ model.to(device)
+
+ def _update(engine, batch):
+ model.train()
+ optimizer.zero_grad()
+ x, y = prepare_batch(batch, device=device, non_blocking=non_blocking)
+ y_pred = model(x)
+ y_pred = _upscale_model_output(y_pred, y)
+ loss = loss_fn(y_pred.squeeze(1), y.squeeze(1))
+ loss.backward()
+ optimizer.step()
+ return output_transform(x, y, y_pred, loss)
+
+ return Engine(_update)
+
+
+@curry
+def val_transform(x, y, y_pred):
+ return {"image": x, "y_pred": y_pred.detach(), "mask": y.detach()}
+
+
+def create_supervised_evaluator(
+ model, prepare_batch, metrics=None, device=None, non_blocking=False, output_transform=val_transform,
+):
+ metrics = metrics or {}
+
+ if device:
+ model.to(device)
+
+ def _inference(engine, batch):
+ model.eval()
+ with torch.no_grad():
+ x, y = prepare_batch(batch, device=device, non_blocking=non_blocking)
+ y_pred = model(x)
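+            # Use the input x as the spatial-size reference for upscaling; the mask y may not
+            # carry a channel dimension here.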
+ y_pred = _upscale_model_output(y_pred, x)
+ return output_transform(x, y, y_pred)
+
+ engine = Engine(_inference)
+
+ for name, metric in metrics.items():
+ metric.attach(engine, name)
+
+ return engine
+
+
+def create_supervised_trainer_apex(
+ model,
+ optimizer,
+ loss_fn,
+ prepare_batch,
+ device=None,
+ non_blocking=False,
+ output_transform=lambda x, y, y_pred, loss: {"loss": loss.item()},
+):
+ from apex import amp
+
+ if device:
+ model.to(device)
+
+ def _update(engine, batch):
+ model.train()
+ optimizer.zero_grad()
+ x, y = prepare_batch(batch, device=device, non_blocking=non_blocking)
+ y_pred = model(x)
+ loss = loss_fn(y_pred.squeeze(1), y.squeeze(1))
+ with amp.scale_loss(loss, optimizer) as scaled_loss:
+ scaled_loss.backward()
+ optimizer.step()
+ return output_transform(x, y, y_pred, loss)
+
+ return Engine(_update)
+
+
+# def create_supervised_evaluator_apex(
+# model,
+# prepare_batch,
+# metrics=None,
+# device=None,
+# non_blocking=False,
+#     output_transform=lambda x, y, y_pred: (x, y, y_pred),
+# ):
+# metrics = metrics or {}
+
+# if device:
+# model.to(device)
+
+# def _inference(engine, batch):
+# model.eval()
+# with torch.no_grad():
+# x, y = prepare_batch(batch, device=device, non_blocking=non_blocking)
+# y_pred = model(x)
+# return output_transform(x, y, y_pred)
+
+# engine = Engine(_inference)
+
+# for name, metric in metrics.items():
+# metric.attach(engine, name)
+
+# return engine
diff --git a/cv_lib/cv_lib/segmentation/dutchf3/utils.py b/cv_lib/cv_lib/segmentation/dutchf3/utils.py
new file mode 100644
index 00000000..adad1e97
--- /dev/null
+++ b/cv_lib/cv_lib/segmentation/dutchf3/utils.py
@@ -0,0 +1,46 @@
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+
+import numpy as np
+import torch
+from git import Repo
+from datetime import datetime
+import os
+
+
+def np_to_tb(array):
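+    # Convert a 2D (HW) or 3D (HWC) numpy array to a 4D NCHW torch tensor suitable for
+    # TensorBoard image logging.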
+ # if 2D :
+ if array.ndim == 2:
+ # HW => CHW
+ array = np.expand_dims(array, axis=0)
+ # CHW => NCHW
+ array = np.expand_dims(array, axis=0)
+ elif array.ndim == 3:
+ # HWC => CHW
+ array = array.transpose(2, 0, 1)
+ # CHW => NCHW
+ array = np.expand_dims(array, axis=0)
+
+ array = torch.from_numpy(array)
+ return array
+
+
+def current_datetime():
+ return datetime.now().strftime("%b%d_%H%M%S")
+
+
+def git_branch():
+ repo = Repo(search_parent_directories=True)
+ return repo.active_branch.name
+
+
+def git_hash():
+ repo = Repo(search_parent_directories=True)
+ return repo.active_branch.commit.hexsha
+
+
+def generate_path(base_path, *directories):
+ path = os.path.join(base_path, *directories)
+ if not os.path.exists(path):
+ os.makedirs(path)
+ return path
diff --git a/cv_lib/cv_lib/segmentation/metrics.py b/cv_lib/cv_lib/segmentation/metrics.py
new file mode 100644
index 00000000..2d28a954
--- /dev/null
+++ b/cv_lib/cv_lib/segmentation/metrics.py
@@ -0,0 +1,94 @@
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+
+import torch
+import ignite
+
+
+def pixelwise_accuracy(num_classes, output_transform=lambda x: x, device=None):
+    """Calculates pixelwise accuracy
+
+ Args:
+ num_classes (int): number of classes
+ output_transform (callable, optional): a callable that is used to transform the
+ output into the form expected by the metric.
+
+ Returns:
+ MetricsLambda
+
+ """
+ cm = ignite.metrics.ConfusionMatrix(num_classes=num_classes, output_transform=output_transform, device=device)
+ # Increase floating point precision and pass to CPU
+ cm = cm.type(torch.DoubleTensor)
+
+ pix_cls = ignite.metrics.confusion_matrix.cmAccuracy(cm)
+
+ return pix_cls
+
+
+def class_accuracy(num_classes, output_transform=lambda x: x, device=None):
+ """Calculates class accuracy
+
+ Args:
+ num_classes (int): number of classes
+ output_transform (callable, optional): a callable that is used to transform the
+ output into the form expected by the metric.
+
+ Returns:
+ MetricsLambda
+
+ """
+ cm = ignite.metrics.ConfusionMatrix(num_classes=num_classes, output_transform=output_transform, device=device)
+ # Increase floating point precision and pass to CPU
+ cm = cm.type(torch.DoubleTensor)
+
+ acc_cls = cm.diag() / (cm.sum(dim=1) + 1e-15)
+
+ return acc_cls
+
+
+def mean_class_accuracy(num_classes, output_transform=lambda x: x, device=None):
+ """Calculates mean class accuracy
+
+ Args:
+ num_classes (int): number of classes
+ output_transform (callable, optional): a callable that is used to transform the
+ output into the form expected by the metric.
+
+ Returns:
+ MetricsLambda
+
+ """
+ return class_accuracy(num_classes=num_classes, output_transform=output_transform, device=device).mean()
+
+
+def class_iou(num_classes, output_transform=lambda x: x, device=None, ignore_index=None):
+ """Calculates per-class intersection-over-union
+
+ Args:
+ num_classes (int): number of classes
+ output_transform (callable, optional): a callable that is used to transform the
+ output into the form expected by the metric.
+
+ Returns:
+ MetricsLambda
+
+ """
+ cm = ignite.metrics.ConfusionMatrix(num_classes=num_classes, output_transform=output_transform, device=device)
+ return ignite.metrics.IoU(cm, ignore_index=ignore_index)
+
+
+def mean_iou(num_classes, output_transform=lambda x: x, device=None, ignore_index=None):
+ """Calculates mean intersection-over-union
+
+ Args:
+ num_classes (int): number of classes
+ output_transform (callable, optional): a callable that is used to transform the
+ output into the form expected by the metric.
+
+ Returns:
+ MetricsLambda
+
+ """
+ cm = ignite.metrics.ConfusionMatrix(num_classes=num_classes, output_transform=output_transform, device=device)
+ return ignite.metrics.mIoU(cm, ignore_index=ignore_index)
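+
+
+# Illustrative usage (the evaluator wiring below is an assumption, not part of this file):
+#   metrics = {"pixacc": pixelwise_accuracy(n_classes), "mIoU": mean_iou(n_classes)}
+#   evaluator = create_supervised_evaluator(model, prepare_batch, metrics=metrics)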
diff --git a/cv_lib/cv_lib/segmentation/models/__init__.py b/cv_lib/cv_lib/segmentation/models/__init__.py
new file mode 100644
index 00000000..11d443e4
--- /dev/null
+++ b/cv_lib/cv_lib/segmentation/models/__init__.py
@@ -0,0 +1,10 @@
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+
+import cv_lib.segmentation.models.seg_hrnet # noqa: F401
+import cv_lib.segmentation.models.resnet_unet # noqa: F401
+import cv_lib.segmentation.models.unet # noqa: F401
+import cv_lib.segmentation.models.section_deconvnet # noqa: F401
+import cv_lib.segmentation.models.patch_deconvnet # noqa: F401
+import cv_lib.segmentation.models.patch_deconvnet_skip # noqa: F401
+import cv_lib.segmentation.models.section_deconvnet_skip # noqa: F401
diff --git a/cv_lib/cv_lib/segmentation/models/patch_deconvnet.py b/cv_lib/cv_lib/segmentation/models/patch_deconvnet.py
new file mode 100644
index 00000000..4ee1ed59
--- /dev/null
+++ b/cv_lib/cv_lib/segmentation/models/patch_deconvnet.py
@@ -0,0 +1,308 @@
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+
+import torch.nn as nn
+
+
+class patch_deconvnet(nn.Module):
+ def __init__(self, n_classes=4, learned_billinear=False):
+ super(patch_deconvnet, self).__init__()
+ self.learned_billinear = learned_billinear
+ self.n_classes = n_classes
+ self.unpool = nn.MaxUnpool2d(2, stride=2)
+ self.conv_block1 = nn.Sequential(
+ # conv1_1
+ nn.Conv2d(1, 64, 3, padding=1),
+ nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv1_2
+ nn.Conv2d(64, 64, 3, padding=1),
+ nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # pool1
+ nn.MaxPool2d(2, stride=2, return_indices=True, ceil_mode=True),
+ )
+ # it returns outputs and pool_indices_1
+
+ # 48*48
+
+ self.conv_block2 = nn.Sequential(
+ # conv2_1
+ nn.Conv2d(64, 128, 3, padding=1),
+ nn.BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv2_2
+ nn.Conv2d(128, 128, 3, padding=1),
+ nn.BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # pool2
+ nn.MaxPool2d(2, stride=2, return_indices=True, ceil_mode=True),
+ )
+ # it returns outputs and pool_indices_2
+
+ # 24*24
+
+ self.conv_block3 = nn.Sequential(
+ # conv3_1
+ nn.Conv2d(128, 256, 3, padding=1),
+ nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv3_2
+ nn.Conv2d(256, 256, 3, padding=1),
+ nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv3_3
+ nn.Conv2d(256, 256, 3, padding=1),
+ nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # pool3
+ nn.MaxPool2d(2, stride=2, return_indices=True, ceil_mode=True),
+ )
+ # it returns outputs and pool_indices_3
+
+ # 12*12
+
+ self.conv_block4 = nn.Sequential(
+ # conv4_1
+ nn.Conv2d(256, 512, 3, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv4_2
+ nn.Conv2d(512, 512, 3, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv4_3
+ nn.Conv2d(512, 512, 3, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # pool4
+ nn.MaxPool2d(2, stride=2, return_indices=True, ceil_mode=True),
+ )
+ # it returns outputs and pool_indices_4
+
+ # 6*6
+
+ self.conv_block5 = nn.Sequential(
+ # conv5_1
+ nn.Conv2d(512, 512, 3, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv5_2
+ nn.Conv2d(512, 512, 3, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv5_3
+ nn.Conv2d(512, 512, 3, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # pool5
+ nn.MaxPool2d(2, stride=2, return_indices=True, ceil_mode=True),
+ )
+ # it returns outputs and pool_indices_5
+
+ # 3*3
+
+ self.conv_block6 = nn.Sequential(
+ # fc6
+ nn.Conv2d(512, 4096, 3),
+            # set the filter size and no padding to make the output 1*1
+ nn.BatchNorm2d(4096, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ # 1*1
+
+ self.conv_block7 = nn.Sequential(
+ # fc7
+ nn.Conv2d(4096, 4096, 1),
+ # set the filter size to make output into 1*1
+ nn.BatchNorm2d(4096, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ self.deconv_block8 = nn.Sequential(
+ # fc6-deconv
+ nn.ConvTranspose2d(4096, 512, 3, stride=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ # 3*3
+
+ self.unpool_block9 = nn.Sequential(
+ # unpool5
+ nn.MaxUnpool2d(2, stride=2),
+ )
+ # usage unpool(output, indices)
+
+ # 6*6
+
+ self.deconv_block10 = nn.Sequential(
+ # deconv5_1
+ nn.ConvTranspose2d(512, 512, 3, stride=1, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv5_2
+ nn.ConvTranspose2d(512, 512, 3, stride=1, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv5_3
+ nn.ConvTranspose2d(512, 512, 3, stride=1, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ self.unpool_block11 = nn.Sequential(
+ # unpool4
+ nn.MaxUnpool2d(2, stride=2),
+ )
+
+ # 12*12
+
+ self.deconv_block12 = nn.Sequential(
+ # deconv4_1
+ nn.ConvTranspose2d(512, 512, 3, stride=1, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv4_2
+ nn.ConvTranspose2d(512, 512, 3, stride=1, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv4_3
+ nn.ConvTranspose2d(512, 256, 3, stride=1, padding=1),
+ nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ self.unpool_block13 = nn.Sequential(
+ # unpool3
+ nn.MaxUnpool2d(2, stride=2),
+ )
+
+ # 24*24
+
+ self.deconv_block14 = nn.Sequential(
+ # deconv3_1
+ nn.ConvTranspose2d(256, 256, 3, stride=1, padding=1),
+ nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv3_2
+ nn.ConvTranspose2d(256, 256, 3, stride=1, padding=1),
+ nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv3_3
+ nn.ConvTranspose2d(256, 128, 3, stride=1, padding=1),
+ nn.BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ self.unpool_block15 = nn.Sequential(
+ # unpool2
+ nn.MaxUnpool2d(2, stride=2),
+ )
+
+ # 48*48
+
+ self.deconv_block16 = nn.Sequential(
+ # deconv2_1
+ nn.ConvTranspose2d(128, 128, 3, stride=1, padding=1),
+ nn.BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv2_2
+ nn.ConvTranspose2d(128, 64, 3, stride=1, padding=1),
+ nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ self.unpool_block17 = nn.Sequential(
+ # unpool1
+ nn.MaxUnpool2d(2, stride=2),
+ )
+
+ # 96*96
+
+ self.deconv_block18 = nn.Sequential(
+ # deconv1_1
+ nn.ConvTranspose2d(64, 64, 3, stride=1, padding=1),
+ nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv1_2
+ nn.ConvTranspose2d(64, 64, 3, stride=1, padding=1),
+ nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ self.seg_score19 = nn.Sequential(
+ # seg-score
+ nn.Conv2d(64, self.n_classes, 1),
+ )
+
+ if self.learned_billinear:
+ raise NotImplementedError
+
+ def forward(self, x):
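+        # Encoder: each conv_block ends with MaxPool2d(return_indices=True), so it returns
+        # (features, pooling indices); the indices drive the MaxUnpool2d calls in the decoder below.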
+ size0 = x.size()
+ conv1, indices1 = self.conv_block1(x)
+ size1 = conv1.size()
+ conv2, indices2 = self.conv_block2(conv1)
+ size2 = conv2.size()
+ conv3, indices3 = self.conv_block3(conv2)
+ size3 = conv3.size()
+ conv4, indices4 = self.conv_block4(conv3)
+ size4 = conv4.size()
+ conv5, indices5 = self.conv_block5(conv4)
+
+ conv6 = self.conv_block6(conv5)
+ conv7 = self.conv_block7(conv6)
+ conv8 = self.deconv_block8(conv7)
+ conv9 = self.unpool(conv8, indices5, output_size=size4)
+ conv10 = self.deconv_block10(conv9)
+ conv11 = self.unpool(conv10, indices4, output_size=size3)
+ conv12 = self.deconv_block12(conv11)
+ conv13 = self.unpool(conv12, indices3, output_size=size2)
+ conv14 = self.deconv_block14(conv13)
+ conv15 = self.unpool(conv14, indices2, output_size=size1)
+ conv16 = self.deconv_block16(conv15)
+ conv17 = self.unpool(conv16, indices1, output_size=size0)
+ conv18 = self.deconv_block18(conv17)
+ out = self.seg_score19(conv18)
+
+ return out
+
+ def init_vgg16_params(self, vgg16, copy_fc8=True):
+ blocks = [
+ self.conv_block1,
+ self.conv_block2,
+ self.conv_block3,
+ self.conv_block4,
+ self.conv_block5,
+ ]
+
+ ranges = [[0, 4], [5, 9], [10, 16], [17, 23], [24, 29]]
+ features = list(vgg16.features.children())
+ i_layer = 0
+ # copy convolutional filters from vgg16
+ for idx, conv_block in enumerate(blocks):
+ for l1, l2 in zip(features[ranges[idx][0] : ranges[idx][1]], conv_block):
+ if isinstance(l1, nn.Conv2d) and isinstance(l2, nn.Conv2d):
+ if i_layer == 0:
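+                        # first conv layer: average VGG16's three RGB input filters into a
+                        # single-channel filter to match the 1-channel seismic input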
+ l2.weight.data = (
+ (l1.weight.data[:, 0, :, :] + l1.weight.data[:, 1, :, :] + l1.weight.data[:, 2, :, :]) / 3.0
+ ).view(l2.weight.size())
+ l2.bias.data = l1.bias.data
+ i_layer = i_layer + 1
+ else:
+ assert l1.weight.size() == l2.weight.size()
+ assert l1.bias.size() == l2.bias.size()
+ l2.weight.data = l1.weight.data
+ l2.bias.data = l1.bias.data
+ i_layer = i_layer + 1
+
+
+def get_seg_model(cfg, **kwargs):
+ assert (
+ cfg.MODEL.IN_CHANNELS == 1
+ ), f"Patch deconvnet is not implemented to accept {cfg.MODEL.IN_CHANNELS} channels. Please only pass 1 for cfg.MODEL.IN_CHANNELS"
+ model = patch_deconvnet(n_classes=cfg.DATASET.NUM_CLASSES)
+
+ return model
diff --git a/cv_lib/cv_lib/segmentation/models/patch_deconvnet_skip.py b/cv_lib/cv_lib/segmentation/models/patch_deconvnet_skip.py
new file mode 100644
index 00000000..d5506b84
--- /dev/null
+++ b/cv_lib/cv_lib/segmentation/models/patch_deconvnet_skip.py
@@ -0,0 +1,307 @@
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+
+import torch.nn as nn
+
+
+class patch_deconvnet_skip(nn.Module):
+ def __init__(self, n_classes=4, learned_billinear=False):
+ super(patch_deconvnet_skip, self).__init__()
+ self.learned_billinear = learned_billinear
+ self.n_classes = n_classes
+ self.unpool = nn.MaxUnpool2d(2, stride=2)
+ self.conv_block1 = nn.Sequential(
+ # conv1_1
+ nn.Conv2d(1, 64, 3, padding=1),
+ nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv1_2
+ nn.Conv2d(64, 64, 3, padding=1),
+ nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # pool1
+ nn.MaxPool2d(2, stride=2, return_indices=True, ceil_mode=True),
+ )
+ # it returns outputs and pool_indices_1
+
+ # 48*48
+
+ self.conv_block2 = nn.Sequential(
+ # conv2_1
+ nn.Conv2d(64, 128, 3, padding=1),
+ nn.BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv2_2
+ nn.Conv2d(128, 128, 3, padding=1),
+ nn.BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # pool2
+ nn.MaxPool2d(2, stride=2, return_indices=True, ceil_mode=True),
+ )
+ # it returns outputs and pool_indices_2
+
+ # 24*24
+
+ self.conv_block3 = nn.Sequential(
+ # conv3_1
+ nn.Conv2d(128, 256, 3, padding=1),
+ nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv3_2
+ nn.Conv2d(256, 256, 3, padding=1),
+ nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv3_3
+ nn.Conv2d(256, 256, 3, padding=1),
+ nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # pool3
+ nn.MaxPool2d(2, stride=2, return_indices=True, ceil_mode=True),
+ )
+ # it returns outputs and pool_indices_3
+
+ # 12*12
+
+ self.conv_block4 = nn.Sequential(
+ # conv4_1
+ nn.Conv2d(256, 512, 3, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv4_2
+ nn.Conv2d(512, 512, 3, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv4_3
+ nn.Conv2d(512, 512, 3, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # pool4
+ nn.MaxPool2d(2, stride=2, return_indices=True, ceil_mode=True),
+ )
+ # it returns outputs and pool_indices_4
+
+ # 6*6
+
+ self.conv_block5 = nn.Sequential(
+ # conv5_1
+ nn.Conv2d(512, 512, 3, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv5_2
+ nn.Conv2d(512, 512, 3, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv5_3
+ nn.Conv2d(512, 512, 3, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # pool5
+ nn.MaxPool2d(2, stride=2, return_indices=True, ceil_mode=True),
+ )
+ # it returns outputs and pool_indices_5
+
+ # 3*3
+
+ self.conv_block6 = nn.Sequential(
+ # fc6
+ nn.Conv2d(512, 4096, 3),
+            # set the filter size and no padding to make the output 1*1
+ nn.BatchNorm2d(4096, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ # 1*1
+
+ self.conv_block7 = nn.Sequential(
+ # fc7
+ nn.Conv2d(4096, 4096, 1),
+ # set the filter size to make output into 1*1
+ nn.BatchNorm2d(4096, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ self.deconv_block8 = nn.Sequential(
+ # fc6-deconv
+ nn.ConvTranspose2d(4096, 512, 3, stride=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ # 3*3
+
+ self.unpool_block9 = nn.Sequential(
+ # unpool5
+ nn.MaxUnpool2d(2, stride=2),
+ )
+ # usage unpool(output, indices)
+
+ # 6*6
+
+ self.deconv_block10 = nn.Sequential(
+ # deconv5_1
+ nn.ConvTranspose2d(512, 512, 3, stride=1, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv5_2
+ nn.ConvTranspose2d(512, 512, 3, stride=1, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv5_3
+ nn.ConvTranspose2d(512, 512, 3, stride=1, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ self.unpool_block11 = nn.Sequential(
+ # unpool4
+ nn.MaxUnpool2d(2, stride=2),
+ )
+
+ # 12*12
+
+ self.deconv_block12 = nn.Sequential(
+ # deconv4_1
+ nn.ConvTranspose2d(512, 512, 3, stride=1, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv4_2
+ nn.ConvTranspose2d(512, 512, 3, stride=1, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv4_3
+ nn.ConvTranspose2d(512, 256, 3, stride=1, padding=1),
+ nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ self.unpool_block13 = nn.Sequential(
+ # unpool3
+ nn.MaxUnpool2d(2, stride=2),
+ )
+
+ # 24*24
+
+ self.deconv_block14 = nn.Sequential(
+ # deconv3_1
+ nn.ConvTranspose2d(256, 256, 3, stride=1, padding=1),
+ nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv3_2
+ nn.ConvTranspose2d(256, 256, 3, stride=1, padding=1),
+ nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv3_3
+ nn.ConvTranspose2d(256, 128, 3, stride=1, padding=1),
+ nn.BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ self.unpool_block15 = nn.Sequential(
+ # unpool2
+ nn.MaxUnpool2d(2, stride=2),
+ )
+
+ # 48*48
+
+ self.deconv_block16 = nn.Sequential(
+ # deconv2_1
+ nn.ConvTranspose2d(128, 128, 3, stride=1, padding=1),
+ nn.BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv2_2
+ nn.ConvTranspose2d(128, 64, 3, stride=1, padding=1),
+ nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ self.unpool_block17 = nn.Sequential(
+ # unpool1
+ nn.MaxUnpool2d(2, stride=2),
+ )
+
+ # 96*96
+
+ self.deconv_block18 = nn.Sequential(
+ # deconv1_1
+ nn.ConvTranspose2d(64, 64, 3, stride=1, padding=1),
+ nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv1_2
+ nn.ConvTranspose2d(64, 64, 3, stride=1, padding=1),
+ nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ self.seg_score19 = nn.Sequential(
+ # seg-score
+ nn.Conv2d(64, self.n_classes, 1),
+ )
+
+ if self.learned_billinear:
+ raise NotImplementedError
+
+ def forward(self, x):
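+        # Same encoder/decoder as patch_deconvnet, but each decoder block output is summed with
+        # the matching encoder feature map (skip connections) before unpooling.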
+ size0 = x.size()
+ conv1, indices1 = self.conv_block1(x)
+ size1 = conv1.size()
+ conv2, indices2 = self.conv_block2(conv1)
+ size2 = conv2.size()
+ conv3, indices3 = self.conv_block3(conv2)
+ size3 = conv3.size()
+ conv4, indices4 = self.conv_block4(conv3)
+ size4 = conv4.size()
+ conv5, indices5 = self.conv_block5(conv4)
+
+ conv6 = self.conv_block6(conv5)
+ conv7 = self.conv_block7(conv6)
+ conv8 = self.deconv_block8(conv7) + conv5
+ conv9 = self.unpool(conv8, indices5, output_size=size4)
+ conv10 = self.deconv_block10(conv9) + conv4
+ conv11 = self.unpool(conv10, indices4, output_size=size3)
+ conv12 = self.deconv_block12(conv11) + conv3
+ conv13 = self.unpool(conv12, indices3, output_size=size2)
+ conv14 = self.deconv_block14(conv13) + conv2
+ conv15 = self.unpool(conv14, indices2, output_size=size1)
+ conv16 = self.deconv_block16(conv15) + conv1
+ conv17 = self.unpool(conv16, indices1, output_size=size0)
+ conv18 = self.deconv_block18(conv17)
+ out = self.seg_score19(conv18)
+
+ return out
+
+ def init_vgg16_params(self, vgg16, copy_fc8=True):
+ blocks = [
+ self.conv_block1,
+ self.conv_block2,
+ self.conv_block3,
+ self.conv_block4,
+ self.conv_block5,
+ ]
+
+ ranges = [[0, 4], [5, 9], [10, 16], [17, 23], [24, 29]]
+ features = list(vgg16.features.children())
+ i_layer = 0
+ # copy convolutional filters from vgg16
+ for idx, conv_block in enumerate(blocks):
+ for l1, l2 in zip(features[ranges[idx][0] : ranges[idx][1]], conv_block):
+ if isinstance(l1, nn.Conv2d) and isinstance(l2, nn.Conv2d):
+ if i_layer == 0:
+ l2.weight.data = (
+ (l1.weight.data[:, 0, :, :] + l1.weight.data[:, 1, :, :] + l1.weight.data[:, 2, :, :]) / 3.0
+ ).view(l2.weight.size())
+ l2.bias.data = l1.bias.data
+ i_layer = i_layer + 1
+ else:
+ assert l1.weight.size() == l2.weight.size()
+ assert l1.bias.size() == l2.bias.size()
+ l2.weight.data = l1.weight.data
+ l2.bias.data = l1.bias.data
+ i_layer = i_layer + 1
+
+
+def get_seg_model(cfg, **kwargs):
+ assert (
+ cfg.MODEL.IN_CHANNELS == 1
+ ), f"Patch deconvnet is not implemented to accept {cfg.MODEL.IN_CHANNELS} channels. Please only pass 1 for cfg.MODEL.IN_CHANNELS"
+ model = patch_deconvnet_skip(n_classes=cfg.DATASET.NUM_CLASSES)
+ return model
diff --git a/cv_lib/cv_lib/segmentation/models/resnet_unet.py b/cv_lib/cv_lib/segmentation/models/resnet_unet.py
new file mode 100644
index 00000000..05badb64
--- /dev/null
+++ b/cv_lib/cv_lib/segmentation/models/resnet_unet.py
@@ -0,0 +1,365 @@
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+import torchvision
+
+
+class FPAv2(nn.Module):
+ def __init__(self, input_dim, output_dim):
+ super(FPAv2, self).__init__()
+ self.glob = nn.Sequential(nn.AdaptiveAvgPool2d(1), nn.Conv2d(input_dim, output_dim, kernel_size=1, bias=False),)
+
+ self.down2_1 = nn.Sequential(
+ nn.Conv2d(input_dim, input_dim, kernel_size=5, stride=2, padding=2, bias=False),
+ nn.BatchNorm2d(input_dim),
+ nn.ELU(True),
+ )
+ self.down2_2 = nn.Sequential(
+ nn.Conv2d(input_dim, output_dim, kernel_size=5, padding=2, bias=False),
+ nn.BatchNorm2d(output_dim),
+ nn.ELU(True),
+ )
+
+ self.down3_1 = nn.Sequential(
+ nn.Conv2d(input_dim, input_dim, kernel_size=3, stride=2, padding=1, bias=False),
+ nn.BatchNorm2d(input_dim),
+ nn.ELU(True),
+ )
+ self.down3_2 = nn.Sequential(
+ nn.Conv2d(input_dim, output_dim, kernel_size=3, padding=1, bias=False),
+ nn.BatchNorm2d(output_dim),
+ nn.ELU(True),
+ )
+
+ self.conv1 = nn.Sequential(
+ nn.Conv2d(input_dim, output_dim, kernel_size=1, bias=False), nn.BatchNorm2d(output_dim), nn.ELU(True),
+ )
+
+ def forward(self, x):
+ # x shape: 512, 16, 16
+ x_glob = self.glob(x) # 256, 1, 1
+ x_glob = F.upsample(x_glob, scale_factor=16, mode="bilinear", align_corners=True) # 256, 16, 16
+
+ d2 = self.down2_1(x) # 512, 8, 8
+ d3 = self.down3_1(d2) # 512, 4, 4
+
+ d2 = self.down2_2(d2) # 256, 8, 8
+ d3 = self.down3_2(d3) # 256, 4, 4
+
+ d3 = F.upsample(d3, scale_factor=2, mode="bilinear", align_corners=True) # 256, 8, 8
+ d2 = d2 + d3
+
+ d2 = F.upsample(d2, scale_factor=2, mode="bilinear", align_corners=True) # 256, 16, 16
+ x = self.conv1(x) # 256, 16, 16
+ x = x * d2
+
+ x = x + x_glob
+
+ return x
+
+
+def conv3x3(input_dim, output_dim, rate=1):
+ return nn.Sequential(
+ nn.Conv2d(input_dim, output_dim, kernel_size=3, dilation=rate, padding=rate, bias=False,),
+ nn.BatchNorm2d(output_dim),
+ nn.ELU(True),
+ )
+
+
+class SpatialAttention2d(nn.Module):
+ def __init__(self, channel):
+ super(SpatialAttention2d, self).__init__()
+ self.squeeze = nn.Conv2d(channel, 1, kernel_size=1, bias=False)
+ self.sigmoid = nn.Sigmoid()
+
+ def forward(self, x):
+ z = self.squeeze(x)
+ z = self.sigmoid(z)
+ return x * z
+
+
+class GAB(nn.Module):
+ def __init__(self, input_dim, reduction=4):
+ super(GAB, self).__init__()
+ self.global_avgpool = nn.AdaptiveAvgPool2d(1)
+ self.conv1 = nn.Conv2d(input_dim, input_dim // reduction, kernel_size=1, stride=1)
+ self.conv2 = nn.Conv2d(input_dim // reduction, input_dim, kernel_size=1, stride=1)
+ self.relu = nn.ReLU(inplace=True)
+ self.sigmoid = nn.Sigmoid()
+
+ def forward(self, x):
+ z = self.global_avgpool(x)
+ z = self.relu(self.conv1(z))
+ z = self.sigmoid(self.conv2(z))
+ return x * z
+
+
+class Decoder(nn.Module):
+ def __init__(self, in_channels, channels, out_channels):
+ super(Decoder, self).__init__()
+ self.conv1 = conv3x3(in_channels, channels)
+ self.conv2 = conv3x3(channels, out_channels)
+ self.s_att = SpatialAttention2d(out_channels)
+ self.c_att = GAB(out_channels, 16)
+
+ def forward(self, x, e=None):
+ x = F.upsample(input=x, scale_factor=2, mode="bilinear", align_corners=True)
+ if e is not None:
+ x = torch.cat([x, e], 1)
+ x = self.conv1(x)
+ x = self.conv2(x)
+ s = self.s_att(x)
+ c = self.c_att(x)
+ output = s + c
+ return output
+
+
+class Decoderv2(nn.Module):
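+    # Upsample the decoder path with a transposed convolution, concatenate it with a
+    # 1x1-projected encoder feature map, then apply spatial and channel attention.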
+ def __init__(self, up_in, x_in, n_out):
+ super(Decoderv2, self).__init__()
+ up_out = x_out = n_out // 2
+ self.x_conv = nn.Conv2d(x_in, x_out, 1, bias=False)
+ self.tr_conv = nn.ConvTranspose2d(up_in, up_out, 2, stride=2)
+ self.bn = nn.BatchNorm2d(n_out)
+ self.relu = nn.ReLU(True)
+ self.s_att = SpatialAttention2d(n_out)
+ self.c_att = GAB(n_out, 16)
+
+ def forward(self, up_p, x_p):
+ up_p = self.tr_conv(up_p)
+ x_p = self.x_conv(x_p)
+
+ cat_p = torch.cat([up_p, x_p], 1)
+ cat_p = self.relu(self.bn(cat_p))
+ s = self.s_att(cat_p)
+ c = self.c_att(cat_p)
+ return s + c
+
+
+class SCse(nn.Module):
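+    # Concurrent spatial and channel squeeze-and-excitation: the input gated by spatial
+    # attention and by global channel attention, summed together.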
+ def __init__(self, dim):
+ super(SCse, self).__init__()
+ self.satt = SpatialAttention2d(dim)
+ self.catt = GAB(dim)
+
+ def forward(self, x):
+ return self.satt(x) + self.catt(x)
+
+
+# stage1 model
+class Res34Unetv4(nn.Module):
+ def __init__(self, n_classes=1):
+ super(Res34Unetv4, self).__init__()
+ self.resnet = torchvision.models.resnet34(True)
+
+ self.conv1 = nn.Sequential(self.resnet.conv1, self.resnet.bn1, self.resnet.relu)
+
+ self.encode2 = nn.Sequential(self.resnet.layer1, SCse(64))
+ self.encode3 = nn.Sequential(self.resnet.layer2, SCse(128))
+ self.encode4 = nn.Sequential(self.resnet.layer3, SCse(256))
+ self.encode5 = nn.Sequential(self.resnet.layer4, SCse(512))
+
+ self.center = nn.Sequential(FPAv2(512, 256), nn.MaxPool2d(2, 2))
+
+ self.decode5 = Decoderv2(256, 512, 64)
+ self.decode4 = Decoderv2(64, 256, 64)
+ self.decode3 = Decoderv2(64, 128, 64)
+ self.decode2 = Decoderv2(64, 64, 64)
+ self.decode1 = Decoder(64, 32, 64)
+
+ self.logit = nn.Sequential(
+ nn.Conv2d(320, 64, kernel_size=3, padding=1),
+ nn.ELU(True),
+ nn.Conv2d(64, n_classes, kernel_size=1, bias=False),
+ )
+
+ def forward(self, x):
+ # x: (batch_size, 3, 256, 256)
+
+ x = self.conv1(x) # 64, 128, 128
+ e2 = self.encode2(x) # 64, 128, 128
+ e3 = self.encode3(e2) # 128, 64, 64
+ e4 = self.encode4(e3) # 256, 32, 32
+ e5 = self.encode5(e4) # 512, 16, 16
+
+ f = self.center(e5) # 256, 8, 8
+
+ d5 = self.decode5(f, e5) # 64, 16, 16
+ d4 = self.decode4(d5, e4) # 64, 32, 32
+ d3 = self.decode3(d4, e3) # 64, 64, 64
+ d2 = self.decode2(d3, e2) # 64, 128, 128
+ d1 = self.decode1(d2) # 64, 256, 256
+
+ f = torch.cat(
+ (
+ d1,
+ F.upsample(d2, scale_factor=2, mode="bilinear", align_corners=True),
+ F.upsample(d3, scale_factor=4, mode="bilinear", align_corners=True),
+ F.upsample(d4, scale_factor=8, mode="bilinear", align_corners=True),
+ F.upsample(d5, scale_factor=16, mode="bilinear", align_corners=True),
+ ),
+ 1,
+ ) # 320, 256, 256
+
+ logit = self.logit(f) # 1, 256, 256
+
+ return logit
+
+
+# stage2 model
+class Res34Unetv3(nn.Module):
+ def __init__(self):
+ super(Res34Unetv3, self).__init__()
+ self.resnet = torchvision.models.resnet34(True)
+
+ self.conv1 = nn.Sequential(self.resnet.conv1, self.resnet.bn1, self.resnet.relu)
+
+ self.encode2 = nn.Sequential(self.resnet.layer1, SCse(64))
+ self.encode3 = nn.Sequential(self.resnet.layer2, SCse(128))
+ self.encode4 = nn.Sequential(self.resnet.layer3, SCse(256))
+ self.encode5 = nn.Sequential(self.resnet.layer4, SCse(512))
+
+ self.center = nn.Sequential(FPAv2(512, 256), nn.MaxPool2d(2, 2))
+
+ self.decode5 = Decoderv2(256, 512, 64)
+ self.decode4 = Decoderv2(64, 256, 64)
+ self.decode3 = Decoderv2(64, 128, 64)
+ self.decode2 = Decoderv2(64, 64, 64)
+ self.decode1 = Decoder(64, 32, 64)
+
+ self.dropout2d = nn.Dropout2d(0.4)
+ self.dropout = nn.Dropout(0.4)
+
+ self.fuse_pixel = conv3x3(320, 64)
+ self.logit_pixel = nn.Conv2d(64, 1, kernel_size=1, bias=False)
+
+ self.fuse_image = nn.Sequential(nn.Linear(512, 64), nn.ELU(True))
+ self.logit_image = nn.Sequential(nn.Linear(64, 1), nn.Sigmoid())
+ self.logit = nn.Sequential(
+ nn.Conv2d(128, 64, kernel_size=3, padding=1, bias=False),
+ nn.ELU(True),
+ nn.Conv2d(64, 1, kernel_size=1, bias=False),
+ )
+
+ def forward(self, x):
+ # x: (batch_size, 3, 256, 256)
+ batch_size, c, h, w = x.shape
+
+ x = self.conv1(x) # 64, 128, 128
+ e2 = self.encode2(x) # 64, 128, 128
+ e3 = self.encode3(e2) # 128, 64, 64
+ e4 = self.encode4(e3) # 256, 32, 32
+ e5 = self.encode5(e4) # 512, 16, 16
+
+ e = F.adaptive_avg_pool2d(e5, output_size=1).view(batch_size, -1) # 512
+ e = self.dropout(e)
+
+ f = self.center(e5) # 256, 8, 8
+
+ d5 = self.decode5(f, e5) # 64, 16, 16
+ d4 = self.decode4(d5, e4) # 64, 32, 32
+ d3 = self.decode3(d4, e3) # 64, 64, 64
+ d2 = self.decode2(d3, e2) # 64, 128, 128
+ d1 = self.decode1(d2) # 64, 256, 256
+
+ f = torch.cat(
+ (
+ d1,
+ F.upsample(d2, scale_factor=2, mode="bilinear", align_corners=True),
+ F.upsample(d3, scale_factor=4, mode="bilinear", align_corners=True),
+ F.upsample(d4, scale_factor=8, mode="bilinear", align_corners=True),
+ F.upsample(d5, scale_factor=16, mode="bilinear", align_corners=True),
+ ),
+ 1,
+ ) # 320, 256, 256
+ f = self.dropout2d(f)
+
+ # segmentation process
+ fuse_pixel = self.fuse_pixel(f) # 64, 256, 256
+ logit_pixel = self.logit_pixel(fuse_pixel) # 1, 256, 256
+
+ # classification process
+ fuse_image = self.fuse_image(e) # 64
+ logit_image = self.logit_image(fuse_image) # 1
+
+ # combine segmentation and classification
+ fuse = torch.cat(
+ [
+ fuse_pixel,
+ F.upsample(
+ fuse_image.view(batch_size, -1, 1, 1), scale_factor=256, mode="bilinear", align_corners=True,
+ ),
+ ],
+ 1,
+ ) # 128, 256, 256
+ logit = self.logit(fuse) # 1, 256, 256
+
+ return logit, logit_pixel, logit_image.view(-1)
+
+
+# stage3 model
+class Res34Unetv5(nn.Module):
+ def __init__(self):
+ super(Res34Unetv5, self).__init__()
+ self.resnet = torchvision.models.resnet34(True)
+
+ self.conv1 = nn.Sequential(
+ nn.Conv2d(3, 64, kernel_size=3, padding=1, bias=False), self.resnet.bn1, self.resnet.relu,
+ )
+
+ self.encode2 = nn.Sequential(self.resnet.layer1, SCse(64))
+ self.encode3 = nn.Sequential(self.resnet.layer2, SCse(128))
+ self.encode4 = nn.Sequential(self.resnet.layer3, SCse(256))
+ self.encode5 = nn.Sequential(self.resnet.layer4, SCse(512))
+
+ self.center = nn.Sequential(FPAv2(512, 256), nn.MaxPool2d(2, 2))
+
+ self.decode5 = Decoderv2(256, 512, 64)
+ self.decode4 = Decoderv2(64, 256, 64)
+ self.decode3 = Decoderv2(64, 128, 64)
+ self.decode2 = Decoderv2(64, 64, 64)
+
+ self.logit = nn.Sequential(
+ nn.Conv2d(256, 32, kernel_size=3, padding=1), nn.ELU(True), nn.Conv2d(32, 1, kernel_size=1, bias=False),
+ )
+
+ def forward(self, x):
+ # x: batch_size, 3, 128, 128
+ x = self.conv1(x) # 64, 128, 128
+ e2 = self.encode2(x) # 64, 128, 128
+ e3 = self.encode3(e2) # 128, 64, 64
+ e4 = self.encode4(e3) # 256, 32, 32
+ e5 = self.encode5(e4) # 512, 16, 16
+
+ f = self.center(e5) # 256, 8, 8
+
+ d5 = self.decode5(f, e5) # 64, 16, 16
+ d4 = self.decode4(d5, e4) # 64, 32, 32
+ d3 = self.decode3(d4, e3) # 64, 64, 64
+ d2 = self.decode2(d3, e2) # 64, 128, 128
+
+ f = torch.cat(
+ (
+ d2,
+ F.upsample(d3, scale_factor=2, mode="bilinear", align_corners=True),
+ F.upsample(d4, scale_factor=4, mode="bilinear", align_corners=True),
+ F.upsample(d5, scale_factor=8, mode="bilinear", align_corners=True),
+ ),
+ 1,
+ ) # 256, 128, 128
+
+ f = F.dropout2d(f, p=0.4)
+ logit = self.logit(f) # 1, 128, 128
+
+ return logit
+
+
+def get_seg_model(cfg, **kwargs):
+ assert (
+ cfg.MODEL.IN_CHANNELS == 3
+    ), f"SEResnet Unet is not implemented to accept {cfg.MODEL.IN_CHANNELS} channels. Please only pass 3 for cfg.MODEL.IN_CHANNELS"
+ model = Res34Unetv4(n_classes=cfg.DATASET.NUM_CLASSES)
+ return model
diff --git a/cv_lib/cv_lib/segmentation/models/section_deconvnet.py b/cv_lib/cv_lib/segmentation/models/section_deconvnet.py
new file mode 100644
index 00000000..7234b1ee
--- /dev/null
+++ b/cv_lib/cv_lib/segmentation/models/section_deconvnet.py
@@ -0,0 +1,307 @@
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+
+import torch.nn as nn
+
+
+class section_deconvnet(nn.Module):
+ def __init__(self, n_classes=4, learned_billinear=False):
+ super(section_deconvnet, self).__init__()
+ self.learned_billinear = learned_billinear
+ self.n_classes = n_classes
+ self.unpool = nn.MaxUnpool2d(2, stride=2)
+ self.conv_block1 = nn.Sequential(
+ # conv1_1
+ nn.Conv2d(1, 64, 3, padding=1),
+ nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv1_2
+ nn.Conv2d(64, 64, 3, padding=1),
+ nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # pool1
+ nn.MaxPool2d(2, stride=2, return_indices=True, ceil_mode=True),
+ )
+ # it returns outputs and pool_indices_1
+
+ # 48*48
+
+ self.conv_block2 = nn.Sequential(
+ # conv2_1
+ nn.Conv2d(64, 128, 3, padding=1),
+ nn.BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv2_2
+ nn.Conv2d(128, 128, 3, padding=1),
+ nn.BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # pool2
+ nn.MaxPool2d(2, stride=2, return_indices=True, ceil_mode=True),
+ )
+ # it returns outputs and pool_indices_2
+
+ # 24*24
+
+ self.conv_block3 = nn.Sequential(
+ # conv3_1
+ nn.Conv2d(128, 256, 3, padding=1),
+ nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv3_2
+ nn.Conv2d(256, 256, 3, padding=1),
+ nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv3_3
+ nn.Conv2d(256, 256, 3, padding=1),
+ nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # pool3
+ nn.MaxPool2d(2, stride=2, return_indices=True, ceil_mode=True),
+ )
+ # it returns outputs and pool_indices_3
+
+ # 12*12
+
+ self.conv_block4 = nn.Sequential(
+ # conv4_1
+ nn.Conv2d(256, 512, 3, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv4_2
+ nn.Conv2d(512, 512, 3, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv4_3
+ nn.Conv2d(512, 512, 3, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # pool4
+ nn.MaxPool2d(2, stride=2, return_indices=True, ceil_mode=True),
+ )
+ # it returns outputs and pool_indices_4
+
+ # 6*6
+
+ self.conv_block5 = nn.Sequential(
+ # conv5_1
+ nn.Conv2d(512, 512, 3, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv5_2
+ nn.Conv2d(512, 512, 3, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv5_3
+ nn.Conv2d(512, 512, 3, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # pool5
+ nn.MaxPool2d(2, stride=2, return_indices=True, ceil_mode=True),
+ )
+ # it returns outputs and pool_indices_5
+
+ # 3*3
+
+ self.conv_block6 = nn.Sequential(
+ # fc6
+ nn.Conv2d(512, 4096, 3),
+            # set the filter size and no padding to make the output 1*1
+ nn.BatchNorm2d(4096, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ # 1*1
+
+ self.conv_block7 = nn.Sequential(
+ # fc7
+ nn.Conv2d(4096, 4096, 1),
+ # set the filter size to make output into 1*1
+ nn.BatchNorm2d(4096, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ self.deconv_block8 = nn.Sequential(
+ # fc6-deconv
+ nn.ConvTranspose2d(4096, 512, 3, stride=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ # 3*3
+
+ self.unpool_block9 = nn.Sequential(
+ # unpool5
+ nn.MaxUnpool2d(2, stride=2),
+ )
+ # usage unpool(output, indices)
+
+ # 6*6
+
+ self.deconv_block10 = nn.Sequential(
+ # deconv5_1
+ nn.ConvTranspose2d(512, 512, 3, stride=1, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv5_2
+ nn.ConvTranspose2d(512, 512, 3, stride=1, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv5_3
+ nn.ConvTranspose2d(512, 512, 3, stride=1, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ self.unpool_block11 = nn.Sequential(
+ # unpool4
+ nn.MaxUnpool2d(2, stride=2),
+ )
+
+ # 12*12
+
+ self.deconv_block12 = nn.Sequential(
+ # deconv4_1
+ nn.ConvTranspose2d(512, 512, 3, stride=1, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv4_2
+ nn.ConvTranspose2d(512, 512, 3, stride=1, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv4_3
+ nn.ConvTranspose2d(512, 256, 3, stride=1, padding=1),
+ nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ self.unpool_block13 = nn.Sequential(
+ # unpool3
+ nn.MaxUnpool2d(2, stride=2),
+ )
+
+ # 24*24
+
+ self.deconv_block14 = nn.Sequential(
+ # deconv3_1
+ nn.ConvTranspose2d(256, 256, 3, stride=1, padding=1),
+ nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv3_2
+ nn.ConvTranspose2d(256, 256, 3, stride=1, padding=1),
+ nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv3_3
+ nn.ConvTranspose2d(256, 128, 3, stride=1, padding=1),
+ nn.BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ self.unpool_block15 = nn.Sequential(
+ # unpool2
+ nn.MaxUnpool2d(2, stride=2),
+ )
+
+ # 48*48
+
+ self.deconv_block16 = nn.Sequential(
+ # deconv2_1
+ nn.ConvTranspose2d(128, 128, 3, stride=1, padding=1),
+ nn.BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv2_2
+ nn.ConvTranspose2d(128, 64, 3, stride=1, padding=1),
+ nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ self.unpool_block17 = nn.Sequential(
+ # unpool1
+ nn.MaxUnpool2d(2, stride=2),
+ )
+
+ # 96*96
+
+ self.deconv_block18 = nn.Sequential(
+ # deconv1_1
+ nn.ConvTranspose2d(64, 64, 3, stride=1, padding=1),
+ nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv1_2
+ nn.ConvTranspose2d(64, 64, 3, stride=1, padding=1),
+ nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ self.seg_score19 = nn.Sequential(
+ # seg-score
+ nn.Conv2d(64, self.n_classes, 1),
+ )
+
+ if self.learned_billinear:
+ raise NotImplementedError
+
+ def forward(self, x):
+ size0 = x.size()
+ conv1, indices1 = self.conv_block1(x)
+ size1 = conv1.size()
+ conv2, indices2 = self.conv_block2(conv1)
+ size2 = conv2.size()
+ conv3, indices3 = self.conv_block3(conv2)
+ size3 = conv3.size()
+ conv4, indices4 = self.conv_block4(conv3)
+ size4 = conv4.size()
+ conv5, indices5 = self.conv_block5(conv4)
+
+ conv6 = self.conv_block6(conv5)
+ conv7 = self.conv_block7(conv6)
+ conv8 = self.deconv_block8(conv7)
+ conv9 = self.unpool(conv8, indices5, output_size=size4)
+ conv10 = self.deconv_block10(conv9)
+ conv11 = self.unpool(conv10, indices4, output_size=size3)
+ conv12 = self.deconv_block12(conv11)
+ conv13 = self.unpool(conv12, indices3, output_size=size2)
+ conv14 = self.deconv_block14(conv13)
+ conv15 = self.unpool(conv14, indices2, output_size=size1)
+ conv16 = self.deconv_block16(conv15)
+ conv17 = self.unpool(conv16, indices1, output_size=size0)
+ conv18 = self.deconv_block18(conv17)
+ out = self.seg_score19(conv18)
+
+ return out
+
+ def init_vgg16_params(self, vgg16, copy_fc8=True):
+ blocks = [
+ self.conv_block1,
+ self.conv_block2,
+ self.conv_block3,
+ self.conv_block4,
+ self.conv_block5,
+ ]
+
+ ranges = [[0, 4], [5, 9], [10, 16], [17, 23], [24, 29]]
+ features = list(vgg16.features.children())
+ i_layer = 0
+ # copy convolutional filters from vgg16
+ for idx, conv_block in enumerate(blocks):
+ for l1, l2 in zip(features[ranges[idx][0] : ranges[idx][1]], conv_block):
+ if isinstance(l1, nn.Conv2d) and isinstance(l2, nn.Conv2d):
+ if i_layer == 0:
+ l2.weight.data = (
+ (l1.weight.data[:, 0, :, :] + l1.weight.data[:, 1, :, :] + l1.weight.data[:, 2, :, :]) / 3.0
+ ).view(l2.weight.size())
+ l2.bias.data = l1.bias.data
+ i_layer = i_layer + 1
+ else:
+ assert l1.weight.size() == l2.weight.size()
+ assert l1.bias.size() == l2.bias.size()
+ l2.weight.data = l1.weight.data
+ l2.bias.data = l1.bias.data
+ i_layer = i_layer + 1
+
+
+def get_seg_model(cfg, **kwargs):
+ assert (
+ cfg.MODEL.IN_CHANNELS == 1
+ ), f"Section deconvnet is not implemented to accept {cfg.MODEL.IN_CHANNELS} channels. Please only pass 1 for cfg.MODEL.IN_CHANNELS"
+ model = section_deconvnet(n_classes=cfg.DATASET.NUM_CLASSES)
+ return model
diff --git a/cv_lib/cv_lib/segmentation/models/section_deconvnet_skip.py b/cv_lib/cv_lib/segmentation/models/section_deconvnet_skip.py
new file mode 100644
index 00000000..cb8b2ecb
--- /dev/null
+++ b/cv_lib/cv_lib/segmentation/models/section_deconvnet_skip.py
@@ -0,0 +1,307 @@
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+
+import torch.nn as nn
+
+
+class section_deconvnet_skip(nn.Module):
+ def __init__(self, n_classes=4, learned_billinear=False):
+ super(section_deconvnet_skip, self).__init__()
+ self.learned_billinear = learned_billinear
+ self.n_classes = n_classes
+ self.unpool = nn.MaxUnpool2d(2, stride=2)
+ self.conv_block1 = nn.Sequential(
+ # conv1_1
+ nn.Conv2d(1, 64, 3, padding=1),
+ nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv1_2
+ nn.Conv2d(64, 64, 3, padding=1),
+ nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # pool1
+ nn.MaxPool2d(2, stride=2, return_indices=True, ceil_mode=True),
+ )
+ # it returns outputs and pool_indices_1
+
+ # 48*48
+
+ self.conv_block2 = nn.Sequential(
+ # conv2_1
+ nn.Conv2d(64, 128, 3, padding=1),
+ nn.BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv2_2
+ nn.Conv2d(128, 128, 3, padding=1),
+ nn.BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # pool2
+ nn.MaxPool2d(2, stride=2, return_indices=True, ceil_mode=True),
+ )
+ # it returns outputs and pool_indices_2
+
+ # 24*24
+
+ self.conv_block3 = nn.Sequential(
+ # conv3_1
+ nn.Conv2d(128, 256, 3, padding=1),
+ nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv3_2
+ nn.Conv2d(256, 256, 3, padding=1),
+ nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv3_3
+ nn.Conv2d(256, 256, 3, padding=1),
+ nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # pool3
+ nn.MaxPool2d(2, stride=2, return_indices=True, ceil_mode=True),
+ )
+ # it returns outputs and pool_indices_3
+
+ # 12*12
+
+ self.conv_block4 = nn.Sequential(
+ # conv4_1
+ nn.Conv2d(256, 512, 3, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv4_2
+ nn.Conv2d(512, 512, 3, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv4_3
+ nn.Conv2d(512, 512, 3, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # pool4
+ nn.MaxPool2d(2, stride=2, return_indices=True, ceil_mode=True),
+ )
+ # it returns outputs and pool_indices_4
+
+ # 6*6
+
+ self.conv_block5 = nn.Sequential(
+ # conv5_1
+ nn.Conv2d(512, 512, 3, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv5_2
+ nn.Conv2d(512, 512, 3, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # conv5_3
+ nn.Conv2d(512, 512, 3, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # pool5
+ nn.MaxPool2d(2, stride=2, return_indices=True, ceil_mode=True),
+ )
+ # it returns outputs and pool_indices_5
+
+ # 3*3
+
+ self.conv_block6 = nn.Sequential(
+ # fc6
+ nn.Conv2d(512, 4096, 3),
+            # set the filter size and no padding to make the output 1*1
+ nn.BatchNorm2d(4096, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ # 1*1
+
+ self.conv_block7 = nn.Sequential(
+ # fc7
+ nn.Conv2d(4096, 4096, 1),
+ # set the filter size to make output into 1*1
+ nn.BatchNorm2d(4096, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ self.deconv_block8 = nn.Sequential(
+ # fc6-deconv
+ nn.ConvTranspose2d(4096, 512, 3, stride=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ # 3*3
+
+ self.unpool_block9 = nn.Sequential(
+ # unpool5
+ nn.MaxUnpool2d(2, stride=2),
+ )
+ # usage unpool(output, indices)
+
+ # 6*6
+
+ self.deconv_block10 = nn.Sequential(
+ # deconv5_1
+ nn.ConvTranspose2d(512, 512, 3, stride=1, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv5_2
+ nn.ConvTranspose2d(512, 512, 3, stride=1, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv5_3
+ nn.ConvTranspose2d(512, 512, 3, stride=1, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ self.unpool_block11 = nn.Sequential(
+ # unpool4
+ nn.MaxUnpool2d(2, stride=2),
+ )
+
+ # 12*12
+
+ self.deconv_block12 = nn.Sequential(
+ # deconv4_1
+ nn.ConvTranspose2d(512, 512, 3, stride=1, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv4_2
+ nn.ConvTranspose2d(512, 512, 3, stride=1, padding=1),
+ nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv4_3
+ nn.ConvTranspose2d(512, 256, 3, stride=1, padding=1),
+ nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ self.unpool_block13 = nn.Sequential(
+ # unpool3
+ nn.MaxUnpool2d(2, stride=2),
+ )
+
+ # 24*24
+
+ self.deconv_block14 = nn.Sequential(
+ # deconv3_1
+ nn.ConvTranspose2d(256, 256, 3, stride=1, padding=1),
+ nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv3_2
+ nn.ConvTranspose2d(256, 256, 3, stride=1, padding=1),
+ nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv3_3
+ nn.ConvTranspose2d(256, 128, 3, stride=1, padding=1),
+ nn.BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ self.unpool_block15 = nn.Sequential(
+ # unpool2
+ nn.MaxUnpool2d(2, stride=2),
+ )
+
+ # 48*48
+
+ self.deconv_block16 = nn.Sequential(
+ # deconv2_1
+ nn.ConvTranspose2d(128, 128, 3, stride=1, padding=1),
+ nn.BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv2_2
+ nn.ConvTranspose2d(128, 64, 3, stride=1, padding=1),
+ nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ self.unpool_block17 = nn.Sequential(
+ # unpool1
+ nn.MaxUnpool2d(2, stride=2),
+ )
+
+ # 96*96
+
+ self.deconv_block18 = nn.Sequential(
+ # deconv1_1
+ nn.ConvTranspose2d(64, 64, 3, stride=1, padding=1),
+ nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ # deconv1_2
+ nn.ConvTranspose2d(64, 64, 3, stride=1, padding=1),
+ nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True),
+ nn.ReLU(inplace=True),
+ )
+
+ self.seg_score19 = nn.Sequential(
+ # seg-score
+ nn.Conv2d(64, self.n_classes, 1),
+ )
+
+ if self.learned_billinear:
+ raise NotImplementedError
+
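+    # The decoder below mirrors the VGG-style encoder: the pooling indices recorded by the
+    # conv blocks are reused for max-unpooling, and each deconv block output is added to the
+    # corresponding encoder feature map (the skip connections of this variant).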
+ def forward(self, x):
+ size0 = x.size()
+ conv1, indices1 = self.conv_block1(x)
+ size1 = conv1.size()
+ conv2, indices2 = self.conv_block2(conv1)
+ size2 = conv2.size()
+ conv3, indices3 = self.conv_block3(conv2)
+ size3 = conv3.size()
+ conv4, indices4 = self.conv_block4(conv3)
+ size4 = conv4.size()
+ conv5, indices5 = self.conv_block5(conv4)
+
+ conv6 = self.conv_block6(conv5)
+ conv7 = self.conv_block7(conv6)
+ conv8 = self.deconv_block8(conv7) + conv5
+ conv9 = self.unpool(conv8, indices5, output_size=size4)
+ conv10 = self.deconv_block10(conv9) + conv4
+ conv11 = self.unpool(conv10, indices4, output_size=size3)
+ conv12 = self.deconv_block12(conv11) + conv3
+ conv13 = self.unpool(conv12, indices3, output_size=size2)
+ conv14 = self.deconv_block14(conv13) + conv2
+ conv15 = self.unpool(conv14, indices2, output_size=size1)
+ conv16 = self.deconv_block16(conv15) + conv1
+ conv17 = self.unpool(conv16, indices1, output_size=size0)
+ conv18 = self.deconv_block18(conv17)
+ out = self.seg_score19(conv18)
+
+ return out
+
+ def init_vgg16_params(self, vgg16, copy_fc8=True):
+ blocks = [
+ self.conv_block1,
+ self.conv_block2,
+ self.conv_block3,
+ self.conv_block4,
+ self.conv_block5,
+ ]
+
+ ranges = [[0, 4], [5, 9], [10, 16], [17, 23], [24, 29]]
+ features = list(vgg16.features.children())
+ i_layer = 0
+ # copy convolutional filters from vgg16
+ for idx, conv_block in enumerate(blocks):
+ for l1, l2 in zip(features[ranges[idx][0] : ranges[idx][1]], conv_block):
+ if isinstance(l1, nn.Conv2d) and isinstance(l2, nn.Conv2d):
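+                    # the first conv layer sees a single-channel section, so the three RGB
+                    # filter banks from VGG16 are averaged into one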
+ if i_layer == 0:
+ l2.weight.data = (
+ (l1.weight.data[:, 0, :, :] + l1.weight.data[:, 1, :, :] + l1.weight.data[:, 2, :, :]) / 3.0
+ ).view(l2.weight.size())
+ l2.bias.data = l1.bias.data
+ i_layer = i_layer + 1
+ else:
+ assert l1.weight.size() == l2.weight.size()
+ assert l1.bias.size() == l2.bias.size()
+ l2.weight.data = l1.weight.data
+ l2.bias.data = l1.bias.data
+ i_layer = i_layer + 1
+
+
+def get_seg_model(cfg, **kwargs):
+ assert (
+ cfg.MODEL.IN_CHANNELS == 1
+ ), f"Section deconvnet is not implemented to accept {cfg.MODEL.IN_CHANNELS} channels. Please only pass 1 for cfg.MODEL.IN_CHANNELS"
+ model = section_deconvnet_skip(n_classes=cfg.DATASET.NUM_CLASSES)
+ return model
diff --git a/cv_lib/cv_lib/segmentation/models/seg_hrnet.py b/cv_lib/cv_lib/segmentation/models/seg_hrnet.py
new file mode 100644
index 00000000..dd06118e
--- /dev/null
+++ b/cv_lib/cv_lib/segmentation/models/seg_hrnet.py
@@ -0,0 +1,446 @@
+# ------------------------------------------------------------------------------
+# Copyright (c) Microsoft
+# Licensed under the MIT License.
+# Written by Ke Sun (sunk@mail.ustc.edu.cn)
+# ------------------------------------------------------------------------------
+"""HRNET for segmentation taken from https://github.com/HRNet/HRNet-Semantic-Segmentation
+pytorch-v1.1 branch
+hash: 06142dc1c7026e256a7561c3e875b06622b5670f
+
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import logging
+import os
+
+import numpy as np
+import torch
+import torch._utils
+import torch.nn as nn
+import torch.nn.functional as F
+
+BatchNorm2d = nn.BatchNorm2d
+BN_MOMENTUM = 0.1
+logger = logging.getLogger(__name__)
+
+
+def conv3x3(in_planes, out_planes, stride=1):
+ """3x3 convolution with padding"""
+ return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False)
+
+
+class BasicBlock(nn.Module):
+ expansion = 1
+
+ def __init__(self, inplanes, planes, stride=1, downsample=None):
+ super(BasicBlock, self).__init__()
+ self.conv1 = conv3x3(inplanes, planes, stride)
+ self.bn1 = BatchNorm2d(planes, momentum=BN_MOMENTUM)
+ self.relu = nn.ReLU(inplace=True)
+ self.conv2 = conv3x3(planes, planes)
+ self.bn2 = BatchNorm2d(planes, momentum=BN_MOMENTUM)
+ self.downsample = downsample
+ self.stride = stride
+
+ def forward(self, x):
+ residual = x
+
+ out = self.conv1(x)
+ out = self.bn1(out)
+ out = self.relu(out)
+
+ out = self.conv2(out)
+ out = self.bn2(out)
+
+ if self.downsample is not None:
+ residual = self.downsample(x)
+
+ out += residual
+ out = self.relu(out)
+
+ return out
+
+
+class Bottleneck(nn.Module):
+ expansion = 4
+
+ def __init__(self, inplanes, planes, stride=1, downsample=None):
+ super(Bottleneck, self).__init__()
+ self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
+ self.bn1 = BatchNorm2d(planes, momentum=BN_MOMENTUM)
+ self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
+ self.bn2 = BatchNorm2d(planes, momentum=BN_MOMENTUM)
+ self.conv3 = nn.Conv2d(planes, planes * self.expansion, kernel_size=1, bias=False)
+ self.bn3 = BatchNorm2d(planes * self.expansion, momentum=BN_MOMENTUM)
+ self.relu = nn.ReLU(inplace=True)
+ self.downsample = downsample
+ self.stride = stride
+
+ def forward(self, x):
+ residual = x
+
+ out = self.conv1(x)
+ out = self.bn1(out)
+ out = self.relu(out)
+
+ out = self.conv2(out)
+ out = self.bn2(out)
+ out = self.relu(out)
+
+ out = self.conv3(out)
+ out = self.bn3(out)
+
+ if self.downsample is not None:
+ residual = self.downsample(x)
+
+ out += residual
+ out = self.relu(out)
+
+ return out
+
+
+class HighResolutionModule(nn.Module):
+ def __init__(
+ self, num_branches, blocks, num_blocks, num_inchannels, num_channels, fuse_method, multi_scale_output=True,
+ ):
+ super(HighResolutionModule, self).__init__()
+ self._check_branches(num_branches, blocks, num_blocks, num_inchannels, num_channels)
+
+ self.num_inchannels = num_inchannels
+ self.fuse_method = fuse_method
+ self.num_branches = num_branches
+
+ self.multi_scale_output = multi_scale_output
+
+ self.branches = self._make_branches(num_branches, blocks, num_blocks, num_channels)
+ self.fuse_layers = self._make_fuse_layers()
+ self.relu = nn.ReLU(inplace=True)
+
+ def _check_branches(self, num_branches, blocks, num_blocks, num_inchannels, num_channels):
+ if num_branches != len(num_blocks):
+ error_msg = "NUM_BRANCHES({}) <> NUM_BLOCKS({})".format(num_branches, len(num_blocks))
+ logger.error(error_msg)
+ raise ValueError(error_msg)
+
+ if num_branches != len(num_channels):
+ error_msg = "NUM_BRANCHES({}) <> NUM_CHANNELS({})".format(num_branches, len(num_channels))
+ logger.error(error_msg)
+ raise ValueError(error_msg)
+
+ if num_branches != len(num_inchannels):
+ error_msg = "NUM_BRANCHES({}) <> NUM_INCHANNELS({})".format(num_branches, len(num_inchannels))
+ logger.error(error_msg)
+ raise ValueError(error_msg)
+
+ def _make_one_branch(self, branch_index, block, num_blocks, num_channels, stride=1):
+ downsample = None
+ if stride != 1 or self.num_inchannels[branch_index] != num_channels[branch_index] * block.expansion:
+ downsample = nn.Sequential(
+ nn.Conv2d(
+ self.num_inchannels[branch_index],
+ num_channels[branch_index] * block.expansion,
+ kernel_size=1,
+ stride=stride,
+ bias=False,
+ ),
+ BatchNorm2d(num_channels[branch_index] * block.expansion, momentum=BN_MOMENTUM),
+ )
+
+ layers = []
+ layers.append(block(self.num_inchannels[branch_index], num_channels[branch_index], stride, downsample,))
+ self.num_inchannels[branch_index] = num_channels[branch_index] * block.expansion
+ for i in range(1, num_blocks[branch_index]):
+ layers.append(block(self.num_inchannels[branch_index], num_channels[branch_index]))
+
+ return nn.Sequential(*layers)
+
+ def _make_branches(self, num_branches, block, num_blocks, num_channels):
+ branches = []
+
+ for i in range(num_branches):
+ branches.append(self._make_one_branch(i, block, num_blocks, num_channels))
+
+ return nn.ModuleList(branches)
+
+ def _make_fuse_layers(self):
+ if self.num_branches == 1:
+ return None
+
+ num_branches = self.num_branches
+ num_inchannels = self.num_inchannels
+ fuse_layers = []
+ for i in range(num_branches if self.multi_scale_output else 1):
+ fuse_layer = []
+ for j in range(num_branches):
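+                # j > i: lower-resolution branch, matched with a 1x1 conv (bilinear upsampling
+                # is applied later in forward()); j == i: identity; j < i: higher-resolution
+                # branch, downsampled with a chain of stride-2 3x3 convs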
+ if j > i:
+ fuse_layer.append(
+ nn.Sequential(
+ nn.Conv2d(num_inchannels[j], num_inchannels[i], 1, 1, 0, bias=False,),
+ BatchNorm2d(num_inchannels[i], momentum=BN_MOMENTUM),
+ )
+ )
+ elif j == i:
+ fuse_layer.append(None)
+ else:
+ conv3x3s = []
+ for k in range(i - j):
+ if k == i - j - 1:
+ num_outchannels_conv3x3 = num_inchannels[i]
+ conv3x3s.append(
+ nn.Sequential(
+ nn.Conv2d(num_inchannels[j], num_outchannels_conv3x3, 3, 2, 1, bias=False,),
+ BatchNorm2d(num_outchannels_conv3x3, momentum=BN_MOMENTUM),
+ )
+ )
+ else:
+ num_outchannels_conv3x3 = num_inchannels[j]
+ conv3x3s.append(
+ nn.Sequential(
+ nn.Conv2d(num_inchannels[j], num_outchannels_conv3x3, 3, 2, 1, bias=False,),
+ BatchNorm2d(num_outchannels_conv3x3, momentum=BN_MOMENTUM),
+ nn.ReLU(inplace=True),
+ )
+ )
+ fuse_layer.append(nn.Sequential(*conv3x3s))
+ fuse_layers.append(nn.ModuleList(fuse_layer))
+
+ return nn.ModuleList(fuse_layers)
+
+ def get_num_inchannels(self):
+ return self.num_inchannels
+
+ def forward(self, x):
+ if self.num_branches == 1:
+ return [self.branches[0](x[0])]
+
+ for i in range(self.num_branches):
+ x[i] = self.branches[i](x[i])
+
+ x_fuse = []
+ for i in range(len(self.fuse_layers)):
+ y = x[0] if i == 0 else self.fuse_layers[i][0](x[0])
+ for j in range(1, self.num_branches):
+ if i == j:
+ y = y + x[j]
+ elif j > i:
+ width_output = x[i].shape[-1]
+ height_output = x[i].shape[-2]
+ y = y + F.interpolate(
+ self.fuse_layers[i][j](x[j]), size=[height_output, width_output], mode="bilinear",
+ )
+ else:
+ y = y + self.fuse_layers[i][j](x[j])
+ x_fuse.append(self.relu(y))
+
+ return x_fuse
+
+
+blocks_dict = {"BASIC": BasicBlock, "BOTTLENECK": Bottleneck}
+
+
+class HighResolutionNet(nn.Module):
+ def __init__(self, config, **kwargs):
+ extra = config.MODEL.EXTRA
+ super(HighResolutionNet, self).__init__()
+
+ # stem net
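+        # two stride-2 3x3 convolutions reduce the spatial resolution by 4x before stage 1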
+ self.conv1 = nn.Conv2d(config.MODEL.IN_CHANNELS, 64, kernel_size=3, stride=2, padding=1, bias=False)
+ self.bn1 = BatchNorm2d(64, momentum=BN_MOMENTUM)
+ self.conv2 = nn.Conv2d(64, 64, kernel_size=3, stride=2, padding=1, bias=False)
+ self.bn2 = BatchNorm2d(64, momentum=BN_MOMENTUM)
+ self.relu = nn.ReLU(inplace=True)
+
+ self.layer1 = self._make_layer(Bottleneck, 64, 64, 4)
+
+ self.stage2_cfg = extra["STAGE2"]
+ num_channels = self.stage2_cfg["NUM_CHANNELS"]
+ block = blocks_dict[self.stage2_cfg["BLOCK"]]
+ num_channels = [num_channels[i] * block.expansion for i in range(len(num_channels))]
+ self.transition1 = self._make_transition_layer([256], num_channels)
+ self.stage2, pre_stage_channels = self._make_stage(self.stage2_cfg, num_channels)
+
+ self.stage3_cfg = extra["STAGE3"]
+ num_channels = self.stage3_cfg["NUM_CHANNELS"]
+ block = blocks_dict[self.stage3_cfg["BLOCK"]]
+ num_channels = [num_channels[i] * block.expansion for i in range(len(num_channels))]
+ self.transition2 = self._make_transition_layer(pre_stage_channels, num_channels)
+ self.stage3, pre_stage_channels = self._make_stage(self.stage3_cfg, num_channels)
+
+ self.stage4_cfg = extra["STAGE4"]
+ num_channels = self.stage4_cfg["NUM_CHANNELS"]
+ block = blocks_dict[self.stage4_cfg["BLOCK"]]
+ num_channels = [num_channels[i] * block.expansion for i in range(len(num_channels))]
+ self.transition3 = self._make_transition_layer(pre_stage_channels, num_channels)
+ self.stage4, pre_stage_channels = self._make_stage(self.stage4_cfg, num_channels, multi_scale_output=True)
+
+ last_inp_channels = np.int(np.sum(pre_stage_channels))
+
+ self.last_layer = nn.Sequential(
+ nn.Conv2d(
+ in_channels=last_inp_channels, out_channels=last_inp_channels, kernel_size=1, stride=1, padding=0,
+ ),
+ BatchNorm2d(last_inp_channels, momentum=BN_MOMENTUM),
+ nn.ReLU(inplace=True),
+ nn.Conv2d(
+ in_channels=last_inp_channels,
+ out_channels=config.DATASET.NUM_CLASSES,
+ kernel_size=extra.FINAL_CONV_KERNEL,
+ stride=1,
+ padding=1 if extra.FINAL_CONV_KERNEL == 3 else 0,
+ ),
+ )
+
+ def _make_transition_layer(self, num_channels_pre_layer, num_channels_cur_layer):
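+        # existing branches get a 3x3 conv only when channel counts differ; new, lower-resolution
+        # branches are created from the last previous-stage branch via chains of stride-2 3x3 convs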
+ num_branches_cur = len(num_channels_cur_layer)
+ num_branches_pre = len(num_channels_pre_layer)
+
+ transition_layers = []
+ for i in range(num_branches_cur):
+ if i < num_branches_pre:
+ if num_channels_cur_layer[i] != num_channels_pre_layer[i]:
+ transition_layers.append(
+ nn.Sequential(
+ nn.Conv2d(num_channels_pre_layer[i], num_channels_cur_layer[i], 3, 1, 1, bias=False,),
+ BatchNorm2d(num_channels_cur_layer[i], momentum=BN_MOMENTUM),
+ nn.ReLU(inplace=True),
+ )
+ )
+ else:
+ transition_layers.append(None)
+ else:
+ conv3x3s = []
+ for j in range(i + 1 - num_branches_pre):
+ inchannels = num_channels_pre_layer[-1]
+ outchannels = num_channels_cur_layer[i] if j == i - num_branches_pre else inchannels
+ conv3x3s.append(
+ nn.Sequential(
+ nn.Conv2d(inchannels, outchannels, 3, 2, 1, bias=False),
+ BatchNorm2d(outchannels, momentum=BN_MOMENTUM),
+ nn.ReLU(inplace=True),
+ )
+ )
+ transition_layers.append(nn.Sequential(*conv3x3s))
+
+ return nn.ModuleList(transition_layers)
+
+ def _make_layer(self, block, inplanes, planes, blocks, stride=1):
+ downsample = None
+ if stride != 1 or inplanes != planes * block.expansion:
+ downsample = nn.Sequential(
+ nn.Conv2d(inplanes, planes * block.expansion, kernel_size=1, stride=stride, bias=False,),
+ BatchNorm2d(planes * block.expansion, momentum=BN_MOMENTUM),
+ )
+
+ layers = []
+ layers.append(block(inplanes, planes, stride, downsample))
+ inplanes = planes * block.expansion
+ for i in range(1, blocks):
+ layers.append(block(inplanes, planes))
+
+ return nn.Sequential(*layers)
+
+ def _make_stage(self, layer_config, num_inchannels, multi_scale_output=True):
+ num_modules = layer_config["NUM_MODULES"]
+ num_branches = layer_config["NUM_BRANCHES"]
+ num_blocks = layer_config["NUM_BLOCKS"]
+ num_channels = layer_config["NUM_CHANNELS"]
+ block = blocks_dict[layer_config["BLOCK"]]
+ fuse_method = layer_config["FUSE_METHOD"]
+
+ modules = []
+ for i in range(num_modules):
+            # multi_scale_output is only used in the last module
+ if not multi_scale_output and i == num_modules - 1:
+ reset_multi_scale_output = False
+ else:
+ reset_multi_scale_output = True
+ modules.append(
+ HighResolutionModule(
+ num_branches,
+ block,
+ num_blocks,
+ num_inchannels,
+ num_channels,
+ fuse_method,
+ reset_multi_scale_output,
+ )
+ )
+ num_inchannels = modules[-1].get_num_inchannels()
+
+ return nn.Sequential(*modules), num_inchannels
+
+ def forward(self, x):
+ x = self.conv1(x)
+ x = self.bn1(x)
+ x = self.relu(x)
+ x = self.conv2(x)
+ x = self.bn2(x)
+ x = self.relu(x)
+ x = self.layer1(x)
+
+ x_list = []
+ for i in range(self.stage2_cfg["NUM_BRANCHES"]):
+ if self.transition1[i] is not None:
+ x_list.append(self.transition1[i](x))
+ else:
+ x_list.append(x)
+ y_list = self.stage2(x_list)
+
+ x_list = []
+ for i in range(self.stage3_cfg["NUM_BRANCHES"]):
+ if self.transition2[i] is not None:
+ x_list.append(self.transition2[i](y_list[-1]))
+ else:
+ x_list.append(y_list[i])
+ y_list = self.stage3(x_list)
+
+ x_list = []
+ for i in range(self.stage4_cfg["NUM_BRANCHES"]):
+ if self.transition3[i] is not None:
+ x_list.append(self.transition3[i](y_list[-1]))
+ else:
+ x_list.append(y_list[i])
+ x = self.stage4(x_list)
+
+        # upsample all branches to the resolution of the highest-resolution branch and concatenate
+ x0_h, x0_w = x[0].size(2), x[0].size(3)
+ x1 = F.upsample(x[1], size=(x0_h, x0_w), mode="bilinear")
+ x2 = F.upsample(x[2], size=(x0_h, x0_w), mode="bilinear")
+ x3 = F.upsample(x[3], size=(x0_h, x0_w), mode="bilinear")
+
+ x = torch.cat([x[0], x1, x2, x3], 1)
+
+ x = self.last_layer(x)
+
+ return x
+
+ def init_weights(
+ self, pretrained="",
+ ):
+ logger.info("=> init weights from normal distribution")
+ for m in self.modules():
+ if isinstance(m, nn.Conv2d):
+ nn.init.normal_(m.weight, std=0.001)
+ elif isinstance(m, nn.BatchNorm2d):
+ nn.init.constant_(m.weight, 1)
+ nn.init.constant_(m.bias, 0)
+ if os.path.isfile(pretrained):
+ pretrained_dict = torch.load(pretrained)
+ logger.info("=> loading pretrained model {}".format(pretrained))
+ model_dict = self.state_dict()
+ pretrained_dict = {k: v for k, v in pretrained_dict.items() if k in model_dict.keys()}
+ # for k, _ in pretrained_dict.items():
+ # logger.info(
+ # '=> loading {} pretrained model {}'.format(k, pretrained))
+ model_dict.update(pretrained_dict)
+ self.load_state_dict(model_dict)
+
+
+def get_seg_model(cfg, **kwargs):
+ model = HighResolutionNet(cfg, **kwargs)
+ model.init_weights(cfg.MODEL.PRETRAINED)
+
+ return model
diff --git a/cv_lib/cv_lib/segmentation/models/unet.py b/cv_lib/cv_lib/segmentation/models/unet.py
new file mode 100644
index 00000000..c6ae6813
--- /dev/null
+++ b/cv_lib/cv_lib/segmentation/models/unet.py
@@ -0,0 +1,116 @@
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+
+""" Taken from https://github.com/milesial/Pytorch-UNet
+
+"""
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
+
+class double_conv(nn.Module):
+ """(conv => BN => ReLU) * 2"""
+
+ def __init__(self, in_ch, out_ch):
+ super(double_conv, self).__init__()
+ self.conv = nn.Sequential(
+ nn.Conv2d(in_ch, out_ch, 3, padding=1),
+ nn.BatchNorm2d(out_ch),
+ nn.ReLU(inplace=True),
+ nn.Conv2d(out_ch, out_ch, 3, padding=1),
+ nn.BatchNorm2d(out_ch),
+ nn.ReLU(inplace=True),
+ )
+
+ def forward(self, x):
+ x = self.conv(x)
+ return x
+
+
+class inconv(nn.Module):
+ def __init__(self, in_ch, out_ch):
+ super(inconv, self).__init__()
+ self.conv = double_conv(in_ch, out_ch)
+
+ def forward(self, x):
+ x = self.conv(x)
+ return x
+
+
+class down(nn.Module):
+ def __init__(self, in_ch, out_ch):
+ super(down, self).__init__()
+ self.mpconv = nn.Sequential(nn.MaxPool2d(2), double_conv(in_ch, out_ch))
+
+ def forward(self, x):
+ x = self.mpconv(x)
+ return x
+
+
+class up(nn.Module):
+ def __init__(self, in_ch, out_ch, bilinear=True):
+ super(up, self).__init__()
+
+ if bilinear:
+ self.up = nn.Upsample(scale_factor=2, mode="bilinear", align_corners=True)
+ else:
+ self.up = nn.ConvTranspose2d(in_ch // 2, in_ch // 2, 2, stride=2)
+
+ self.conv = double_conv(in_ch, out_ch)
+
+ def forward(self, x1, x2):
+ x1 = self.up(x1)
+
+ # input is CHW
+ diffY = x2.size()[2] - x1.size()[2]
+ diffX = x2.size()[3] - x1.size()[3]
+
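+        # pad the upsampled tensor so its spatial size matches the skip connection before
+        # concatenation (handles odd spatial sizes introduced by pooling)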
+ x1 = F.pad(x1, (diffX // 2, diffX - diffX // 2, diffY // 2, diffY - diffY // 2))
+
+ x = torch.cat([x2, x1], dim=1)
+ x = self.conv(x)
+ return x
+
+
+class outconv(nn.Module):
+ def __init__(self, in_ch, out_ch):
+ super(outconv, self).__init__()
+ self.conv = nn.Conv2d(in_ch, out_ch, 1)
+
+ def forward(self, x):
+ x = self.conv(x)
+ return x
+
+
+class UNet(nn.Module):
+ def __init__(self, n_channels, n_classes):
+ super(UNet, self).__init__()
+ self.inc = inconv(n_channels, 64)
+ self.down1 = down(64, 128)
+ self.down2 = down(128, 256)
+ self.down3 = down(256, 512)
+ self.down4 = down(512, 512)
+ self.up1 = up(1024, 256)
+ self.up2 = up(512, 128)
+ self.up3 = up(256, 64)
+ self.up4 = up(128, 64)
+ self.outc = outconv(64, n_classes)
+
+ def forward(self, x):
+ x1 = self.inc(x)
+ x2 = self.down1(x1)
+ x3 = self.down2(x2)
+ x4 = self.down3(x3)
+ x5 = self.down4(x4)
+ x = self.up1(x5, x4)
+ x = self.up2(x, x3)
+ x = self.up3(x, x2)
+ x = self.up4(x, x1)
+ x = self.outc(x)
+ return x
+
+
+def get_seg_model(cfg, **kwargs):
+ model = UNet(cfg.MODEL.IN_CHANNELS, cfg.DATASET.NUM_CLASSES)
+ return model
diff --git a/cv_lib/cv_lib/segmentation/models/utils.py b/cv_lib/cv_lib/segmentation/models/utils.py
new file mode 100644
index 00000000..70b4805f
--- /dev/null
+++ b/cv_lib/cv_lib/segmentation/models/utils.py
@@ -0,0 +1,103 @@
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+
+import torch.nn as nn
+
+
+class conv2DBatchNorm(nn.Module):
+ def __init__(self, in_channels, n_filters, k_size, stride, padding, bias=True, dilation=1):
+ super(conv2DBatchNorm, self).__init__()
+
+ if dilation > 1:
+ conv_mod = nn.Conv2d(
+ int(in_channels),
+ int(n_filters),
+ kernel_size=k_size,
+ padding=padding,
+ stride=stride,
+ bias=bias,
+ dilation=dilation,
+ )
+
+ else:
+ conv_mod = nn.Conv2d(
+ int(in_channels),
+ int(n_filters),
+ kernel_size=k_size,
+ padding=padding,
+ stride=stride,
+ bias=bias,
+ dilation=1,
+ )
+
+ self.cb_unit = nn.Sequential(conv_mod, nn.BatchNorm2d(int(n_filters)),)
+
+ def forward(self, inputs):
+ outputs = self.cb_unit(inputs)
+ return outputs
+
+
+class deconv2DBatchNorm(nn.Module):
+ def __init__(self, in_channels, n_filters, k_size, stride, padding, bias=True):
+ super(deconv2DBatchNorm, self).__init__()
+
+ self.dcb_unit = nn.Sequential(
+ nn.ConvTranspose2d(
+ int(in_channels), int(n_filters), kernel_size=k_size, padding=padding, stride=stride, bias=bias,
+ ),
+ nn.BatchNorm2d(int(n_filters)),
+ )
+
+ def forward(self, inputs):
+ outputs = self.dcb_unit(inputs)
+ return outputs
+
+
+class conv2DBatchNormRelu(nn.Module):
+ def __init__(self, in_channels, n_filters, k_size, stride, padding, bias=True, dilation=1):
+ super(conv2DBatchNormRelu, self).__init__()
+
+ if dilation > 1:
+ conv_mod = nn.Conv2d(
+ int(in_channels),
+ int(n_filters),
+ kernel_size=k_size,
+ padding=padding,
+ stride=stride,
+ bias=bias,
+ dilation=dilation,
+ )
+
+ else:
+ conv_mod = nn.Conv2d(
+ int(in_channels),
+ int(n_filters),
+ kernel_size=k_size,
+ padding=padding,
+ stride=stride,
+ bias=bias,
+ dilation=1,
+ )
+
+ self.cbr_unit = nn.Sequential(conv_mod, nn.BatchNorm2d(int(n_filters)), nn.ReLU(inplace=True),)
+
+ def forward(self, inputs):
+ outputs = self.cbr_unit(inputs)
+ return outputs
+
+
+class deconv2DBatchNormRelu(nn.Module):
+ def __init__(self, in_channels, n_filters, k_size, stride, padding, bias=True):
+ super(deconv2DBatchNormRelu, self).__init__()
+
+ self.dcbr_unit = nn.Sequential(
+ nn.ConvTranspose2d(
+ int(in_channels), int(n_filters), kernel_size=k_size, padding=padding, stride=stride, bias=bias,
+ ),
+ nn.BatchNorm2d(int(n_filters)),
+ nn.ReLU(inplace=True),
+ )
+
+ def forward(self, inputs):
+ outputs = self.dcbr_unit(inputs)
+ return outputs
diff --git a/cv_lib/cv_lib/segmentation/penobscot/engine.py b/cv_lib/cv_lib/segmentation/penobscot/engine.py
new file mode 100644
index 00000000..0b1273bb
--- /dev/null
+++ b/cv_lib/cv_lib/segmentation/penobscot/engine.py
@@ -0,0 +1,119 @@
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+
+import torch
+
+from ignite.engine.engine import Engine
+from toolz import curry
+from torch.nn import functional as F
+
+
+def _upscale_model_output(y_pred, y):
+ ph, pw = y_pred.size(2), y_pred.size(3)
+ h, w = y.size(2), y.size(3)
+ if ph != h or pw != w:
+ y_pred = F.upsample(input=y_pred, size=(h, w), mode="bilinear")
+ return y_pred
+
+
+def create_supervised_trainer(
+ model,
+ optimizer,
+ loss_fn,
+ prepare_batch,
+ device=None,
+ non_blocking=False,
+ output_transform=lambda x, y, y_pred, loss: {"loss": loss.item()},
+):
+ """Factory function for creating a trainer for supervised segmentation models.
+
+ Args:
+ model (`torch.nn.Module`): the model to train.
+ optimizer (`torch.optim.Optimizer`): the optimizer to use.
+ loss_fn (torch.nn loss function): the loss function to use.
+ prepare_batch (callable): function that receives `batch`, `device`, `non_blocking` and outputs
+ tuple of tensors `(batch_x, batch_y, patch_id, patch_locations)`.
+ device (str, optional): device type specification (default: None).
+ Applies to both model and batches.
+ non_blocking (bool, optional): if True and this copy is between CPU and GPU, the copy may occur asynchronously
+ with respect to the host. For other cases, this argument has no effect.
+        output_transform (callable, optional): function that receives 'x', 'y', 'y_pred', 'loss' and returns value
+            to be assigned to engine's state.output after each iteration. Default returns `{"loss": loss.item()}`.
+
+    Note: `engine.state.output` for this engine is defined by the `output_transform` parameter and is
+        a dict containing the loss of the processed batch by default.
+
+ Returns:
+ Engine: a trainer engine with supervised update function.
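+
+    Example (illustrative sketch; assumes `model`, `optimizer`, `criterion`, a `prepare_batch`
+        helper and a `train_loader` are already defined):
+
+        >>> trainer = create_supervised_trainer(model, optimizer, criterion, prepare_batch, device="cuda")
+        >>> trainer.run(train_loader, max_epochs=10)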
+ """
+ if device:
+ model.to(device)
+
+ def _update(engine, batch):
+ model.train()
+ optimizer.zero_grad()
+ x, y, ids, patch_locations = prepare_batch(batch, device=device, non_blocking=non_blocking)
+ y_pred = model(x)
+ y_pred = _upscale_model_output(y_pred, y)
+ loss = loss_fn(y_pred.squeeze(1), y.squeeze(1))
+ loss.backward()
+ optimizer.step()
+ return output_transform(x, y, y_pred, loss)
+
+ return Engine(_update)
+
+
+@curry
+def val_transform(x, y, y_pred, ids, patch_locations):
+ return {
+ "image": x,
+ "y_pred": y_pred.detach(),
+ "mask": y.detach(),
+ "ids": ids,
+ "patch_locations": patch_locations,
+ }
+
+
+def create_supervised_evaluator(
+ model, prepare_batch, metrics=None, device=None, non_blocking=False, output_transform=val_transform,
+):
+ """Factory function for creating an evaluator for supervised segmentation models.
+
+ Args:
+        model (`torch.nn.Module`): the model to evaluate.
+ prepare_batch (callable): function that receives `batch`, `device`, `non_blocking` and outputs
+ tuple of tensors `(batch_x, batch_y, patch_id, patch_locations)`.
+ metrics (dict of str - :class:`~ignite.metrics.Metric`): a map of metric names to Metrics.
+ device (str, optional): device type specification (default: None).
+ Applies to both model and batches.
+ non_blocking (bool, optional): if True and this copy is between CPU and GPU, the copy may occur asynchronously
+ with respect to the host. For other cases, this argument has no effect.
+        output_transform (callable, optional): function that receives 'x', 'y', 'y_pred', 'ids', 'patch_locations'
+            and returns the value to be assigned to engine's state.output after each iteration. Default is
+            `val_transform`, which returns a dict of the image, prediction, mask, ids and patch locations.
+            If you change it you should use `output_transform` in metrics.
+
+    Note: `engine.state.output` for this engine is defined by the `output_transform` parameter and is
+        the dict described above by default.
+
+ Returns:
+ Engine: an evaluator engine with supervised inference function.
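+
+    Example (illustrative sketch; assumes `model`, `prepare_batch`, a dict of ignite metrics
+        and a `val_loader` are already defined):
+
+        >>> evaluator = create_supervised_evaluator(model, prepare_batch, metrics=metrics, device="cuda")
+        >>> state = evaluator.run(val_loader)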
+ """
+ metrics = metrics or {}
+
+ if device:
+ model.to(device)
+
+ def _inference(engine, batch):
+ model.eval()
+ with torch.no_grad():
+ x, y, ids, patch_locations = prepare_batch(batch, device=device, non_blocking=non_blocking)
+ y_pred = model(x)
+ y_pred = _upscale_model_output(y_pred, x)
+ return output_transform(x, y, y_pred, ids, patch_locations)
+
+ engine = Engine(_inference)
+
+ for name, metric in metrics.items():
+ metric.attach(engine, name)
+
+ return engine
diff --git a/cv_lib/cv_lib/segmentation/utils.py b/cv_lib/cv_lib/segmentation/utils.py
new file mode 100644
index 00000000..07951e88
--- /dev/null
+++ b/cv_lib/cv_lib/segmentation/utils.py
@@ -0,0 +1,39 @@
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+
+import numpy as np
+from deepseismic_interpretation.dutchf3.data import decode_segmap
+from os import path
+from PIL import Image
+from toolz import pipe
+
+
+def _chw_to_hwc(image_array_numpy):
+ return np.moveaxis(image_array_numpy, 0, -1)
+
+
+def save_images(pred_dict, output_dir, num_classes, colours, extra_identifier=""):
+    for image_id in pred_dict:
+        save_image(
+            pred_dict[image_id].unsqueeze(0).cpu().numpy(),
+            image_id,
+            output_dir,
+            num_classes,
+            colours,
+            extra_identifier=extra_identifier,
+        )
+
+
+def save_image(image_numpy_array, image_id, output_dir, num_classes, colours, extra_identifier=""):
+    """Save segmentation map as image
+
+    Args:
+        image_numpy_array (numpy.Array): numpy array that represents an image
+        image_id: identifier used to name the saved file
+        output_dir (str): directory in which to save the image
+        num_classes (int): number of segmentation classes
+        colours: label colours passed to decode_segmap
+        extra_identifier (str, optional): suffix appended to the filename. Defaults to "".
+    """
+    im_array = decode_segmap(image_numpy_array, n_classes=num_classes, label_colours=colours,)
+    im = pipe((im_array * 255).astype(np.uint8).squeeze(), _chw_to_hwc, Image.fromarray,)
+    filename = path.join(output_dir, f"{image_id}_{extra_identifier}.png")
+    im.save(filename)
diff --git a/cv_lib/cv_lib/utils.py b/cv_lib/cv_lib/utils.py
new file mode 100644
index 00000000..d3e41aeb
--- /dev/null
+++ b/cv_lib/cv_lib/utils.py
@@ -0,0 +1,19 @@
+import os
+import logging
+import logging.config
+
+
+def load_log_configuration(log_config_file):
+ """
+ Loads logging configuration from the given configuration file.
+ """
+ if not os.path.exists(log_config_file) or not os.path.isfile(log_config_file):
+        msg = "{} configuration file does not exist!".format(log_config_file)
+ logging.getLogger(__name__).error(msg)
+ raise ValueError(msg)
+ try:
+ logging.config.fileConfig(log_config_file, disable_existing_loggers=False)
+ logging.getLogger(__name__).info("%s configuration file was loaded.", log_config_file)
+ except Exception as e:
+ logging.getLogger(__name__).error("Failed to load configuration from %s!", log_config_file)
+ logging.getLogger(__name__).debug(str(e), exc_info=True)
+ raise e
diff --git a/cv_lib/requirements.txt b/cv_lib/requirements.txt
new file mode 100644
index 00000000..e543e89a
--- /dev/null
+++ b/cv_lib/requirements.txt
@@ -0,0 +1,9 @@
+numpy>=1.16.4
+toolz>=0.9.0
+pandas>=0.24.2
+ignite>=1.1.0
+scikit_learn>=0.21.3
+tensorboardX>=1.8
+torch>=1.2.0
+torchvision>=0.4.0
+tqdm>=4.33.0
diff --git a/cv_lib/setup.py b/cv_lib/setup.py
new file mode 100644
index 00000000..23353ae0
--- /dev/null
+++ b/cv_lib/setup.py
@@ -0,0 +1,54 @@
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# /* spell-checker: disable */
+import os
+
+try:
+ from setuptools import setup, find_packages
+except ImportError:
+ from distutils.core import setup, find_packages
+
+
+# Package meta-data.
+NAME = "cv_lib"
+DESCRIPTION = "A library for computer vision"
+URL = ""
+EMAIL = "msalvaris@users.noreply.github.com"
+AUTHOR = "AUTHORS.md"
+LICENSE = ""
+LONG_DESCRIPTION = DESCRIPTION
+
+
+with open("requirements.txt") as f:
+ requirements = f.read().splitlines()
+
+
+here = os.path.abspath(os.path.dirname(__file__))
+
+# Load the package's __version__.py module as a dictionary.
+about = {}
+with open(os.path.join(here, NAME, "__version__.py")) as f:
+ exec(f.read(), about)
+
+
+setup(
+ name=NAME,
+ version=about["__version__"],
+ url=URL,
+ license=LICENSE,
+ author=AUTHOR,
+ author_email=EMAIL,
+ description=DESCRIPTION,
+ long_description=LONG_DESCRIPTION,
+ scripts=[],
+ packages=find_packages(),
+ include_package_data=True,
+ install_requires=requirements,
+ classifiers=[
+        "Development Status :: 3 - Alpha",
+        "Intended Audience :: Developers",
+        "Intended Audience :: Science/Research",
+ "Operating System :: POSIX",
+ "Operating System :: POSIX :: Linux",
+ "Programming Language :: Python :: 3.6",
+ ],
+)
diff --git a/cv_lib/tests/test_metrics.py b/cv_lib/tests/test_metrics.py
new file mode 100644
index 00000000..23a671eb
--- /dev/null
+++ b/cv_lib/tests/test_metrics.py
@@ -0,0 +1,126 @@
+import torch
+import numpy as np
+from pytest import approx
+
+from ignite.metrics import ConfusionMatrix, MetricsLambda
+
+from cv_lib.segmentation.metrics import class_accuracy, mean_class_accuracy
+
+
+# source repo:
+# https://github.com/pytorch/ignite/blob/master/tests/ignite/metrics/test_confusion_matrix.py
+def _get_y_true_y_pred():
+ # Generate an image with labels 0 (background), 1, 2
+ # 3 classes:
+ y_true = np.zeros((30, 30), dtype=np.int)
+ y_true[1:11, 1:11] = 1
+ y_true[15:25, 15:25] = 2
+
+ y_pred = np.zeros((30, 30), dtype=np.int)
+ y_pred[20:30, 1:11] = 1
+ y_pred[20:30, 20:30] = 2
+ return y_true, y_pred
+
+
+# source repo:
+# https://github.com/pytorch/ignite/blob/master/tests/ignite/metrics/test_confusion_matrix.py
+def _compute_th_y_true_y_logits(y_true, y_pred):
+ # Create torch.tensor from numpy
+ th_y_true = torch.from_numpy(y_true).unsqueeze(0)
+ # Create logits torch.tensor:
+ num_classes = max(np.max(y_true), np.max(y_pred)) + 1
+ y_probas = np.ones((num_classes,) + y_true.shape) * -10
+ for i in range(num_classes):
+ y_probas[i, (y_pred == i)] = 720
+ th_y_logits = torch.from_numpy(y_probas).unsqueeze(0)
+ return th_y_true, th_y_logits
+
+
+# Dependency metrics do not get updated automatically, so need to retrieve and
+# update confusion matrix manually
+def _get_cm(metriclambda):
+ metrics = list(metriclambda.args)
+ while metrics:
+ metric = metrics[0]
+ if isinstance(metric, ConfusionMatrix):
+ return metric
+ elif isinstance(metric, MetricsLambda):
+ metrics.extend(metric.args)
+ del metrics[0]
+
+
+def test_class_accuracy():
+ y_true, y_pred = _get_y_true_y_pred()
+
+ ## Perfect prediction
+ th_y_true, th_y_logits = _compute_th_y_true_y_logits(y_true, y_true)
+ # Update metric
+ output = (th_y_logits, th_y_true)
+ acc_metric = class_accuracy(num_classes=3)
+ acc_metric.update(output)
+
+ # Retrieve and update confusion matrix
+ metric_cm = _get_cm(acc_metric)
+ # assert confusion matrix exists and is all zeroes
+ assert metric_cm is not None
+ assert torch.min(metric_cm.confusion_matrix) == 0.0 and torch.max(metric_cm.confusion_matrix) == 0.0
+ metric_cm.update(output)
+
+ # Expected result
+ true_res = [1.0, 1.0, 1.0]
+ res = acc_metric.compute().numpy()
+ assert np.all(res == true_res), "Result {} vs. expected values {}".format(res, true_res)
+
+ ## Imperfect prediction
+ th_y_true, th_y_logits = _compute_th_y_true_y_logits(y_true, y_pred)
+ # Update metric
+ output = (th_y_logits, th_y_true)
+ acc_metric = class_accuracy(num_classes=3)
+ acc_metric.update(output)
+
+ # Retrieve and update confusion matrix
+ metric_cm = _get_cm(acc_metric)
+ assert metric_cm is not None
+ assert torch.min(metric_cm.confusion_matrix) == 0.0 and torch.max(metric_cm.confusion_matrix) == 0.0
+ metric_cm.update(output)
+
+ # Expected result
+ true_res = [0.75, 0.0, 0.25]
+ res = acc_metric.compute().numpy()
+ assert np.all(res == true_res), "Result {} vs. expected values {}".format(res, true_res)
+
+
+def test_mean_class_accuracy():
+ y_true, y_pred = _get_y_true_y_pred()
+
+ ## Perfect prediction
+ th_y_true, th_y_logits = _compute_th_y_true_y_logits(y_true, y_true)
+ # Update metric
+ output = (th_y_logits, th_y_true)
+ acc_metric = mean_class_accuracy(num_classes=3)
+ acc_metric.update(output)
+
+ # Retrieve and update confusion matrix
+ metric_cm = _get_cm(acc_metric)
+ metric_cm.update(output)
+
+ # Expected result
+ true_res = 1.0
+ res = acc_metric.compute().numpy()
+ assert res == approx(true_res), "Result {} vs. expected value {}".format(res, true_res)
+
+ ## Imperfect prediction
+ th_y_true, th_y_logits = _compute_th_y_true_y_logits(y_true, y_pred)
+ # Update metric
+ output = (th_y_logits, th_y_true)
+ acc_metric = mean_class_accuracy(num_classes=3)
+ acc_metric.update(output)
+
+ # Retrieve and update confusion matrix
+ metric_cm = _get_cm(acc_metric)
+ metric_cm.update(output)
+
+ # Expected result
+ true_res = 1 / 3
+ res = acc_metric.compute().numpy()
+ assert res == approx(true_res), "Result {} vs. expected value {}".format(res, true_res)
diff --git a/deepseismic/__init__.py b/deepseismic/__init__.py
deleted file mode 100644
index 8dc07e06..00000000
--- a/deepseismic/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from . import cli, forward, velocity
-
-__all__ = ["cli", "forward", "velocity"]
diff --git a/deepseismic/cli/__init__.py b/deepseismic/cli/__init__.py
deleted file mode 100644
index 1b0db11d..00000000
--- a/deepseismic/cli/__init__.py
+++ /dev/null
@@ -1,21 +0,0 @@
-from functools import partial
-
-import click
-
-from . import forward, velocity
-
-click.option = partial(click.option, show_default=True)
-
-
-@click.group()
-@click.pass_context
-def cli(ctx):
- ctx.ensure_object(dict)
-
-
-cli.add_command(forward.fwd)
-cli.add_command(velocity.vp)
-
-
-def main():
- cli(obj={})
diff --git a/deepseismic/cli/forward.py b/deepseismic/cli/forward.py
deleted file mode 100644
index 0ef69d39..00000000
--- a/deepseismic/cli/forward.py
+++ /dev/null
@@ -1,123 +0,0 @@
-from functools import partial
-
-import click
-import h5py
-import numpy as np
-
-from ..forward import Receiver, RickerSource, TimeAxis, VelocityModel
-
-click.option = partial(click.option, show_default=True)
-
-
-@click.group()
-@click.argument("input", type=click.Path())
-@click.argument("output", type=click.Path())
-@click.option(
- "-d",
- "--duration",
- default=1000.0,
- type=float,
- help="Simulation duration (in ms)",
-)
-@click.option("-dt", default=2.0, type=float, help="Time increment (in ms)")
-@click.option(
- "--n-pml", default=10, type=int, help="PML size (in grid points)"
-)
-@click.option(
- "--n-receivers",
- default=11,
- type=int,
- help="Number of receivers per horizontal dimension",
-)
-@click.option("--space-order", default=2, type=int, help="Space order")
-@click.option(
- "--spacing", default=10.0, type=float, help="Spacing between grid points"
-)
-@click.pass_context
-def fwd(
- ctx,
- dt: float,
- duration: float,
- input: str,
- n_pml: int,
- n_receivers: int,
- output: str,
- space_order: int,
- spacing: float,
-):
- """Forward modelling"""
- if dt:
- ctx.obj["dt"] = dt
- ctx.obj["duration"] = duration
- ctx.obj["input_file"] = h5py.File(input, mode="r")
- ctx.obj["n_pml"] = n_pml
- ctx.obj["n_receivers"] = n_receivers
- ctx.obj["output_file"] = h5py.File(output, mode="w")
- ctx.obj["space_order"] = space_order
- ctx.obj["spacing"] = spacing
-
-
-@fwd.command()
-@click.option(
- "-f0", default=0.01, type=float, help="Source peak frequency (in kHz)"
-)
-@click.pass_context
-def ricker(ctx, f0: float):
- """Ricker source"""
- input_file = ctx.obj["input_file"]
- output_file = ctx.obj["output_file"]
- n = sum(len(x.values()) for x in input_file.values())
- with click.progressbar(length=n) as bar:
- for input_group_name, input_group in input_file.items():
- for dataset in input_group.values():
- first_dataset = dataset
- break
- model = VelocityModel(
- shape=first_dataset.shape,
- origin=tuple(0.0 for _ in first_dataset.shape),
- spacing=tuple(ctx.obj["spacing"] for _ in first_dataset.shape),
- vp=first_dataset[()],
- space_order=ctx.obj["space_order"],
- n_pml=ctx.obj["n_pml"],
- )
- time_range = TimeAxis(
- start=0.0, stop=ctx.obj["duration"], step=ctx.obj["dt"]
- )
- source = RickerSource(
- name="source",
- grid=model.grid,
- f0=f0,
- npoint=1,
- time_range=time_range,
- )
- source.coordinates.data[0, :] = np.array(model.domain_size) * 0.5
- source.coordinates.data[0, -1] = 0.0
- n_receivers = ctx.obj["n_receivers"]
- total_receivers = n_receivers ** (len(model.shape) - 1)
- receivers = Receiver(
- name="receivers",
- grid=model.grid,
- npoint=total_receivers,
- time_range=time_range,
- )
- receivers_coords = np.meshgrid(
- *(
- np.linspace(start=0, stop=s, num=n_receivers + 2)[1:-1]
- for s in model.domain_size[:-1]
- )
- )
- for d in range(len(receivers_coords)):
- receivers.coordinates.data[:, d] = receivers_coords[
- d
- ].flatten()
- receivers.coordinates.data[:, -1] = 0.0
- output_group = output_file.create_group(input_group_name)
- for input_dataset_name, vp in input_group.items():
- model.vp = vp[()]
- seismograms = model.solve(
- source=source, receivers=receivers, time_range=time_range
- )
- output_group.create_dataset(
- input_dataset_name, data=seismograms
- )
- bar.update(1)
diff --git a/deepseismic/cli/velocity.py b/deepseismic/cli/velocity.py
deleted file mode 100644
index 1c87c340..00000000
--- a/deepseismic/cli/velocity.py
+++ /dev/null
@@ -1,96 +0,0 @@
-from functools import partial
-from itertools import islice
-from typing import Tuple
-
-import click
-import h5py
-
-from ..velocity import RoethTarantolaGenerator
-
-click.option = partial(click.option, show_default=True)
-
-
-@click.group()
-@click.argument("output", type=click.Path())
-@click.option(
- "--append/--no-append",
- default=False,
- help="Whether to append to output file",
-)
-@click.option("-n", default=1, type=int, help="Number of simulations")
-@click.option(
- "-nx",
- default=100,
- type=int,
- help="Number of grid points along the first dimension",
-)
-@click.option(
- "-ny",
- default=100,
- type=int,
- help="Number of grid points along the second dimension",
-)
-@click.option(
- "-nz", type=int, help="Number of grid points along the third dimension"
-)
-@click.option("-s", "--seed", default=42, type=int, help="Random seed")
-@click.pass_context
-def vp(
- ctx,
- append: bool,
- n: int,
- nx: int,
- ny: int,
- nz: int,
- output: str,
- seed: int,
-):
- """Vp simulation"""
- shape = (nx, ny)
- if nz is not None:
- shape += (nz,)
- output_file = h5py.File(output, mode=("a" if append else "w"))
- output_group = output_file.create_group(
- str(max((int(x) for x in output_file.keys()), default=-1) + 1)
- )
- ctx.obj["n"] = n
- ctx.obj["output_file"] = output_file
- ctx.obj["output_group"] = output_group
- ctx.obj["seed"] = seed
- ctx.obj["shape"] = shape
-
-
-@vp.command()
-@click.option("--n-layers", default=8, type=int, help="Number of layers")
-@click.option(
- "--initial-vp",
- default=(1350.0, 1650.0),
- type=(float, float),
- help="Initial Vp (in km/s)",
-)
-@click.option(
- "--vp-perturbation",
- default=(-190.0, 570.0),
- type=(float, float),
- help="Per-layer Vp perturbation (in km/s)",
-)
-@click.pass_context
-def rt(
- ctx,
- initial_vp: Tuple[float, float],
- n_layers: int,
- vp_perturbation: Tuple[float, float],
-):
- """Röth-Tarantola model"""
- model = RoethTarantolaGenerator(
- shape=ctx.obj["shape"],
- seed=ctx.obj["seed"],
- n_layers=n_layers,
- initial_vp=initial_vp,
- vp_perturbation=vp_perturbation,
- )
- group = ctx.obj["output_group"]
- with click.progressbar(length=ctx.obj["n"]) as bar:
- for i, data in enumerate(islice(model.generate_many(), ctx.obj["n"])):
- group.create_dataset(str(i), data=data, compression="gzip")
- bar.update(1)
diff --git a/deepseismic/forward/__init__.py b/deepseismic/forward/__init__.py
deleted file mode 100644
index f9a9083f..00000000
--- a/deepseismic/forward/__init__.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from .models import Model, VelocityModel
-from .sources import Receiver, RickerSource, WaveletSource
-from .time import TimeAxis
-from .types import Kernel
-
-__all__ = [
- "Kernel",
- "Model",
- "Receiver",
- "RickerSource",
- "TimeAxis",
- "VelocityModel",
- "WaveletSource",
-]
diff --git a/deepseismic/forward/models.py b/deepseismic/forward/models.py
deleted file mode 100644
index f07b7a1c..00000000
--- a/deepseismic/forward/models.py
+++ /dev/null
@@ -1,162 +0,0 @@
-from typing import Optional, Tuple, Union
-
-import numpy as np
-from devito import (
- Constant,
- Eq,
- Function,
- Grid,
- Operator,
- SubDomain,
- TimeFunction,
- logger,
- solve,
-)
-
-from .sources import PointSource
-from .subdomains import PhysicalDomain
-from .time import TimeAxis
-from .types import Kernel
-
-logger.set_log_level("WARNING")
-
-
-class Model(object):
- def __init__(
- self,
- shape: Tuple[int, ...],
- origin: Tuple[float, ...],
- spacing: Tuple[float, ...],
- n_pml: Optional[int] = 0,
- dtype: Optional[type] = np.float32,
- subdomains: Optional[Tuple[SubDomain]] = (),
- ):
- shape = tuple(int(x) for x in shape)
- origin = tuple(dtype(x) for x in origin)
- n_pml = int(n_pml)
- subdomains = tuple(subdomains) + (PhysicalDomain(n_pml),)
- shape_pml = tuple(x + 2 * n_pml for x in shape)
- extent_pml = tuple(s * (d - 1) for s, d in zip(spacing, shape_pml))
- origin_pml = tuple(
- dtype(o - s * n_pml) for o, s in zip(origin, spacing)
- )
- self.grid = Grid(
- shape=shape_pml,
- extent=extent_pml,
- origin=origin_pml,
- dtype=dtype,
- subdomains=subdomains,
- )
- self.n_pml = n_pml
- self.pml = Function(name="pml", grid=self.grid)
- pml_data = np.pad(
- np.zeros(shape, dtype=dtype),
- [(n_pml,) * 2 for _ in range(self.pml.ndim)],
- mode="edge",
- )
- pml_coef = 1.5 * np.log(1000.0) / 40.0
- for d in range(self.pml.ndim):
- for i in range(n_pml):
- pos = np.abs((n_pml - i + 1) / n_pml)
- val = pml_coef * (pos - np.sin(2 * np.pi * pos) / (2 * np.pi))
- idx = [slice(0, x) for x in pml_data.shape]
- idx[d] = slice(i, i + 1)
- pml_data[tuple(idx)] += val / self.grid.spacing[d]
- idx[d] = slice(
- pml_data.shape[d] - i, pml_data.shape[d] - i + 1
- )
- pml_data[tuple(idx)] += val / self.grid.spacing[d]
- pml_data = np.pad(
- pml_data,
- [(i.left, i.right) for i in self.pml._size_halo],
- mode="edge",
- )
- self.pml.data_with_halo[:] = pml_data
- self.shape = shape
-
- @property
- def domain_size(self) -> Tuple[float, ...]:
- return tuple((d - 1) * s for d, s in zip(self.shape, self.spacing))
-
- @property
- def dtype(self) -> type:
- return self.grid.dtype
-
- @property
- def spacing(self):
- return self.grid.spacing
-
- @property
- def spacing_map(self):
- return self.grid.spacing_map
-
- @property
- def time_spacing(self):
- return self.grid.stepping_dim.spacing
-
-
-class VelocityModel(Model):
- def __init__(
- self,
- shape: Tuple[int, ...],
- origin: Tuple[float, ...],
- spacing: Tuple[float, ...],
- vp: Union[float, np.ndarray],
- space_order: Optional[int] = None,
- n_pml: Optional[int] = 0,
- dtype: Optional[type] = np.float32,
- subdomains: Optional[Tuple[SubDomain]] = (),
- ):
- super().__init__(shape, origin, spacing, n_pml, dtype, subdomains)
- if isinstance(vp, np.ndarray):
- assert space_order is not None
- self.m = Function(
- name="m", grid=self.grid, space_order=int(space_order)
- )
- else:
- self.m = Constant(name="m", value=1.0 / float(vp) ** 2.0)
- self.vp = vp
-
- @property
- def vp(self) -> Union[float, np.ndarray]:
- return self._vp
-
- @vp.setter
- def vp(self, vp: Union[float, np.ndarray]) -> None:
- self._vp = vp
- if isinstance(vp, np.ndarray):
- pad_widths = [
- (self.n_pml + i.left, self.n_pml + i.right)
- for i in self.m._size_halo
- ]
- self.m.data_with_halo[:] = np.pad(
- 1.0 / self.vp ** 2.0, pad_widths, mode="edge"
- )
- else:
- self.m.data = 1.0 / float(vp) ** 2.0
-
- def solve(
- self,
- source: PointSource,
- receivers: PointSource,
- time_range: TimeAxis,
- space_order: Optional[int] = 4,
- kernel: Optional[Kernel] = Kernel.OT2,
- ) -> np.ndarray:
- assert isinstance(kernel, Kernel)
- u = TimeFunction(
- name="u", grid=self.grid, time_order=2, space_order=space_order
- )
- H = u.laplace
- if kernel is Kernel.OT4:
- H += self.time_spacing ** 2 / 12 * u.laplace2(1 / self.m)
- eq = Eq(
- u.forward, solve(self.m * u.dt2 - H + self.pml * u.dt, u.forward)
- )
- src_term = source.inject(
- field=u.forward, expr=source * self.time_spacing ** 2 / self.m
- )
- rec_term = receivers.interpolate(expr=u)
- op = Operator([eq] + src_term + rec_term, subs=self.spacing_map)
- op(time=time_range.num - 1, dt=time_range.step)
- return receivers.data
diff --git a/deepseismic/forward/sources.py b/deepseismic/forward/sources.py
deleted file mode 100644
index 5a0470e2..00000000
--- a/deepseismic/forward/sources.py
+++ /dev/null
@@ -1,132 +0,0 @@
-from typing import Optional
-
-import numpy as np
-import sympy
-from devito.types import Dimension, SparseTimeFunction
-from devito.types.basic import _SymbolCache
-from scipy import interpolate
-
-from .time import TimeAxis
-
-
-class PointSource(SparseTimeFunction):
- def __new__(cls, *args, **kwargs):
- if cls in _SymbolCache:
- options = kwargs.get("options", {})
- obj = sympy.Function.__new__(cls, *args, **options)
- obj._cached_init()
- return obj
- name = kwargs.pop("name")
- grid = kwargs.pop("grid")
- time_range = kwargs.pop("time_range")
- time_order = kwargs.pop("time_order", 2)
- p_dim = kwargs.pop("dimension", Dimension(name="p_%s" % name))
- npoint = kwargs.pop("npoint", None)
- coordinates = kwargs.pop(
- "coordinates", kwargs.pop("coordinates_data", None)
- )
- if npoint is None:
- assert (
- coordinates is not None
- ), "Either `npoint` or `coordinates` must be provided"
- npoint = coordinates.shape[0]
- obj = SparseTimeFunction.__new__(
- cls,
- name=name,
- grid=grid,
- dimensions=(grid.time_dim, p_dim),
- npoint=npoint,
- nt=time_range.num,
- time_order=time_order,
- coordinates=coordinates,
- **kwargs
- )
- obj._time_range = time_range
- data = kwargs.get("data")
- if data is not None:
- obj.data[:] = data
- return obj
-
- @property
- def time_range(self) -> TimeAxis:
- return self._time_range
-
- @property
- def time_values(self) -> np.ndarray:
- return self._time_range.time_values
-
- def resample(
- self,
- dt: Optional[float] = None,
- num: Optional[int] = None,
- rtol: Optional[float] = 1.0e-5,
- order: Optional[int] = 3,
- ):
- assert (dt is not None) ^ (
- num is not None
- ), "Exactly one of `dt` or `num` must be provided"
- start = self._time_range.start
- stop = self._time_range.stop
- dt0 = self._time_range.step
- if dt is not None:
- new_time_range = TimeAxis(start=start, stop=stop, step=dt)
- else:
- new_time_range = TimeAxis(start=start, stop=stop, num=num)
- dt = new_time_range.step
- if np.isclose(dt0, dt, rtol=rtol):
- return self
- n_traces = self.data.shape[1]
- new_traces = np.zeros(
- (new_time_range.num, n_traces), dtype=self.data.dtype
- )
- for j in range(n_traces):
- tck = interpolate.splrep(
- self._time_range.time_values, self.data[:, j], k=order
- )
- new_traces[:, j] = interpolate.splev(
- new_time_range.time_values, tck
- )
- return PointSource(
- name=self.name,
- grid=self.grid,
- time_range=new_time_range,
- coordinates=self.coordinates.data,
- data=new_traces,
- )
-
- _pickle_kwargs = SparseTimeFunction._pickle_kwargs + ["time_range"]
- _pickle_kwargs.remove("nt") # Inferred from time_range
-
-
-class Receiver(PointSource):
- pass
-
-
-class WaveletSource(PointSource):
- def __new__(cls, *args, **kwargs):
- if cls in _SymbolCache:
- options = kwargs.get("options", {})
- obj = sympy.Function.__new__(cls, *args, **options)
- obj._cached_init()
- return obj
- npoint = kwargs.pop("npoint", 1)
- obj = PointSource.__new__(cls, npoint=npoint, **kwargs)
- obj.f0 = kwargs.get("f0")
- for p in range(npoint):
- obj.data[:, p] = obj.wavelet(obj.f0, obj.time_values)
- return obj
-
- def __init__(self, *args, **kwargs):
- if not self._cached():
- super(WaveletSource, self).__init__(*args, **kwargs)
-
- def wavelet(self, f0: float, t: np.ndarray) -> np.ndarray:
- raise NotImplementedError
-
- _pickle_kwargs = PointSource._pickle_kwargs + ["f0"]
-
-
-class RickerSource(WaveletSource):
- def wavelet(self, f0: float, t: np.ndarray) -> np.ndarray:
- r = np.pi * f0 * (t - 1.0 / f0)
- return (1.0 - 2.0 * r ** 2.0) * np.exp(-r ** 2.0)
diff --git a/deepseismic/forward/subdomains.py b/deepseismic/forward/subdomains.py
deleted file mode 100644
index 2ed6cedb..00000000
--- a/deepseismic/forward/subdomains.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from typing import Dict, Iterable, Tuple
-
-from devito import Dimension, SubDomain
-
-
-class PhysicalDomain(SubDomain):
- name = "physical_domain"
-
- def __init__(self, n_pml: int):
- super().__init__()
- self.n_pml = n_pml
-
- def define(
- self, dimensions: Iterable[Dimension]
- ) -> Dict[Dimension, Tuple[str, int, int]]:
- return {d: ("middle", self.n_pml, self.n_pml) for d in dimensions}
diff --git a/deepseismic/forward/time.py b/deepseismic/forward/time.py
deleted file mode 100644
index d3dfc00d..00000000
--- a/deepseismic/forward/time.py
+++ /dev/null
@@ -1,34 +0,0 @@
-from typing import Optional
-
-import numpy as np
-
-
-class TimeAxis(object):
- def __init__(
- self,
- start: Optional[float] = None,
- stop: Optional[float] = None,
- num: Optional[int] = None,
- step: Optional[float] = None,
- dtype: Optional[type] = np.float32,
- ):
- if start is None:
- start = step * (1 - num) + stop
- elif stop is None:
- stop = step * (num - 1) + start
- elif num is None:
- num = int(np.ceil((stop - start + step) / step))
- stop = step * (num - 1) + start
- elif step is None:
- step = (stop - start) / (num - 1)
- else:
- raise ValueError
- self.start = start
- self.stop = stop
- self.num = num
- self.step = step
- self.dtype = dtype
-
- @property
- def time_values(self) -> np.ndarray:
- return np.linspace(self.start, self.stop, self.num, dtype=self.dtype)
diff --git a/deepseismic/forward/types.py b/deepseismic/forward/types.py
deleted file mode 100644
index 772f67b7..00000000
--- a/deepseismic/forward/types.py
+++ /dev/null
@@ -1,6 +0,0 @@
-from enum import Enum, auto
-
-
-class Kernel(Enum):
- OT2 = auto()
- OT4 = auto()
diff --git a/deepseismic/velocity/__init__.py b/deepseismic/velocity/__init__.py
deleted file mode 100644
index 98225180..00000000
--- a/deepseismic/velocity/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-from .generator import Generator
-from .roeth_tarantola import RoethTarantolaGenerator
-
-__all__ = ["Generator", "RoethTarantolaGenerator"]
diff --git a/deepseismic/velocity/generator.py b/deepseismic/velocity/generator.py
deleted file mode 100644
index ddc2eb4a..00000000
--- a/deepseismic/velocity/generator.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from typing import Optional, Tuple
-
-import numpy as np
-
-
-class Generator(object):
- def __init__(
- self,
- shape: Tuple[int, ...],
- dtype: Optional[type] = np.float32,
- seed: Optional[int] = None,
- ):
- self.shape = shape
- self.dtype = dtype
- self._prng = np.random.RandomState(seed)
-
- def generate(self) -> np.ndarray:
- raise NotImplementedError
-
- def generate_many(self) -> np.ndarray:
- while True:
- yield self.generate()
diff --git a/deepseismic/velocity/roeth_tarantola.py b/deepseismic/velocity/roeth_tarantola.py
deleted file mode 100644
index 6c3c0cc4..00000000
--- a/deepseismic/velocity/roeth_tarantola.py
+++ /dev/null
@@ -1,41 +0,0 @@
-from typing import Optional, Tuple
-
-import numpy as np
-
-from .generator import Generator
-
-
-class RoethTarantolaGenerator(Generator):
- def __init__(
- self,
- shape: Tuple[int, ...],
- dtype: Optional[type] = np.float32,
- seed: Optional[int] = None,
- depth_dim: Optional[int] = -1,
- n_layers: Optional[int] = 8,
- initial_vp: Optional[Tuple[float, float]] = (1.35, 1.65),
- vp_perturbation: Optional[Tuple[float, float]] = (-0.19, 0.57),
- ):
- super().__init__(shape, dtype, seed)
- self.depth_dim = depth_dim
- self.n_layers = n_layers
- self.initial_vp = initial_vp
- self.vp_perturbation = vp_perturbation
-
- def generate(self) -> np.ndarray:
- vp = np.zeros(self.shape, dtype=self.dtype)
- dim = self.depth_dim
- layer_idx = np.round(
- np.linspace(0, self.shape[dim], self.n_layers + 1)
- ).astype(np.int)
- vp_idx = [slice(0, x) for x in vp.shape]
- layer_vp = None
- for i in range(self.n_layers):
- vp_idx[dim] = slice(layer_idx[i], layer_idx[i + 1])
- layer_vp = (
- self._prng.uniform(*self.initial_vp)
- if layer_vp is None
- else layer_vp + self._prng.uniform(*self.vp_perturbation)
- )
- vp[tuple(vp_idx)] = layer_vp
- return vp
diff --git a/docs/README.md b/docs/README.md
new file mode 100644
index 00000000..f85ae6a2
--- /dev/null
+++ b/docs/README.md
@@ -0,0 +1,6 @@
+# Documentation
+
+To set up the documentation, first install the dependencies of the full environment by following [SETUP.md](../SETUP.md).
+
+TODO: add more text
+
diff --git a/environment/anaconda/local/environment.yml b/environment/anaconda/local/environment.yml
new file mode 100644
index 00000000..4eebd0b3
--- /dev/null
+++ b/environment/anaconda/local/environment.yml
@@ -0,0 +1,38 @@
+name: seismic-interpretation
+channels:
+ - conda-forge
+ - pytorch
+dependencies:
+ - python=3.6.7
+ - pip
+ - pytorch==1.3.1
+ - cudatoolkit==10.1.243
+ - jupyter
+ - ipykernel
+ - torchvision==0.4.2
+ - pandas==0.25.3
+ - opencv==4.1.2
+ - scikit-learn==0.21.3
+ - tensorflow==2.0
+ - opt-einsum>=2.3.2
+ - tqdm==4.39.0
+ - itkwidgets==0.23.1
+ - pytest
+ - papermill>=1.0.1
+ - pip:
+ - segyio==1.8.8
+ - pytorch-ignite==0.3.0.dev20191105 # pre-release until stable available
+ - fire==0.2.1
+ - toolz==0.10.0
+ - tabulate==0.8.2
+ - Jinja2==2.10.3
+ - gitpython==3.0.5
+ - tensorboard==2.0.1
+ - tensorboardx==1.9
+ - invoke==1.3.0
+ - yacs==0.1.6
+ - albumentations==0.4.3
+ - black
+ - pylint
+ - scipy==1.1.0
+ - jupytext==1.3.0
diff --git a/environment/docker/apex/Makefile b/environment/docker/apex/Makefile
new file mode 100644
index 00000000..57d2dabc
--- /dev/null
+++ b/environment/docker/apex/Makefile
@@ -0,0 +1,51 @@
+define PROJECT_HELP_MSG
+Makefile to control project aml_dist
+Usage:
+ help show this message
+ build build docker image to use as control plane
+ bash run bash inside running docker container
+ stop stop running docker container
+endef
+export PROJECT_HELP_MSG
+PWD:=$(shell pwd)
+PORT:=9999
+TBOARD_PORT:=6006
+IMAGE_NAME:=ignite_image
+NAME:=ignite_container # Name of running container
+DATA:=/mnt
+
+BASEDIR:=$(shell dirname $(shell dirname ${PWD}))
+
+local_code_volume:=-v $(BASEDIR):/workspace
+volumes:=-v $(DATA):/data \
+ -v ${HOME}/.bash_history:/root/.bash_history
+
+
+help:
+ echo "$$PROJECT_HELP_MSG" | less
+
+build:
+ docker build -t $(IMAGE_NAME) -f dockerfile .
+
+run:
+ # Start docker running as daemon
+ docker run $(local_code_volume) $(volumes) $(setup_environment_file) \
+ --shm-size="4g" \
+ --runtime=nvidia \
+ --name $(NAME) \
+ -d \
+ -v /var/run/docker.sock:/var/run/docker.sock \
+ -e HIST_FILE=/root/.bash_history \
+ -it $(IMAGE_NAME)
+
+ docker exec -it $(NAME) bash
+
+
+bash:
+ docker exec -it $(NAME) bash
+
+stop:
+ docker stop $(NAME)
+ docker rm $(NAME)
+
+.PHONY: help build run bash stop
\ No newline at end of file
diff --git a/environment/docker/apex/dockerfile b/environment/docker/apex/dockerfile
new file mode 100644
index 00000000..3becd3c4
--- /dev/null
+++ b/environment/docker/apex/dockerfile
@@ -0,0 +1,16 @@
+FROM pytorch/pytorch:nightly-devel-cuda10.0-cudnn7
+
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ libglib2.0-0 \
+ libsm6 \
+ libxext6 \
+ libxrender-dev
+
+RUN git clone https://github.com/NVIDIA/apex && \
+ cd apex && \
+ pip install -v --no-cache-dir --global-option="--cpp_ext" --global-option="--cuda_ext" ./
+
+RUN pip install toolz pytorch-ignite torchvision pandas opencv-python fire tensorboardx scikit-learn yacs
+
+WORKDIR /workspace
+CMD /bin/bash
\ No newline at end of file
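
As an illustrative smoke test (not part of the image itself), the apex build above can be verified from inside the running container by importing its mixed-precision module:

    python -c "import apex; from apex import amp; print('apex OK')"
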
diff --git a/environment/docker/horovod/Makefile b/environment/docker/horovod/Makefile
new file mode 100644
index 00000000..a423b726
--- /dev/null
+++ b/environment/docker/horovod/Makefile
@@ -0,0 +1,56 @@
+define PROJECT_HELP_MSG
+Makefile to control project aml_dist
+Usage:
+ help         show this message
+ build        build the docker image to use as a control plane
+ run          start the docker container as a daemon and open a shell inside it
+ run-horovod  launch the example mpirun training job inside the container
+ bash         run bash inside the running docker container
+ stop         stop and remove the running docker container
+endef
+export PROJECT_HELP_MSG
+PWD:=$(shell pwd)
+PORT:=9999
+TBOARD_PORT:=6006
+IMAGE_NAME:=horovod_image
+NAME:=horovod_container # Name of running container
+DATA:=/mnt
+
+BASEDIR:=$(shell dirname $(shell dirname $(shell dirname ${PWD})))
+REPODIR:=$(shell dirname ${BASEDIR})
+
+local_code_volume:=-v $(BASEDIR):/workspace
+volumes:=-v $(DATA):/data \
+ -v ${HOME}/.bash_history:/root/.bash_history
+
+help:
+ echo "$$PROJECT_HELP_MSG" | less
+
+build:
+ docker build -t $(IMAGE_NAME) -f dockerfile ${REPODIR}
+
+run:
+ @echo ${BASEDIR}
+ # Start docker running as daemon
+ docker run $(local_code_volume) $(volumes) $(setup_environment_file) \
+ --privileged \
+ --shm-size="4g" \
+ --runtime=nvidia \
+ --name $(NAME) \
+ -d \
+ -v /var/run/docker.sock:/var/run/docker.sock \
+ -e HIST_FILE=/root/.bash_history \
+ -it $(IMAGE_NAME)
+
+ docker exec -it $(NAME) bash
+
+
+run-horovod:
+ docker exec -it $(NAME) mpirun -np 2 -bind-to none -map-by slot -x NCCL_DEBUG=INFO -x LD_LIBRARY_PATH -x PATH -mca pml ob1 -mca btl ^openib python train_horovod.py
+
+bash:
+ docker exec -it $(NAME) bash
+
+stop:
+ docker stop $(NAME)
+ docker rm $(NAME)
+
+.PHONY: help build run bash stop
\ No newline at end of file
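
A minimal sketch of the corresponding Horovod workflow (targets taken from the Makefile above; train_horovod.py is the script referenced by the run-horovod target and is assumed to exist in the container's working directory):

    make build        # build horovod_image with the repository as build context
    make run          # start horovod_container and open a shell inside it
    make run-horovod  # launch the 2-process mpirun training job defined above
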
diff --git a/environment/docker/horovod/dockerfile b/environment/docker/horovod/dockerfile
new file mode 100644
index 00000000..0e12f455
--- /dev/null
+++ b/environment/docker/horovod/dockerfile
@@ -0,0 +1,130 @@
+FROM nvidia/cuda:10.0-devel-ubuntu18.04
+# Based on default horovod image
+
+ENV PYTORCH_VERSION=1.1.0
+ENV TORCHVISION_VERSION=0.3.0
+ENV CUDNN_VERSION=7.6.0.64-1+cuda10.0
+ENV NCCL_VERSION=2.4.7-1+cuda10.0
+
+# Python 2.7 or 3.6 is supported by Ubuntu Bionic out of the box
+ARG python=3.6
+ENV PYTHON_VERSION=${python}
+
+# Set default shell to /bin/bash
+SHELL ["/bin/bash", "-cu"]
+
+# We need gcc-4.9 to build plugins for TensorFlow & PyTorch, which is only available in Ubuntu Xenial
+RUN echo deb http://archive.ubuntu.com/ubuntu xenial main universe | tee -a /etc/apt/sources.list
+ENV DEBIAN_FRONTEND=noninteractive
+RUN apt-get update && apt-get install -y --no-install-recommends --allow-change-held-packages --allow-downgrades \
+ build-essential \
+ cmake \
+ gcc-4.9 \
+ g++-4.9 \
+ gcc-4.9-base \
+ software-properties-common \
+ git \
+ curl \
+ wget \
+ ca-certificates \
+ libcudnn7=${CUDNN_VERSION} \
+ libnccl2=${NCCL_VERSION} \
+ libnccl-dev=${NCCL_VERSION} \
+ libjpeg-dev \
+ libpng-dev \
+ python${PYTHON_VERSION} \
+ python${PYTHON_VERSION}-dev \
+ librdmacm1 \
+ libibverbs1 \
+ ibverbs-utils \
+ ibutils \
+ net-tools \
+ ibverbs-providers \
+ libglib2.0-0 \
+ libsm6 \
+ libxext6 \
+ libxrender-dev
+
+
+RUN if [[ "${PYTHON_VERSION}" == "3.6" ]]; then \
+ apt-get install -y python${PYTHON_VERSION}-distutils; \
+ fi
+RUN ln -s /usr/bin/python${PYTHON_VERSION} /usr/bin/python
+
+RUN curl -O https://bootstrap.pypa.io/get-pip.py && \
+ python get-pip.py && \
+ rm get-pip.py
+
+# Install PyTorch
+RUN pip install future typing
+RUN pip install numpy
+RUN pip install https://download.pytorch.org/whl/cu100/torch-${PYTORCH_VERSION}-$(python -c "import wheel.pep425tags as w; print('-'.join(w.get_supported()[0]))").whl \
+ https://download.pytorch.org/whl/cu100/torchvision-${TORCHVISION_VERSION}-$(python -c "import wheel.pep425tags as w; print('-'.join(w.get_supported()[0]))").whl
+RUN pip install --no-cache-dir torchvision h5py toolz pytorch-ignite pandas opencv-python fire tensorboardx scikit-learn tqdm yacs albumentations gitpython
+COPY ComputerVision_fork/contrib /contrib
+RUN pip install -e /contrib
+COPY DeepSeismic /DeepSeismic
+RUN pip install -e DeepSeismic/interpretation
+
+# Install Open MPI
+RUN mkdir /tmp/openmpi && \
+ cd /tmp/openmpi && \
+ wget https://www.open-mpi.org/software/ompi/v4.0/downloads/openmpi-4.0.0.tar.gz && \
+ tar zxf openmpi-4.0.0.tar.gz && \
+ cd openmpi-4.0.0 && \
+ ./configure --enable-orterun-prefix-by-default && \
+ make -j $(nproc) all && \
+ make install && \
+ ldconfig && \
+ rm -rf /tmp/openmpi
+
+# Pin GCC to 4.9 (priority 200) to compile correctly against TensorFlow, PyTorch, and MXNet.
+# Backup existing GCC installation as priority 100, so that it can be recovered later.
+RUN update-alternatives --install /usr/bin/gcc gcc $(readlink -f $(which gcc)) 100 && \
+ update-alternatives --install /usr/bin/x86_64-linux-gnu-gcc x86_64-linux-gnu-gcc $(readlink -f $(which gcc)) 100 && \
+ update-alternatives --install /usr/bin/g++ g++ $(readlink -f $(which g++)) 100 && \
+ update-alternatives --install /usr/bin/x86_64-linux-gnu-g++ x86_64-linux-gnu-g++ $(readlink -f $(which g++)) 100
+RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-4.9 200 && \
+ update-alternatives --install /usr/bin/x86_64-linux-gnu-gcc x86_64-linux-gnu-gcc /usr/bin/gcc-4.9 200 && \
+ update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-4.9 200 && \
+ update-alternatives --install /usr/bin/x86_64-linux-gnu-g++ x86_64-linux-gnu-g++ /usr/bin/g++-4.9 200
+
+
+# Install Horovod, temporarily using CUDA stubs
+RUN ldconfig /usr/local/cuda/targets/x86_64-linux/lib/stubs && \
+ HOROVOD_GPU_ALLREDUCE=NCCL HOROVOD_WITH_PYTORCH=1 pip install --no-cache-dir horovod && \
+    ldconfig
+
+# Remove GCC pinning
+RUN update-alternatives --remove gcc /usr/bin/gcc-4.9 && \
+ update-alternatives --remove x86_64-linux-gnu-gcc /usr/bin/gcc-4.9 && \
+ update-alternatives --remove g++ /usr/bin/g++-4.9 && \
+ update-alternatives --remove x86_64-linux-gnu-g++ /usr/bin/g++-4.9
+
+# Create a wrapper for OpenMPI to allow running as root by default
+RUN mv /usr/local/bin/mpirun /usr/local/bin/mpirun.real && \
+ echo '#!/bin/bash' > /usr/local/bin/mpirun && \
+ echo 'mpirun.real --allow-run-as-root "$@"' >> /usr/local/bin/mpirun && \
+ chmod a+x /usr/local/bin/mpirun
+
+# Configure OpenMPI with sensible defaults:
+# --bind-to none --map-by slot --mca btl_tcp_if_exclude lo,docker0
+RUN echo "hwloc_base_binding_policy = none" >> /usr/local/etc/openmpi-mca-params.conf && \
+ echo "rmaps_base_mapping_policy = slot" >> /usr/local/etc/openmpi-mca-params.conf
+ # echo "btl_tcp_if_exclude = lo,docker0" >> /usr/local/etc/openmpi-mca-params.conf
+
+# Set default NCCL parameters
+RUN echo NCCL_DEBUG=INFO >> /etc/nccl.conf && \
+ echo NCCL_SOCKET_IFNAME=^docker0 >> /etc/nccl.conf
+
+# Install OpenSSH for MPI to communicate between containers
+RUN apt-get install -y --no-install-recommends openssh-client openssh-server && \
+ mkdir -p /var/run/sshd
+
+# Allow OpenSSH to talk to containers without asking for confirmation
+RUN cat /etc/ssh/ssh_config | grep -v StrictHostKeyChecking > /etc/ssh/ssh_config.new && \
+ echo " StrictHostKeyChecking no" >> /etc/ssh/ssh_config.new && \
+ mv /etc/ssh/ssh_config.new /etc/ssh/ssh_config
+
+WORKDIR /workspace
+CMD /bin/bash
diff --git a/examples/interpretation/README.md b/examples/interpretation/README.md
new file mode 100644
index 00000000..7f151c60
--- /dev/null
+++ b/examples/interpretation/README.md
@@ -0,0 +1 @@
+Description of examples
diff --git a/examples/interpretation/notebooks/F3_block_training_and_evaluation_local.ipynb b/examples/interpretation/notebooks/F3_block_training_and_evaluation_local.ipynb
new file mode 100644
index 00000000..320f40a3
--- /dev/null
+++ b/examples/interpretation/notebooks/F3_block_training_and_evaluation_local.ipynb
@@ -0,0 +1,1024 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Copyright (c) Microsoft Corporation.\n",
+ "\n",
+ "Licensed under the MIT License."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Model training and evaluation on F3 Netherlands dataset"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Seismic interpretation, also referred to as facies classification, is a task of determining types of rock in the earth’s subsurface, given seismic data. Seismic interpretation is used as a standard approach for determining precise locations of oil deposits for drilling, therefore reducing risks and potential losses. In recent years, there has been a great interest in using fully-supervised deep learning models for seismic interpretation. \n",
+ "\n",
+ "In this notebook, we demonstrate how to train a deep neural network for facies prediction using F3 Netherlands dataset. The F3 block is located in the North Sea off the shores of Netherlands. The dataset contains 6 classes (facies or lithostratigraphic units), all of which are of varying thickness (class imbalance). Processed data is available in numpy format as a `401 x 701 x 255` array. The processed F3 data is made available by [Alaudah et al. 2019](https://github.com/olivesgatech/facies_classification_benchmark). "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Environment setup\n",
+ "\n",
+ "To set up the conda environment, please follow the instructions in the top-level [README.md](../../../README.md) file.\n",
+ "\n",
+ "__Note__: To register the conda environment in Jupyter, run:\n",
+ "`python -m ipykernel install --user --name envname`"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Library imports"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Let's load required libraries."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import logging\n",
+ "import logging.config\n",
+ "from os import path\n",
+ "import random\n",
+ "import matplotlib.pyplot as plt\n",
+ "plt.rcParams.update({\"font.size\": 16})\n",
+ "\n",
+ "import yacs.config\n",
+ "\n",
+ "import cv2\n",
+ "import numpy as np\n",
+ "import torch\n",
+ "from albumentations import Compose, HorizontalFlip, Normalize, PadIfNeeded, Resize\n",
+ "from ignite.contrib.handlers import CosineAnnealingScheduler\n",
+ "from ignite.handlers import ModelCheckpoint\n",
+ "from ignite.engine import Events\n",
+ "from ignite.metrics import Loss\n",
+ "from ignite.utils import convert_tensor\n",
+ "from toolz import compose, take\n",
+ "from torch.utils import data\n",
+ "\n",
+ "from cv_lib.utils import load_log_configuration\n",
+ "from cv_lib.event_handlers import SnapshotHandler, logging_handlers\n",
+ "from cv_lib.segmentation import models\n",
+ "from cv_lib.segmentation.dutchf3.engine import create_supervised_trainer\n",
+ "\n",
+ "from cv_lib.segmentation.dutchf3.utils import (\n",
+ " current_datetime,\n",
+ " generate_path,\n",
+ " git_branch,\n",
+ " git_hash,\n",
+ " np_to_tb,\n",
+ ")\n",
+ "\n",
+ "from deepseismic_interpretation.dutchf3.data import (\n",
+ " get_patch_loader,\n",
+ " decode_segmap,\n",
+ " get_test_loader,\n",
+ ")\n",
+ "\n",
+ "from itkwidgets import view\n",
+ "\n",
+ "from utilities import (\n",
+ " plot_aline,\n",
+ " prepare_batch,\n",
+ " patch_label_2d,\n",
+ " compose_processing_pipeline,\n",
+ " output_processing_pipeline,\n",
+ " write_section_file,\n",
+ " runningScore,\n",
+ ")\n",
+ "\n",
+ "# set device\n",
+ "device = \"cpu\"\n",
+ "if torch.cuda.is_available():\n",
+ " device = \"cuda\"\n",
+ "\n",
+ "# number of images to score\n",
+ "N_EVALUATE = 20\n",
+ "# experiment configuration file\n",
+ "CONFIG_FILE = \"./configs/patch_deconvnet_skip.yaml\"\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Data download and preparation\n",
+ "\n",
+ "To download and prepare the F3 data set, please follow the instructions in the top-level [README](../../../README.md) file. Once you've downloaded and prepared the data set, you'll find your files in the following directory tree:\n",
+ "\n",
+ "```\n",
+ "data\n",
+ "├── splits\n",
+ "├── test_once\n",
+ "│ ├── test1_labels.npy\n",
+ "│ ├── test1_seismic.npy\n",
+ "│ ├── test2_labels.npy\n",
+ "│ └── test2_seismic.npy\n",
+ "└── train\n",
+ " ├── train_labels.npy\n",
+ " └── train_seismic.npy\n",
+ "```\n",
+ "\n",
+ "We recommend saving the data under `$HOME/data/dutchf3` since this notebook will use that location as the data root. Otherwise, modify the `DATASET.ROOT` field in the configuration file, described next. "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Experiment configuration file"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "We use configuration files to specify experiment configuration, such as hyperparameters used in training and evaluation, as well as other experiment settings. We provide several configuration files for this notebook, under `./configs`, mainly differing in the DNN architecture used for defining the model.\n",
+ "\n",
+ "Modify the `CONFIG_FILE` variable above if you would like to run the experiment using a different configuration file."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Configuration loaded. Please check that the DATASET.ROOT:/data/dutchf3 points to your data location.\n",
+ "To modify any of the options, please edit the configuration file ./configs/patch_deconvnet_skip.yaml and reload. \n",
+ "\n",
+ "CUDNN:\n",
+ " BENCHMARK: True\n",
+ " DETERMINISTIC: False\n",
+ " ENABLED: True\n",
+ "DATASET:\n",
+ " CLASS_WEIGHTS: [0.7151, 0.8811, 0.5156, 0.9346, 0.9683, 0.9852]\n",
+ " NUM_CLASSES: 6\n",
+ " ROOT: /data/dutchf3\n",
+ "GPUS: (0,)\n",
+ "LOG_CONFIG: logging.conf\n",
+ "LOG_DIR: log\n",
+ "MODEL:\n",
+ " IN_CHANNELS: 1\n",
+ " NAME: patch_deconvnet_skip\n",
+ "OUTPUT_DIR: output\n",
+ "PRINT_FREQ: 50\n",
+ "SEED: 2019\n",
+ "TEST:\n",
+ " CROSSLINE: True\n",
+ " INLINE: True\n",
+ " MODEL_PATH: /data/home/mat/repos/DeepSeismic/examples/interpretation/notebooks/output/models/model_patch_deconvnet_skip_2.pth\n",
+ " POST_PROCESSING:\n",
+ " CROP_PIXELS: 0\n",
+ " SIZE: 99\n",
+ " SPLIT: test1\n",
+ " TEST_STRIDE: 10\n",
+ "TRAIN:\n",
+ " AUGMENTATION: True\n",
+ " AUGMENTATIONS:\n",
+ " PAD:\n",
+ " HEIGHT: 99\n",
+ " WIDTH: 99\n",
+ " RESIZE:\n",
+ " HEIGHT: 99\n",
+ " WIDTH: 99\n",
+ " BATCH_SIZE_PER_GPU: 64\n",
+ " BEGIN_EPOCH: 0\n",
+ " DEPTH: none\n",
+ " END_EPOCH: 100\n",
+ " MAX_LR: 0.02\n",
+ " MEAN: 0.0009997\n",
+ " MIN_LR: 0.001\n",
+ " MODEL_DIR: models\n",
+ " MOMENTUM: 0.9\n",
+ " PATCH_SIZE: 99\n",
+ " SNAPSHOTS: 5\n",
+ " STD: 0.20977\n",
+ " STRIDE: 50\n",
+ " WEIGHT_DECAY: 0.0001\n",
+ "VALIDATION:\n",
+ " BATCH_SIZE_PER_GPU: 512\n",
+ "WORKERS: 4\n"
+ ]
+ }
+ ],
+ "source": [
+ "with open(CONFIG_FILE, \"rt\") as f_read:\n",
+ " config = yacs.config.load_cfg(f_read)\n",
+ "\n",
+ "print(f'Configuration loaded. Please check that the DATASET.ROOT:{config.DATASET.ROOT} points to your data location.')\n",
+ "print(f'To modify any of the options, please edit the configuration file {CONFIG_FILE} and reload. \\n')\n",
+ "print(config)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Override parameters in case we use papermill"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {
+ "tags": [
+ "parameters"
+ ]
+ },
+ "outputs": [],
+ "source": [
+ "# The number of datapoints you want to run in training or validation per batch \n",
+ "# Setting to None will run whole dataset\n",
+ "# useful for integration tests with a setting of something like 3\n",
+ "# Use only if you want to check things are running and don't want to run\n",
+ "# through whole dataset\n",
+ "max_iterations = None\n",
+ "# The number of epochs to run in training\n",
+ "max_epochs = config.TRAIN.END_EPOCH \n",
+ "max_snapshots = config.TRAIN.SNAPSHOTS\n",
+ "dataset_root = config.DATASET.ROOT"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# reduce number of test images if running a dummy model\n",
+ "if max_epochs<2:\n",
+ " N_EVALUATE=3"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## F3 data set "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Let's visualize a few sections of the F3 data set. The processed F3 data set is stored as a 3D numpy array. Let's view slices of the data along inline and crossline directions. "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Number of inline slices: 401\n",
+ "Number of crossline slices: 701\n",
+ "Depth dimension : 255\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Load training data and labels\n",
+ "train_seismic = np.load(path.join(dataset_root, \"train/train_seismic.npy\"))\n",
+ "train_labels = np.load(path.join(dataset_root, \"train/train_labels.npy\"))\n",
+ "\n",
+ "print(f\"Number of inline slices: {train_seismic.shape[0]}\")\n",
+ "print(f\"Number of crossline slices: {train_seismic.shape[1]}\")\n",
+ "print(f\"Depth dimension : {train_seismic.shape[2]}\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "application/vnd.jupyter.widget-view+json": {
+ "model_id": "1aadea98bda8458fbc03782571b1d4b7",
+ "version_major": 2,
+ "version_minor": 0
+ },
+ "text/plain": [
+ "Viewer(geometries=[], gradient_opacity=0.22, point_sets=[], rendered_image="
+ ]
+ },
+ "metadata": {
+ "needs_background": "light"
+ },
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "idx = 100\n",
+ "x_in = train_seismic[idx, :, :].swapaxes(0, 1)\n",
+ "x_inl = train_labels[idx, :, :].swapaxes(0, 1)\n",
+ "\n",
+ "plot_aline(x_in, x_inl, xlabel=\"inline\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Let's plot a __crossline__ slice."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAABEEAAAFuCAYAAABuowaQAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8li6FKAAAgAElEQVR4nOy9e9BtW3YX9Btzrr2/c+7tTuwmkoeEBCgSlCqjAU1CAhHRUgFfYKDwDwUlpHwDvoiJGoilpaZCofggsQoFijJViCYUBYaEJBhMCZSAZSgtEwzRJhJCmtB97vm+vdacwz/GY4459/5u9+2+pzvnnPGrund/e6+15pqvtc4cvznGbxAzI5FIJBKJRCKRSCQSiUTiVUf5ZFcgkUgkEolEIpFIJBKJROITgSRBEolEIpFIJBKJRCKRSLwWSBIkkUgkEolEIpFIJBKJxGuBJEESiUQikUgkEolEIpFIvBZIEiSRSCQSiUQikUgkEonEa4EkQRKJRCKRSCQSiUQikUi8FkgSJJFIJBKJRCKRSLz2IKIfIqIfeoHlMxF994sqP5FIfHRIEiSRSHxMIKLP1X/M43/PiOgDRPTHiOhrieinvQv3+W4i4nejzolEIpFIJF5+hDXI//DJrksikXj5sH2yK5BIJF56/J8A/lv9+wmAzwDwJQC+HsDXEtFXM/Nv/2RVLpFIJBKJRCKRSCQMSYIkEomPF/8HM3/d+iMR/TIAvxvANxLRh5n5mz/hNUskEolEIpFIJBKJgAyHSSQSLwTM/IcB/Ar9+h8Q0ZsAQESfRUS/jYj+FBH9VSJ6IKIfIKJvIKL3xjI0DObL7e/w39fpb2ci+pc1/OYDRHQhoh8hot9PRD/7E9faRCKRSCQSP9lARJ9HRP8xEf05IvogEd0T0fdryO7pba77KUT0u4noR4noLSL6k0T0ix859zOI6D8hor+oa5q/QkS/j4h+xotrWSKR+HiQniCJROKFgZm/l4i+B0Jk/BIA3wbgFwH4TQC+E8D/DIABfBGAfxXALyKiL2XmXYv4rQB+LYDP0b8N362f7wfwjQD+BIA/BOAnAHw+gF8F4B8kop/HzP/3i2pfIpFIJBKJn9T4FQB+HYA/DuA7ANxB1iRfD+DnA/jHblxzBvDHAJwgHq2fBuDXAPh2IvplzPztdqJuuHw3JBT4jwD47wB8NmQd8g8Q0Rcz8w++kJYlEomPGUmCJBKJFw0jQX4+hAT54wA+g5mfxZOI6Gshi5JfDeD3AQAzfx0R/T0APudWyA2ADwL46cz8l5eyvhxCsnwNgF//bjYmkUgkEonES4PfC+AbmfliPxARAfgmAL+eiL6Mmb93ueYzAfzvAH4pMx96zX8J4PsA/C4i+lnM3PXc3wMhSX4xM/+JcI8vgWzQ/A4Av/zFNC2RSHysyHCYRCLxovEj+vlpAMDMP7oSIIr/XD//vo+2YGZ+WAkQ/f17APyFd1JWIpFIJBKJVwvM/IFIgOhvDOC/0K+PrRP+HSNA9Jo/DeAPAvhcAF8GAET0hQC+GMA3RwJEz/8+AN8K4B8iok99F5qSSCTeRaQnSCKReNGgqx+IvgLAVwH4OwC8DzMh+5nvqHCinwfg3wDwpQB+KsR91XC5eVEikUgkEolXHkRUAPyzkNDanwvgUzCvS26tOXYAf+rG798L4CsAfAHEy+OL9PefZlplCz4Tsr752QD+zDuvfSKReFFIEiSRSLxo2ALjrwIAEf3rAP4jAD8KiZ/9AIB7PeffhcTrflQgoi+DhL10AP8jgB8A8AyiM/JrIVoiiUQikUgkXk/8pwD+eQB/CeLJ8f9BNkj+JgD/Cm6vOf5aCHeJ+Cv6+Sn6+X79/Ef1v8fw5juscyKReMFIEiSRSLxofLl+/hki2gB8LYC/DOALmPnH7CQi+nQICfJO8NUQAbNfoK6nDiL61R97lROJRCKRSLzM0HXFPwfgzwP4EmZ+Ho59EYQEuYWfQkTlBhHy6fr5N5bPr2Tm/+pdqnYikfgEIDVBEonECwMR/UJINpgfgwiifhpkB+X7IgGi+NJHimlaVr1x7GdBdmxWAuTT9VgikUgkEonXEz8DEvryHZEAUTy25gAkrPbvvvH7l+nnn9dPC5n54o+5holE4pOCJEESicQLARH9UkiqOAD4t1QM9UcBPAfwhUT0NJz7mQD+/UeK+nH9/FtuHPthAO8nor81lHUG8Dsxa4MkEolEIpF4vfDD+vklmhEGAEBEnwfxJH07/Db1XrVr/i5Iut0fgmiDgJn/FwgR8uuI6B9eCyCik4btJhKJn2TIcJhEIvHx4ucEQbA7AJ8B4BcA+DwADwB+MzN/MwAwc9c0c78JwJ8loj8Mian95RCRsc+/Uf53AfgnAHwLEf1RLfN7NaXd7wTw9wP4k0T0LQAOiNL7CbJT8wXvfnMTiUQikUj8JMEXEtF//cixbwfw3wP4xwH8aSL6LgCfBeAfgeiI/cpHrvsRiOfq/0pEf0T//jUQvbGvWsJk/knIOuXbiOh/AvDnIGuRzwHwCyEbOT/nY25dIpF4IUgSJJFIfLz4fAwtj+cAPghJT/t7APw3zPz/Luf/FgB/HcA/BeBfgAij/mcQT5CHG+V/MyS05VcB+LcBVAC/FUKEfJtqf3w1gH8aEp/7RwH8mwC+5V1qXyKRSCQSiZ+c+GzIv/+38Nf12A9DiJB/CcAPAvgaAH8Ij5MgF8gGyzcA+GcAvAfAnwXwNcz8XfFEZv5BIvo7AfxrEHHUr4Rkl/mA3uP3f6wNSyQSLw4kqbITiUQikUgkEolEIpFIJF5tvPSaIET02UT0B4joJ4jobxDRHySin/7JrlcikUgkEonXB7keSSQSiUTi5cBL7QlCRG9A4v4fIGk3GcC/B+ANAH+7CjEmEolEIpFIvDDkeiSRSCQSiZcHL7smyFcC+JkAPp+ZfwAAiOh/A/B/AfgqAN/4SaxbIpFIJBKJ1wO5HkkkEolE4iXBy+4J8p0AnjDzly6/fw8AMPOXf1IqlkgkEolE4rVBrkcSiUQikXh58LJ7gvxcAN964/fvB/AVH00B9T1v8vb+97+rlUokEolE4mXH8eM/jvbhZ/TJrsdLgo9rPXKmO36CN9/1SiUSiUQi8bLjQ/jgjzHz3/xulvmykyDvh6TjXPHjAN732EVE9BsA/AYAqO97Hz7rN//Gd3bXW0vC4FBD4W8mPZ/07/Uk+42xnBDO09/pltPO2zny8FLXW3WM14dzvd5vc6+r+qzfH1k6XzUz9sHHgKu20PV9pnOuxgLXbf8YKkE9FrIeX8pfJokNc/x8u/p8xL5/OzwyjvH+0/H+SDk36jX1+XrdOiA2TgU+JjfneLz0bU4gxqPzeb391OhQV6/z+jza+Mbyl3vRY8ceOd/bv/zm84MeKeexuXrrPaN1fxQfYbLfunQai+X6aS7FvnybdwkxgD7ecVxuTP7wvrx6N9Hj8+NqzPTea7sefR/wjfM/wnvuqiyr+jt8v3k7H7vu1rv9sd/ivLpVziPj8yP/4e94BzV+7fG
O1yNxLfIEb+CL6Je8uNolEolEIvGS4jv4D/yld7vMl50EAT6imXDjAuZvAvBNAHD32Z/NiAv2W6VGYw0AbwuB0QFqBGr6c1x4b4y+sVyzMVDHtVQYtMmN+SjgS0jWU9UIMkOjETjW0YyGfTSVOoG1fGrjdy7BhrA6HmI0UtO2FrmNtZEK0Df236mH9bPdu8u1ZOWufUjh3moI9SplS70Y1Ai88U0CgBpJnUKfRYPEzrW+p6b3rKPdXFjqcEidy6G/2zmV9TvLfVbDw+4dCQ7i+XcdJz6K9wHtZfR3Afq5j/li97G/O4CHKnW1e4T7cWWg8GSYexQb63xrNPWz9e8EHmN+i7Ah69/FkK4XuDHo4xDmiv3GRZ8NLaccoZAufQ8maedGPgb93MEnaSNbvfq4z6iI9kNhqWswwnkvwKVM9ZR+i3XU74VBlaUPdZzwXCpjz0W5AOVC01wqO6FexvPg/QKgn4D6oLfpADUe/drH7W3OtpPMm76FubgB7Qz0M6Of3oZQ02aCZKy6vo/YntfK881qKGAy7ml+Znm5ZxwPK0L77zGjme29Uxh4sAd9PNS8yTNv7y1i6fPtuX5vwPGG1JMBbM8JXKR/rY94m59VabdWYGNQ1XdqJ/Dzqu8HnSf6zkEjGRdrR+EwDmHeHIRy0feL3qLso03+Lolcyzb6x8kgHv8+2Dj4ewyPkDKb1Gmqp17g91zIt/hMUpf/+kn//Tn16XmgIs9A2TrqNr+8mQm9EbA9xoAmHsE7Wo/Etcin0PvfIU2WSCQSiUTiY8XLToJ8ELL7suJ9uL0jcw0C+MTzysV2JuNubNy53dYVKybjW4wg+btvUj6qkiC6YiUCqHYhQgDwqaNFW6WyHDMDu9EwSCCkCRhgtYic9LBF7rTg5mDfBAIjLq4Zb58w2exNAsgtHjHkWO37ctBMJnEgVfRvqsHYKISyA70R2GZiMNh859INO6kzL8YaFwYVgIrtGofr9bsRKVdGXSdpTychSsyw0Hbwmefzzfg3D4ImRj1IDS3GMPjtOAEFxUmJSYbHxu0gEAaREQ0mIQx4ECM33ESmXfcV3l2BNFiMJ4RTjAiJt6D1GShL/yoBwmcGm8Fb4+CT9gkrsQV5HiBzX8gNAG5gPlK3RkDXPg/jIHM8GK+EYRjbtcGAZyWOEI12AvpZyyqEvi3t3RgoMp5GuFm/9w2BBAVQ6XEviiJkoH8a73MSg7WfMMjD1QDG/N3Km37X95eQY+W6Hh1uXMt7YBAScV60u/hCMkZH/xfnQrgB2dgwyXsvXgsAleV9ewcnYoQItU4g9LtBGHItQGEZC9hcwSAPrR4034u7zBOy+WJ9U6W+ThYYuVhC/3XrQ3mOjYTgMBf8IbamxWdG+5Mj+VSunz2/tIR7hi7256pgIvyIw/uQtSpGysR/m2x+U/hu7wl9b4MJXZ+3Uhjk/z7ZA594B/j41yOJRCKRSCQ+IXg7s/dlwPdD4nBX/G0A/sInuC6JRCKRSCReT+R6JJFIJBKJlwQvuyfItwH4BiL6mcz8FwGAiD4XwJcC+C0fVQlFd8/M1R6QnbyOsXOsHgKzHkDYJVNvEsSdfguJCTumuJThsaEbe912r5dtWLadx6CdMO14xvLjd/MgiDuUTVzKsbF7I7Qyu1nTTlc758TDy4NYdrthYQSA7sTKl94IZVMPCN1tLObybvdh+Y2P0HVN3Mvbk+FaHndpWXd92bxo2hgIa1cM+5Ddb738orvhNNzK+4YptIcagJ3Ue2f0v4WL9G3sFJsnTBz60pbfVw8LHT66xIkwvAaszRKiApjb/ORRQ2NqmH5GDyFC7lkTb0EAWzhKnEMxDGaFbcAzEOc6b75pfxuFx70bQKQeNXfdj6OwDF1h4FA3Bw/pKqBd221zZr0Hw72XYjjUCGuyuvLwjughJGcZk3JgChlrT+XZ6HcSatHO+jyH/moQRxXvq4OmsCIPLQtzwcM1oPPU+mANPbEydP7SIbv40SMnenqYlwh1eAgKgaawI2oSslMuVva4tp1x5XVEPT57wPGEJAyFMIVZlOPGXLN3X4F7dR1PWYb5ro9+2gl87sC5ayEM7kB/r8wVIgbvOpiN0N5oN969UM+tMkIRzfPK5lQbbY3zpJ9x/U5hgoeVASj3BcXC6zrAJ3kPWEhOv9N6xHcGBy+4bj/RCL8rPHu52bvb5nId97ewR9oJtalHDgfPog6Q1xl+b65jLsT3en0u59bLmEz+T03o134eHjJMQAVGuFjio8HHvx5JJBKJRCLxCcHLToJ8M4B/EcC3EtHXQpZ0Xw/g/wHwuz7qUszNPrpUm5u6/Rd0PqgTOAol2iLX1pjmqq8LVKjLebkvTgwYxiJ9hIlYndZY7yne2xbPRerhxmPQYXBeRY0bDqSHaWG4L9CyKBcDgNw92l2yg8FDBeIKTwCfGf2pGoamv3Ev964PwRAN0hhSiH4a6WTnxWO++NeyYwiIGbzmzs6jD8pucfjhfExck4Qd6DiJcTePj+kB8AYPUZhCSUJde3RRP43fWe9N3ciWuS5mZHp4QjT4lq7wKRbqaXODq4yrG0Zex0EWRd2Y9T5GGNk1zretWik6NwehN/o3hvG0J1Y/mdvSh+zXmXG+vUWoD2a8Lw2NBAED1HjqI3tm2hNtYyUPkyjHuIeFUpEyeGWXAptqb+zvJXAhtCfSh/0soSn+PJWhHbOSlV6e/WAhXEp4eH+X0I+7GrIIxM+huiMPBNrDfIh9YXM8EBrePwfUeOeJY6kP7P1nVSwXgDV8zPqE+qi+aaBY2E40jqMe6jCk1YA3Uq8D21nmUD+VoYtyAP1U0e9C/xXgeKP79fV5ce2afmbEcD/rEzoklI6attlJkNFHRsj0ikCWkb5rRe/FutT7FEB9ThPRcxCDCnk/DTJLjq/PLFkYTcMULhePO4JWiP9uY2whlXfkz93UB/F9wTr3rVgdr3IA9R5KiC0PfCjPyBoPzdI+q/dIfPR4d9YjiUQikUgkXjheahKEmZ8R0d8L4LcD+L2QJdx3AviNzPzhj64QgC6ySGc1iK40P3TRPRnfheZFa4HrC8TrRYQPamQPw9JtPdMWYBE+jdoO0TtgNbp8d9Z0MIxEMOO6ahlQ47FL/ZxPsN1Mq/+yKzj6hwapwwQ0lp1+q6PqA3Bl4CQGnxEAfCrAQU4euA5JuK95GVi/rcJ+TlDpTi3tNBMQrMaktYGG8UBBo0TEDOG74EOXRA10YvS7Yc+5EaE7wqbd8NFkeGCEOaDThiuhNNbdY0ysBjUlMGiMf9RS8UIJLtwZDRg34IoRLkEoVq8VgzDseAcDeiKFiGa9CACbkVgYvxEPw7/sUA8EdnHQCST9J1oXBcRihBY1ysrBXp9ICnm7YQTPYkBiaK+wipQSax20T+ouBfSNRKyUpI0g9Ybocvz0IdGlOT3D0AMpo919G3MeBNeo8B33I9aNRv1jW/S7nUtxDLUfp3HDqB8QfqfRTysRIsawCokW4HgCJ1tM+PjKa4bm642sK/sgRvxwVY8BGudRCxo44Z14+vB47p0s0rGWssj7h6tUikt87w
3D/ip7j/XZQvhwJRebNQIk9p0Rnd7/HAieOE48yjDNDierbI7eIOrkJhjeXSxEKpbxtmddhGHnee3vbr2e3xp9am0Qb6bwj8kyF6KnhxN0oc6RBLZy6z7GphLQK6FEbanE2+JdWY8kEolEIpH4hOClJkEAgJl/GMCv/LgKMevL3crhC8NhoQfjOxIRvtgkkO06+7GxqASxh2LEc1rYvbvKEGCGcJf6UYdvKIonOU8CqJMtFMMlguG7Gs9GcrgXSiR2rP5GNqgbti+k+yiXixpHIQMBnzpwAo7zbHxfudETJFykYKkjzV4N/n3qwrELHAwaQHby7aR+1j+beCj087Krax1YQr0gYT5z2BHUEwgTXMRS7+fhLmrQ8InR6mJ8WAO63TMQAXY47PgzDWPTs9BYvYEhurmxGC+RQDFPEVwbptFwivWw67dngQCw/gwGlT0D/SThEzJfQ7YUva40gJldVLXdSQH7e1UUNHpCYR5P83KZwkxohFyBxGuDGg0xS5tzNg4gJ5t409Aoq9tlGODW78SjTv0MJ7DYvCPsGdHyy07X5ATjaqe/7GGcaZASTUM15FmyZtEYayMHnFib+6ifpG/7HXs2Ij71EYZRANq6hJx0EuHlyuNx1LY0JnTLVnUrHMIIPns2Y7aX8D7anpF7lIzncnj99A3+bJXw7mxPF8LA3guKSFoQ05JhR96zMeOUv+esD5V4KCoU6nPLxnqTceUN07tuVMCbMkic5eU9vKJYPNKO8H5n8UYpxxCRnd5r2t7oPbMSVXIP8t8jgQrAPWk8/XSR+RWPWx8OL7Qx16wNLZLliY+Id2U9kkgkEolE4oXjpSdB3g14Wk93+WZf4DqiFoadB8hi2eOw1eAyjQo1poksLei6ZQg/D4AbcVa4ud7LAjUQKlY2QQwQ3zXF0B8IWTpAgYBwTxNL+xjc/DsNWZB+y+MCgyjAMBQt/r5eCL0GV/dNMkF4FpCrPMQYVkaluX3WTCM+zMvm1uUxReViOHsYw4lHOMipD0NOPXzE08f6dfT1ld5IBXithVtK4yfToujQe1pWEsL1PCjaTsZsuCEYNhzIF+Jprl3t5gPgDkRCaMqGEcfWOjXupq9l1etzrS+tjlEXxeaj7UB7+62PSEJsLHtIe9pH9iQjndx1YGW8WPpLMyvtF7NcaYSfrUxShKX8NK+pQzqGdjEoLfxFdufJybJ+7sNINK8vrYv079w+121ooy1ugGvGINNSsX7wlL6njnLXxhxscpMppa8zFwzS+VTPDaV2PDk1lNLRe8FxFNQ6nrta5XcrrxRG0clQiFFKBzOh9SJOWHpvu6Y1kr95pOxmppEshUnSfjNwaQVUpTzPOlIZl0uVlMZbH/1/Pyara8ko8bnObWu3h89pJi1phGY4IYAboXs2qDjBMc8zK2sqQ0/tBOwk9Yzvwz48stiuDWQqF4CVdOhnnolT6oO8iOl9b6StBuncXLyJAAwC155fm5uQOYaDNM2uvrxP9hIP7QY8AxnV+d+Yfr+hP73xzk4kEolEIpF4yZEkCDA0PSK2Phk0V6ghfe2uRoHv7gcjjNRoVuFVcg8NXYBqelcQo+8F/RgMC527/Yl+JZIo9+h7GWk+rd56X78YGJ4hdm4jcAsaG2y7l8EItGb44j0s+AEwVBulkIs0lgaQhU880PAQCQbsZEyol4fvIEcDfSFZQJhTt+pufPSqiHobBDGEqACt8gibCalRaS+uMeDtLXxlQ3tY0i0SwSs02kYWKqIkFltqVVK7zr0DBpFgO76TJ0icSj4eYQdY2zeHtNjfoZJ2DDTpJsRjXnf7n02VJ32QhKwECBMQNRJsXkRDKoa0mEHrDYN7KJjRDADMBZ6e2iuDYbhCxoC5zGNUhEQx7wZL92nplM0YJlKCqBO4D8OWVQCXn8gnnaSDODxPTo4xQBp7NJEf6n1BVVihSSDZujcMppEX9jyV8FlqRzfyo4wYJD5JXzHTEJbVOjITeiu4P6q0sRXwfWCw7BlSnQrXq4gG9qaM1tbFS8Q8XcyjZi8i8LwIP0+6JzYk54ZCjNP5cDKlFAYHkqY1mfD9DSVDSNoOAL0VELGkcLWQnsWT7oqP7IRuxJi9S+O7Pb6H9YFim282Nl3ejaZB5ASpDbW+J93bg+D6KhHuUVIwkyw6Fk6A2DxwJin0bWWdk2MegeTfjXrq02VgyL8lkH8vuJE/C95lNv10XP25KIxtG248pTD2yti2JEESiUQikUi8ekgSxLwA4kIemI2vNaNGJ1kJu0iinljjNaEcMw6J3WAxI43KMNhaLcPwgezMUjDmSpGd2ojjqLLwD4YCAPRDF/dqZFDpk+FpC30Rkgx94Luh5PXgCvGkUIHMiTCKBlQHyqUMA3838UJfec/Geijj1sa9h5KsHvnByI4in5FQAAC0YXfQobovS8jF5AGhbuE+pNpPUT8ARB5S4nWJRJFtYgfNCAtD8TAJJV2srv6f7k6be77VKRIjwQ4an21qxjgWvIrcWLWTgn6MX8fhnFBgv9Mx97CM4LEEyLxnDKNfd+LpNIwqPso1oWGio8+qaL0YGXWjnR4eEI1eCiE0GgZkhOZxnieZ1ZdZ688mqEleloW/zCEnctxDF7SsW3oZ7Wnx8Y5l+jlRGBUAH3Md0aVPGrEYp0FHAjACTIvqcDJu6G3ocOxysumLrA5YKwk26ZfoWPYTrjR6ALh46yAM4bo6QBAhJUZ/sqEzsD/poEvw9CgMnNQLxELsInnr3hvaZ2Gu0S6xWuZZYWFoHtakgqkeQqbZpsYYyDwxLzHw8NqK4qv23nKB1EUvxMKcbjm3AfAwJdco2XgIl1pY1UmHwOoX36tGOm0MPjNwzMQrV0Y7hfnD0o/9KqaS3EOODvKsUvZ+ISV7GMCljuxh9u+EkVeJRCKRSCQSrxJyhZNIJBKJRCKRSCQSiUTitUB6gjBQ7kl3Bm1LVdztYbvGR3QNkF28fqI5CwyTa3BEMUP3VtBd8s6WBYE9fj3GzPsucSdxFYe4MIsGAoYnCDFqZdTawYVAndDUDR7Qnfc2XOb5Uq/CcWhjoIrHCG/qvu8eDstu7GnE3U9OG+atYm7YB7k3Cy4FdFm8BpadfN54aLI8BnNdB1T7YTlZxwkNoCA+SyEUoVyg+iKmw6DFnURIEVVDDRpN1CCpeOLqVREzr3iYkNWFgygrMMrTcrgMd/QpzIZlh7ub1w0gGiRl6Z8wTtRp0jOx+3jdrR0cPHhsC52u+xGYvWuknOGlYtdOz8RSTj+px5Dt/nfdbTdPGHuuLGPHPtcVGF45V4KUdr3p4ExeDKOM1ePHwrF8Wqu4qWcZOWSnvBbxdhgiteTHPXSrY3gfjMcJ3bJLedgQRihErJNN5Y21PPNEwAj9AtwDaQqHCqFPq2inCa6a2CexZsCJz9yNcJIokmkeN9ZeOydmK3LPCm8PuThxbVYxAp5JWX0rU5psYhWljaLOdQxYzJ505YnSx+fkoRF+jxmWoripo5hXj3l+6GfMcMOjrDU9udQ3fPI8HtpMnydlh2TJCuNFHeD74JWDMX+ZRgYYEKHdY
UofbR5tPfzr7eVY2/W9QeoBYnPdNaOirpM92xj92TcWb6BbwriJRCKRSCQSLzmSBFF3aI4L7wqwxoJ7Ng49F5AFb7nQWBxb3PwaF14gpMSGK+PdXJSh4okMuT8HF2cKoSWW7aAF43o/dwmv0RAEvtRJ+NTFGQHRACnBuCaIrkntKKcO3jRbhBu22iYjYsyQ63CdBXQlULrGnldGOXVxdQeAO5LY9G4EC836JcSgU0eJYUihn40cqlt3AkbIoaGbwq1oHbSN0a083uqtOsIHKoNVbwUn7YONUWpD26tobFgdjgJmuCbClXBnaIv3OzDCjqLIroUoAZNoLQiiwVAka0fZOkrQlyEgkGRibfUY+rSKZeo9euzrbkQLPERq0t2IQxC1PADQQ1EyMLQzhiJ1mow5+Q0oz1UHQg37VevEIAYdjzAgq56J/modo2hsObBksiAnAFxfJfZJ1VANTXkqmfYSI+4AACAASURBVIzY21juSTOGAETXWYi4Ykpl2k+YQIwRhkHDeBZdCC3jGAQOE1AKXZMc/vxhhFFZPcw453EcNOZqP2s/bxoaBKA9XdJOx/eckSlLqIeEg1hYCU0EGlfNMlKUPNQ5FUlCGxsjPupljCOT9FN9mMkFskxCfdTJiLC+4YqE8P4q8FS23gTVOelGdAWyqjQMwsT6TTPDFLqeoLfmrI1rFKIWsnj0IVeW1LiAp+qOKY4BJQZjGus49mQhT8D21vy8WIYi78NQR0tpK3OdnFDxvg4ivd4oI8P66JvSZWI9Fu6TSCQSiUQi8TIjSZBoXIVdNBf5vKEj0U3ewAwStl2+xZg3NFIhyXAKi6GBTtPC3LJRAGrYNVv0ktcNUI7jjoBaRVvBsnW4sWLETCBVgmeIHeNCYgySeaXovQsNcUUznI3I8R1J8gU+yAxZHobnSUiacm6wbBKdwqrd+k5j8p0oCWAMMoSKGvZGJESPkKqeLYWxncUC3jQdKBFwebNO1qYJ/jVrHzQDBi1EDzEI5IKzTOphM+WjDBXuGGRDPGaeF0q+uDaMernU2od3SOQuOk2aMKTjZETQtnW0U0EpfWS00FtZu0phHEcVkcmuGTOYRkYNq+LilWR9sD8/Sb+biG7oc+nExVKsLHP+ofg4rQKcbDo51kdV9HLqqbtTC6tWQewT+74fJFlT7IdDCDbL4BIz8gAY3kZKHJato5Yh6nHswjgcD1XK2ReiqMo9XAhT5zqFuVzfKv6Mu8G8vDssY05M+zp7s2hqVnuO4tzSTDPU4CTU9LzddXmv3A0Xi7o1VHsX8BhTonns5Ti5vtBxqUNU1QYEOk6nDhT2ucsmJKpj1pVka0qCUQ9ecySEU7mQk18AUO+1XuF9y0UM+B6zXkUyxPsrzC0TPjVSUd+7/q44ioyjZfrRsUTlmUC2uWkkc0wfG+brdh597UQtA7Uy9l3eOYelGjb9mQJ/RhD0gebBMN0ZkvTB4dGz8bdsMFErZpDmRnwou1JYPLQ49LPPu+5ec5ZyevU2SiQSiUQikXiVkCQIxH3fxBLtu4HPcKG/4SnCc7pMMxIeWTBSNNbtT/MSIUaHZH4xQkWOy6KVihAS0dUZVp2DgMNEG3VnMghF+onWFgp18RAPKYMLo29lXugb4cG2mznc3q1dvsPJg6ixNvatoN8x2tMG80wRIy6SIPbfSM87dkTNEKwSTVR5DvHQsYCmFrVxYU/nKcZerR1nNVaMQDAS4bhU9L0OoU4rfiUxAqjRLKIbB0XT6F6F7JhFsQlZMRmhrN4twfKI6Ue9PoTJ8ASA3rVvo2u8uktYVgiikfq0FPY0qRZaxUxOwki6U3KyBQC2raH3gt6HJwuIHyVdpF4F7Vy9Dy2ci9XwrIWxqXCqEThEjK10NKbxmGgdOg9CpJJkDJm8YTo50WMpW2vIbmJ9UYuQH3Ze07lS3pBzL8cGZmDf60RGnU4tZJiRttcgUswAHh5O6vVEMtU7uXcP9PvxUCeiaJ4jAJ26eD4Rex/HcWQeYXLWLqvnG08e0HvB3enA5ahgJuytTmLKlRhHLzjVJvUMfRhndLsr2J8ExtXryNffidCDFxxpXfsbElNUiHH3ZPfx3I+KY68ohXHSFLqXhzGBS5Ek1KV0HVOgVHt+tUrRM4LgmU0s5e+m7bNUv4beC/a9qneXEomnNmVG6Z08rfCT04HWCUerPldsLAoxzlu7IpTs+ZH+BfZWcRx1ylB0Oh3+rO37qtas5ehcujzU67bb+3T9dyd6bqiXomX5KZUnsmpWU5b6HkHAFkcZZFAikUgkEonEK4QkQYCxo+i7nfK92Q4j6WcIs2BiWSRqytSrNA6MYQjrzvTkXl9U9V939Cguau0etpMdNR7WGPhG7j1iu9PyJTQOcEIhpvQ0zxDaISvsxrMlRJhICqn3cILgwqKZUMdv1DD0AXZC2QltH7uP3j9aJW9HuK3tULpLvNpdbhqHPkLXKnepPzPhWHewLRuFhu3wfR3ZWXZCtcwLtrMa54LdynaNyXar57pEjYEr4xbSv0xi+DIAbpZOI8wVG5o4V2yu6d9tYYp6lUd4j8apZyEx4xkjrEl3ytc5WzY1ugElS0YI0vl8oNbBfpmR14NRCAyvGr/+TsuuwzIzvZj4W2uSFckN8uCNMlJ5Do8dUkM3llM0y9K2Nb//VvpkoFp9W5d79RBadXc+UACc1NjeasdWQ8pQrdrdJv3QtK7RMH7jvKMzoRnpoUb4offoveAw76TC6E0MdA9nUu8c1muhZBBRMPALo1Z43Xsg2549v0PvhGfPz+Be0BuhPwQPqCr35UbizXGLxwspu20cGJiIHLa03Dbl41zS9yRtHacn5pHV8OQkf9fS8Z4nD1PftV6Apw8+3lUJu1o69lbROuFkaXPNc6cVHL1oHxQnC1uTDCmH/w7vO/vctqak3UysbEoWddOBUaLDwvVKmEs2/vf7hqJ9ZPOLWf6utYM53DP0s5Bx+nxtMeYS3g9FicGjlWmeWT/EsV9TQZu3FyDPiJGUnYqfb63hriQ89TlMj9vQkEokEolEIpF4hfDakyDEQNHYddfjuOjOOgFsK33z+gBmX2FP/4pBejBG+It9V00F88TgyqJLoAYrEw+hQb1mGPFS0ej2jCreGeYS7+J+SjhEt+bhMs1DpNHCE4h9dz66va/hC9YTACZvDKY+xcLTpbiOCim5UHYa5ccSC8Q7oOpuuXlBhDAJrjxftrjDo2F45fQyp62Mrt91iHleCXESuxdN4HvskHvCcNVIFwKoRUJMx9vsjgZM6S6tb5jkJkFvwnUv9B7WrNhHToYxRPtgKXoIOQ7xxen+1ijSUIvCs3cSxPuJbX50oAXSr72niDFeuhuTFjrhf6v3yvAUCbdn1YbZyyB74nzoJAK64Vh3AnCMoWlpMAGHpcO11Koa0lU29XipI3ToVh0njRcAl7PkK5Vdc/FCKOqlAAzyxjwa9r3O+jQAzndKkDRyw7pHooKXe1t7LyNspNvYhP45LJTEQogsbewanmZkY5PraSecAinpIq0MT70MzPOdK485Z+dEsrIR6k5wYVgS4pMH
j+PaLpf3ygN1qYzn253Pi+18SBiX6vn0XrAZSRLIsdWwj7+1VtD3IiFRB82Es717tX5T6NXGTgSat4ZpG5Hd2whfnw/XRIDVw4VDGePfAiO04z1PEoIVyy+BZJsLn8kL8cSiiXTsfdRxrrPNVxptYyG3+AjhY8DQBrL3fxRgroxybsMTLZFIJBKJROIVwmtPgqAD23NbOOpvLDv9Rjp4fL8e9jUxyfVRad+PsxoDSkKYWGPx+G9Cu+NJM2DKKMJmzPAwWBgTCUAHRnaCTqj36tUBJR6U/PB4+jqMZFYhyn4WEsOz21j5q2ZD6C/LmOELZ/c+IfCp+044Gnl2FRNYvCJaTAzTbhUNnlgfQzgXnUZWmEZO/Njxfifn0h5Ebq2qppOgIpK8aZ+Y8RcJrE5A55tjsHpq+PnhuJfTZHy8T+ywtx1uvHGNx4NRvExC141ZhTxjeIKJYRKhH1Lqatfxvc6XQJCY11Cz7BLW/4CP6SgATmKsoUINQvDUBzG6nLQJ9aMWiivQjEeYMoiMto253M9aV5vj2l/dSKBF9HNodtCsA/FW9cw9YCEeOIRf0SFj9uzpGWCgqGhxCXoOl7vxojAPJizdRCw6F05YEU/z1t8dwQNq0lLRtnqWoRb6cQ9tLaPvjOQdBCgGYWeV0npSt4qPvohzZfJSciIvlK3vTDChPYRzw63a+Q4PlQfB2Gn0XRgTI3MmTRIlX+ggbHZ9ID5JdVs8tNEETO15rzZ/5JkXPaah9SKFLGOhYxD7i7Qf6xGe5Rvvf/93oPIkpksMHPH9Gp5t62Ob+vdP+pizhtgn9lPQIrIyqA1x75X4RNfXgoUzBgHafmL0pwXYl2sSiUQikUgkXgGsJmYikUgkEolEIpFIJBKJxCuJ9ATB2AkrMQWF7aqqt8eaKnDaeafwG8aOoaVu7LrjR3G37wBq3OEHpnSa4/fgdRHA5oa+6UEGtmfkKRKjy7tnXwleJ1wgO5O9+C46FRpeDYsuhffLuht57SkOmPv+qYvrv2YV4R48Tgw1eHvorusIC6GxHRpuOfowhFDYrnsZ3jX9aQMK0DXdcdM0qnzioY1SWOpbRC9hFTSdBD+t7Tfc4/1403TLse9sxxo0skxEpxUKngO6wzt5Bekc9K82tl4CDY+BKGxrKMt9FjkI37wPoQSku8gAQB+uI5yKgDUl7shAZJ5Lep3vkEubfW7eSLspHkuL5wPFdobfrE1W8XCPkVZ38UKIIWF6uIdOshApYvOgoCk5DLp4ffRnMucsDWx8L7AJGEOeNUtl2kPaa0C9R9xDYPTzEL/hyVPDPDo884ddr/3lj+QJ/ty7J0lhlHPsaGB57UzPd2nsGkaTh1GYT/3Enp5XPDmWR4PFE60+H/MgevRs6ikifSblbKvnEq7fuX68By+Z2H4gjJ+UXRjyPK4v0EIuiO0hPbc8QUwnKIQPWcieewCqx14MvwMBsDTOJIWObCxyLoeQNeLgqRH1lTrQz2VuZwyN9GdwbiKPU6cwzFW7aoTSYXre+gnolzqniE8kEolEIpF4RZAkCGEIiwZjvJiKPgGe1dXWj5rlYl0gxkV00Hh0o7VvEO0CwOP03e7p5PcEgHbmoRFi8ffB2C9KSNAFrpcQjRGumA1FGmUBYqTQIcZfO5OHQrjgpzdq7qubLthxMR5TdloMPGlBZAa2nqApP40UIRbCpFwWoxCYjO5Yt5g6s7tOxHCtp1MHb2r1FbFstq1fxbpPmhY80sdS7WbDoO9FNAKC27ml4HSyyXU1omWmxy3chulaMyQSTQTwKViAFvajc5DN6IIa3kVZjYpBMESxymjNUzC6I8/iOirh+kgAANBkwWJkxuqdaIQQEV2VRdpdRvKxjsWUGrawpmoOdWoLYRb7yZ7HMK+pk2Q6MqNuJe0KO0EAANgGGdaLEjU7QNqG+DxhAzrYCQwpT9uEYehfSSis3wEX3vXwA3//yAPOcQ4zBTIJY17REsIV+2TjqY7OP3C4/hEej5QwBFiNcHJixfqw32la2jVTjJZLmmK4PpALCce5QA2SIlcN976xhxVNmj6Yp3D8jdVgF5IJk46Q11fJP+JRpoXLeKE298K8mEKRYr/G+9u7tWCEyoW4yRiqZOFeHmam5cb3PYBBfoT6C6Gk73PXew7Pv/25jqf1LUEzj81iz9IknkJ41lS9Zb8xnxOJRCKRSCReAbz2JAiTLqKXBWIzY57mc/2ElQSh5TgJ0TEbemNXmE4YC/YKUONJX6CfWI1GnsQzJ1KAWXbXGwAiiX834cpgnEVj0b0s1DAgjdsXjRD1YsA16WL1v0oNa7vfaky4qCMA5jIbgQQw8SBBWIzWEoQMYww709CuoODh4uNRgX4e917Hix6qiAGSGNhU2LMduLDgIeKMaDRrmNiO6OYMAOihTDvAXh/tX9FpmA35qe1m2EahBe8L+3kx/CM54r+FexCPco1gif0FgKkPsUiCCOGW+R5sKaKtWsGQpqZkjwlyAq5zY3/HrESud2E6DNpMMZz1s/LwGLL+q32IhrIIOlo9KbTX+2USGZU68mkcs/kUMZF0CHVUb6CmXktRt8Wuo05DRJmu21324JFj/Ra+e/+XQNZNY6ntNa8kJk2TqscXrycQRHTTSRD5LBuj27uEgLqNhnia45UgCuhGuBnpEsmmKplfSh2peS2DjPdVI3Ar2C9lzMk4pxuBLsU9l7gyyoOKwx6D9Jmes3C9a7eo1xefQlas9ZMxCacayTMRXEZoLBpI07suEjPWJzb+sX8M5lVmc7APsWopfBYnvuKSgvcQKbk6iKjxnveLV7Kwyrt2JVcpkmA2xjZfL3RN9rz2K4REIpFIJBKvIl77JY6vEUOaVxM79fAR23XbxnHquntshk0dYRi2Ji0XcvE5czmOC3w3Ck8MdPU0CEboWNgjhGSExT0T2DOd8ELEiPFR9tBGNZoAyf7hbv0Y10X3/6mDjAS5EZ7gu6mbZmBxg2McGymGCW4ykZzPGDvvIEbfzNBmN3JpcCtet34OBEAkP9QALLqo56KGEsmucwdGFp0jDBjT1U40b0VCCg41gIl999/vF3aNpU7LOADDYDKj0NplBucR+iA0xneUF7HSlYihTsPAN8PJuoZmYkTODZffEMD0cAfovO86vir26C70dn0Jttgh9fB+Cn0AMzY39mwcfBQlMYqHTjmMpFByiQrm8KQYSqJCu7R1IUgOEi8g4DoEZ7U6i3gNYcMgFQCff5YVpzch1YxQq9q2zoNcoCppaDuTpyY2kJI9IHiK1BVUxEJfUxD3WBZDMqMQj9TBOmC9QbKWWNMX4V6be9xpaitI2kTbIJ14qSIZicZ6TJ9/Pw4IUVIbcG6SVpZ4SlHLTGiamaRrPE+7GOuoz4P1cciM5P3ANGVsKQX+vWha2hjG1nsZmYw0a0qPnlLhntZGEDvRw6xz1Lo+9GEkE52oK8NbzD3FADQrw9ILL15KMQU7nYRoArGkUOZBhln9/LdAcMY2TFlteIy5/IHRr2WkbJ5IkK0Pb75EIpFIJBKJVwivPQkSyYnodUDTppga0zFWPSwixa2
b5kW0GYrAlNHA49U3iJFWxGAyjQ+/407DayPuKMddYKtawTDiPRRB9EHKw/Cm4Dr0SYy4aWaYxt3q2OyKybtk3XS82kyOdTADX125r0y+ose6Luj1N7ZUlYRgyYwbeR+ero/F45OmwUPUiiBE3QGuarAbGdDCbvQu/Vsveg0TSsUMsytMg0V3t+3gcJsHQLOXgdRvZLSgA5MeR9yNFg8LmnRP5IDtbNPkhXBFZAWigwOr5HoCcc5PRFAgBQiDyAk2uoe4dBpkR6xiI0Db1nsBLoAwKrpDbs9L8EiJYSE+xyucVET4ZNOWIQYjGNRTBcc9/FrrZ30OxbtAUu1S6HvWEKl6bpJ6tzBqSKFb1PBujVAKo5MQFL3V6YEhiKEe4bxmL8JRHNW9KajwbU0aJk+t631gNwCm0Ia+h4GKjB3xNA9ADG5CAk16FcQjJawRAtFDJHocBM+PsnU0SPmWrado35XC6N2iSQhFw7+cgCjy/XRq14SG/01KBJB7dvVeJ/LBUh47QRFT3hq53YJH0TQwSjwZmWVdEt/D6mkWh8eJJgubIwihsbXHx6PEesHnFxGjlz4ROYOgYh2HcF2YazFUzypMC4ltaYABoJzaOE6M8/kY/ZZIJBKJRCLxCuG1J0HEO0CNqMXV33U7ArEABCN8G4tf7hjpBG13NIpH6pqYTO/iYutfGkQGMDMMeuEQrRsu1D3s+kudScUntXzVN5jTwgJFNT/M6I/lXKdZ1bbF3f9YPQ6ED8S4p7YYmXY9IaQdZu9nrOda32G0GQiL92DDoQDlfpBF4n0wYt9dr8HIhSaeMVOIk3pomIitXDhi5W0sW/QOoOthsrIshCnqywxiglzXYYp4ieSX7uyO+wfSg+3cMfAuqGhjrEQARaNN+4pLEIKc6q3zOHh/xPqZgKgTKGUp22oU+iV6PaEXTUUqv5mHjusRBI0Tf8YYk0aNkCwjbbWHhRgPUIOuw25WcmzkKJdiWIKP67iPzAWWUKjgsdMri+6MYetiBEPrsuk7pIRPphBmYXO5eH04jB0sLMsIUzWkr+aazad9HgfXklif0xjCE4mvykv4Vjgv1Jej0a8GPgUSZArNCO+9XsoVQenOLLFP7OUINchJSSQWYshua/1lfWb6PWgkHkQI757gLXblzWD9QOrp0QguWBw6gLUcsufRKhEJSHvHO9mgfUI8SMLCEl5p/eS6LvJe4yY+PO75QzJ/WuXhpdJHvbwcfVcIsyh9OE2V4IUSvUT8u9aFm4wBa6gTIO/1/bLdJocSiUQikUgkXnK89iQIKuP4VLFCfQHdgPJQhreAnkpHMBZ5GI1GUkzZYdgMbLtYjA0jVupFDXIWosIMlEmY1BbW/hlE/dQQde+DG2tV4lAvBkoLxEDTYxtplgpZCEdblLos5DuC0R4X+/p9DaGZzmU4QSP1VuIndOwqkImwi80h5GjVN+FDPV3aKBsg957gTcNl1AuDTHQ1dnE0nrfJhND7ByPRXNaj4co0G9u0fFrMvRq2smkb7lPEmJ3Ji0HC+NjoeWWnIa5qfd150CIEDaEJxjOZcc8+7rcytEx1t7HDsFOdEFivpTB3jMRpoZt7mG+NQJ09m4td3rdBhHAQno11WOceMVz1kzuNXo1uXLeeCyMVMdpkz8eUzaOWq7Y6GaUeRHRcH0PgRUSgWNsVSAi/Z+jn2G9OtFa66osYemRj7m3toX1m6Aej2zqOVEMFG+a+muayGd1XXSjViYKsC2R8yXVFrt4J8dkvI/RvIg95cEzjpnCCbxBu5MTW6qXGnYcwMkL7bI6bto0RCrGMQGJH0m7ywrp+ZUgZwYXC2ujzwjxnbGj9+QrX7xp2dRDWd8qqazNpMT0y76+0cOzcLhUxPSMuSjqxOOitJFYikUgkEonEq4BHlreJRCKRSCQSiUQikUgkEq8W0hOkAvRUtnNjZop24imzBBeewi5IdTyAsUO4pgaNgpEIIQ2A7CbWStNO8pVHB4Wd0QLXi7B7lAPouitJpGE9ttu4wV33i2qRuMeJ1pUOoHYpgytNYpu6J4raMWkjWF20m6YsMhTFBvWEqM8BXO/UogBdRUttBz7uvkZPGNPk8F3jGoRNAXjKR2vkxrobz8BTEUx0QcRV2HARR4THyXfRMDChRhIBQdNI6Kt7vvWfTSXbYe66q2/3CN4lfBS5H6k7e4grIRp17I3QbWv8RqgHwg5z1LuY7nkrRaqNXZjvUzhF8GaJO+IrTOvEPBkmCYtNd9C7titUwD0mABDzyLjBN+oTPCd4+Z3sU8+Zs4vMdeVHfo/hZDFMyb2JtOzS5Lkp5rFhzyqNttj85lvCCja+naZnniY9mbm99nyxaemYN0H0oggeBX5/LOVFT5+4219Gnaa+WtRRTdDZy7HxsvILjWvU02JkuIGHzVl7+wlzGIvWaZpnoQpTxp/oNeRt5tnrptHs3RHLiaKx8XY0vH2uxKAjHnOUmDxV5ER/HgrNIUQ3IILV5CF0HMeZ5np7aFIcy/gumLyPwvPPoS81M5YXewStnkQikUgkEolXDEmCdIAf6ojfB3wBOTKPsMTPn8Zxy2gBqF3RhllnmTncXZ8ABmmWEs2IsREOluwsUyaPxTh1IqWyiKW6joKQG2aIWPpdJ0FOjH7HfpwuEkrhNv49oTzAtRssdCcSOdSUeFHBTS/bjIJtMTaicQ/A0kcMTRMMIye0kRoNnQPGMBBiLD6NujkBQazpjXkYwC0Ypru4ybenalhXBp3lYBQZ9AweBy3shRm3qp1w6L1j2lKLu49G+9anTA580OyGvxpVWx/pRqMobKgnOklZm5Ayq0iih1sFvQCOJIKJW1rdo14ASx+Q6jCAgtGqxycR1DV8oEt/OUmo89G1WVRjg8+Mm+l/A+klpIAa4zHUKPS3zScAQ7cnasuMU2fYsYLrUA6WsC/PQtNEu8TnfB2pqqlLdiUUoHm4mxmq4b0xGcFzpVzKIYYpkEZahTIjqA+9G0/VXIJxXEO/AeD1eQz9EMORxg20PCUJKfS5n2LvtIkku3ETJc64BFIL8i5hYnAJ5d8gGSbSK3x3YV47XO23yG4gECCY57rdj5dy1iaQvRNHH85k1RhHrIRGfL6WdkzH18bGeaDaQjixp152DaptGddIaoU5cD3H52eemMaxG2F+8TlLJBKJRCKReJXw2pMgtBNOf20bu6gwozd+xxARlKskDl3JC666UNXFfFEhwrLr2Sy7nVyAplYrnxjYukgaxF27y7AILE4bhCEkGDKv+I6t/VYxjMwC4CTb8rR19EsRLwi9pj0vqA+Eek+gHbpTz25AxR11X2NH4xHapjoMs6gJAajxtw1tir5hysIzGQnB02AYRTy0EdZFv5bX77rsirOMZXkgbKYPcAD1QXZV21HFU+bcF4MkZH25qGUaPDG4Vr8fNd0djTByxzqAgH4eY8iVUR4CuWD6ANFYV48VYlKjeBByTpCpFwCZRohXP9SX5mPRwGfrb0DFN0M7rN/NYMRUBa+vj301Q1v7SEkR1rFjFbr0FLnVtFl41l2JhMihQphTtYaxN4w6HqlZAV
AdYx3JMi5LXyyE2pVxZ+2zOU4LCVKEhLDnYoiHjs/h8RXIiINmYtHINfOQqHM/e5abG8bzRG7YvSn0Y7FxWIxfL0A/tX+cgFwyRsEeg0csYNpp9saYDmrbNWUzl9nD7CZMRNZghEcdQr4rhwAs/R3Eh2fyYfEowZjHXMZzPgoMP9F8/fQOsrlm8/4GIkGyetfcnENRMySkFW+W5tnOW0Wx+UYZDfO8cGJ+ee5J77dBGmLkTmXRcrpS5U0kEolEIpF4+ZEkSAPuPkiLkU1oZwyjQkUrXVTUdsjUoO9bEKyEiJ5yEQPczu+b/Fc0nKE9YfQnLJ4hm6bkLPBUkszQtIy6qCcGnWLeUlmgsmeUoDmDQAewh8wKBNB5eCh0AHwm9JMQIVakExo0iBvLnuJZdOwWd90X6vKDEUNmxYT6bMM4vNqNtHCLWzvTZljTsvAHBtGju5h8EjFLSyVTdiEtykEoH1Ly50xDHDTcg6ucz2QCn1KGE0HdSJBwnfWZh0GQewOY8dA3jPt53cdkM4LMfrYdX/Oq8Gw2LIPSK648GdawrWGQ64+xX4vWZyGCJgFNMyLtq4V3aDjPRI6EJvnuNVgIuTWMqhMsHbLf12DZPXS+c+HZYIvdFry2YvpedDHo2fqhwMcBLB4wkwdEHBP1fOhnndNbRzvT1If2HEWRy6l6nslozHM+yuMZNhjDoNXryAinFtRVcfXn/ENkIVt5/ALLWEKQODim6zKtjRQKaI4ExAAAIABJREFUCKQRM0AgMAa5GVM+y1EAlgVonYtLlf17CW3Qe7IJ3K7kQ6imZdNxEiR6XYVmT9dHL45IiGmoFoPm597eq8HDJ5IYV/M0VJCNKYkwItjvq/2k7033vGMebYvEjoX03bzvQupYO9qNvrS6lEG2j/cKS3afRCKRSCQSiVcQrz0J4q7vbfn5wAgPIYiRHTKrxB3caoa0LuSpC4ngniC6Ax09Ceo9od0x+rlIGMuZnQDxOnQC2ljA9id9Nt4rO/FBTTMkqAFQjpFFhE8FfLId03EP3jraGwSm4q7+PSzYuQD9LLv4vttaQhlVUky610Qr0gcxLl29AhCM9gi2/8X1dsgkAwBUJVyEiGeD0gxbM7RODC4dx52meTwKyr0QVBb+Ux4kBMkILYv572cexj4wiJxGKA3ukbJ6s9jf5hhBALCPhhYNM3LywW4QmiGeC+J9JNza7D1wFR7R3dQUY9PSz7IajoGYAcJ9jaC5YSROWYkwn+MkixqMTvx4qIi237ylHsvoYalMeSnX7hfn9lqG/cYAyg1PBDVYOWhlzOFZ0ulOrtgY2HQnFoO1hOti2IFpxhQGbTyRK4BOF5sI0UgNz4eVTYQRzlR4eBJQIECC/tBVuIU1JxKeAMBlCkmaPiH95iSmPZOrXklfjHrt98n76LS8pwIJ4rowSoKQjUs8fzXGKXYnWVNAmiUphtz599APxORZgm5l85nufWDUlTATIVaujYkSY6yV5pVfWr1I4pyI7z77KWpHMU0RmAA8DbiRRoTgCQiMsYWMx5V2S2Hx3rB+aYPscW+W6LXGgRA+YSZBzP0ms8MkEolEIpF4BfHakyBcgf098HS1BlvDymJc/wsLbTe2MBatRpJ0W3TadT0ImBpJcQ/U55qetpCX186BLGAhQkiN8H4XPFZI7xeMnRiqIbH4+r0R+gGACN0MGKsvQbQa4q6jlbExsHUhO+Lue9iRNFHQyaixhXRRkqayGHW6KL/SLCDIfawMN+50d7IFj5a4I9pIU0lqd1Wpb3lj5DZubxS0+4peK7YihllHGKuT6Iq0OyGivA8CCVF2MWydCOrDwCg7xm6xDc1ioDfVZqFD+z0Y+T586jkhniF8PQ7REyZqNDRCv4yQHu3gqR/JyAObz0tYEzDmuf1tcxcYO/pOVKwkBvOkGSMdG4xpDQmhBtEeMX7ghuePa4lEQsQ+zbA7tE4IBJE+ZyAlOhopOzmOe71MyHZqc2hwo0F6hH72PjxukxJMo92WttRDxIChkeHjOBvoDKlbMW8G98Ka3wneF+E+3ge3SBBrh6UALgCj+PMZQ+AQxUJtTCw8AxAS6NTnvgnkZG/FidmuYWAe1qflRQ2k8cyHjrBnY2MhRaIuRyTfIqEV5i4vpJNUTJ+Fek1SOXFncyl4rHm1Ks+EgHvV8NDSiG0pAOx9zCTvMiMlCklY1EK8Xzk+tVFvI+vld5IyArFBbM/teAdMnl3ef6PjqY8QMVbdkUi4GTGeSCQSiUQi8aohSZCN8fD+PsIcoIbIBYGIwORGb4Zq38Iau4o3AQBYJhfLyuLGoxkAUK2Ky2JEM9DPYyfUdt2NgOnHnOWAq9alDqPe+YM7oG8hI42HQQyCwYwT3x2uI+SGY6YXIzv2Mi2My4WGcQgz2IIxvDEstKE+L2K4mrEK+MJcstOIOK1pJYwBGkbr6kXi4pUq0MoV6HcFXdtYnhyodw29MNrG6G+IZ4h49egO7ZnBWwdOjPpkpM8Zwqdq2BGDKqPWDmYSfRUA+0MdRrMZlMG4oYOcXKGmdd14GFpFjaPgpUOFUXQcSukoRbwHShn3NtHT1gr6pYon/7pru2aCuWFo3jy+CHv259XnrhtRBeJ5BAxvmE7D8F+1X2gYsF2NLa8OLeetRn88Rz2jykGDFLNTGMCOsaMeMi+t4qTERnzoT3X0Cdl4Ru0W3TWX624b8ZMmjhExoR+mZ9pI0lCtEbpg/QAlSPnqXh4OVxayZvGUmMQt7VkC0FWHYyIk7Pop7EmMaY6ReJ0GKao6NS7oe9IwGwbwBGD722xzYslwFIx7bxOwEBQ8xiHCNFciMRs8ayyLk5fPGETqXgbRE7R0ItFSTk3mkJZRiFFqRzuMJQFIn0tA2xjel+Zcc+zihuKCxE5QqfjvER7ClQ0r14SWeyLuGJot5qFy6xlHmGv2b1nwJpmIfX9nLe+QpbxEIpFIJBKJVwEZ9JtIJBKJRCKRSCQSiUTitcBr7wmCwuBP2dE0O4WBHop7Ocgu8nCHtrCIqzh5Ey4lAI3QbHe+6w4cwcsou6ap3UkFPK93cS2Ehgk+UuahUDpQbKe7qtDqadSpPRXvBjo32aVUTw5vnwlRmqt6YQ1JCe7jR5XUug9FYvxVaNTSkrpWBkE8U7Yh3AnAU/ZyldCfss+7j9Bu9fMXXQo71zPEBJ0Lu5a6eM+4g8uJ0J6rl8TTiva0ST/cNdDTA/1N9erQXdxSGUXjnWoVLYDeCdxHCA7VJucAqFuX3WzbUd9G+hpWocmoW9L3IroPRxH9ko2xnRuK7pxbWZ4ilySVba3mCTLuZcdrmHdEjF76pDPh3cPzdVY/guxk+3m9+D1Me4XKmIzHWdrMrYBqH6EPDzJBaS/iodFpSuPs5W/iNcWnrh4vOriLPoGFC7gHQISL32q/3sjY4SmYtYhJvHLSksHwAAmhF67fUIMXi10WUog+qtfC4VytG2H87t4ftjvflzoG5wTXmAm79/5b1H+Jry2fQ8PLxbKreLvV4YF2OXEK1zGnEOJRd/PCiiFe90G8ghi9YAjUutcKo9w19coYl4IYt
UgDCGNemudGV3Vm1nrZHI6eHVTGtZ1FHYdDJ/pQmJdIGBM6N5nHQQDUyzVPjhKeAcA9sey7PyOk7wqtpzXTzqu1ozOh6oQxLz9AvEu6eZB4JfVPE0pdszi59hJdZ3iyU6KWTOPhUWLzJoood/UuMe+78G5mC2+7vkUikUgkEonES4/XngQhAk5PjnlBCqBvVdyXLQwm+sxskkGCTEeAgFqHCzapkb1tDcyE4yjYn5+GQQGgdUJ7QzQtyg7XAYi6HpN+hxlOinIhJ0HaE8blfQ24i7oaAJ06TncHmDVFrIVeAECrbtxQl09fXEMy3JSLCIrWB1kcFxUVnAX+1GhcM65ASI1+koU2tdl49HZoatNyBNf+EC5DjT3e3TROJl0UDfMhks96Aba31Eg/EY43CO0po72nA08PlPMgNAAxXlor6HvBbmRRC8KbLMRS03rtpO2zMk6MchZjr5hBp3/LXFDiyjI/VCUUjGtq1YmQrmEG/Sjw6KxO4EuZQiCi677VpWxKTkQX/2DcxowlVHkyoMydn/S63gkUJvx2PrQdbSKKupJ+7aEKwbarHgSCACcgIUeVgXOXsAmr20Roqf7DpLuA+e/Kmm4XaE+ujUcPLTKdiFAGW9iHzS8XddUT7zpo62548o0UsLzRLBjLNBGh0z3Nto6ioTGkTnUzJtAcvmZlDsNW59A+srpQC91kdSuQMBrCyC5k97R6WPFhLou4LYCd5kd5IbXKXjyEz95LFqrRzuzvq/amhFH1NUtQCXPYQk6MQGaM0BW7jEN7LYxKmSIPQ4njZeO/anho30zkVdDRuEp1G8mj+Dwh/L7ON2ufiUgz0E8dVGcyiCFECBclcBaiyIkshod32b8vRrRyp6s6TVmFtG+cTOkQwjJ0CRcGn9jnphNu1QSxkUgkEolEIvHK4bUnQSyrQyEWsUBA4r/VGGXVB6BtWAJlYxTdzbyKC4eUdz4feHrecaoN9/uGn+ii3eDGFUHSxpaOdsLYib8EAyVkpzEBRN/wvGPRRSCgv9FRPmVHKYx20VXrsw28F1x2NaCPMgl60i4Lb1KjlXbJglIe9HgD6g4lZoYNwXV4qGhTh46HnaupJushBldneIpgAJOAZmmQTDxGgIRFvyzAjdAY9/Hde1rKIiFVTN+lXoDtOaE9IezPCe1pQXuzO6EByKK/PBScH5SIYqlPJJxM58SInCjO2J4w2t2GfpYdcTOYmhl+MS0wAcxFbm3jfYghTUq8dDKySY4XFQE1wgkkWi9Ry4J1J96N25CyeLRhGIHHWYkIm0uNgCbl9Co6EdE+byo0S/pcUJHnxoiVeu7oKuDJVedb5WGgaX2piDeJzEe6uZM9KjzqJ23RPlfdmu0kdeqhjGinsnpAWB25DR0VNw77qEM5N9m538h36WP9umnfhIwf6OQePa49oV4Ntpsf0aPBvLYPEI0O6zcjYjqNTDE2lS4y+ahh0vQoql1EzQxo1Tay/olaPOF55o3Gzx1OMAyNk1AG6zMeSM2yj+ewb0Hk+XnV3zgQoDLf+xbqUVQ8WG9AB8FT+er3SVdF+07eOUODxtpAbZAzpm3C8Z1Cw7h3cdzQTiOoKHxnKzzA+sDPD0KlsY39JETDFRnk74ZrMgyEQeqF58ePG9kJTHookyiyad4QS/9UgKMHmNaBzvIscStD+6XIM3EzM1EikUgkEonES47XngRBJ+zPzmJ8hJ02Alxgz4UAzRbpkF1rJjRdILeHCmh4ACrjeNjwcD7hfHegd0K73wALsQFcnG/KOhHdj+OnGV8BXNXQJpYwg0Y47ivomQzp+SdsR09DFlQUNRpCfRsL9e1eDKbVaOIKtCdwY6LfwcUxmZQ48DoaCTIMeC5CYPTzMN5jKlrSsB4jeGKojGVJcddsq9PSN2bE1ItkwTESpBwqQPthCcfpJ0K7K16OfVZrN0OyNkRbosDDDqKHinu7bFJuNOo4CNW2Ox4pZFWI0j1/YATQbFTOmWaUFJkM10BSFCOlwvwgmVMeLkXyPzMGu/aBjaPPQc1e4kan3m9/TxEHCh0/I7O6piLGWcMbtj5n+bHwMTXkuRcJl9F0zi7KqLfykAosz4Ke5GFoJ0Jb00kTPCsIab8YuSl9BpCFJfSi3jlDrLLvQnhS4YnkcyhxQrW7Jw2XQYJ6vVnJDkA8wmqow0ZTGFIpPIUsFXvfMKE1Qm9VQi3cK0nq0J+o5X6UyWuJ70mISAtX2xkoIXxNP83bxAmFEEJUTDyZhp0tc9caaM/ETB44YcAQYWACtg/ruIVMJuIxRvPzXOU5AjCIzqr/FdbsTDbQsbNHGQYPtwtdFucSq5eMp1sO14/QqdAm7ZP4DvJ72XmxbVoH84IDZNyFPB5zhew9oWTNlH7X/tZ3jhE3sU3xeXDHthK6h8fzHd9XN+HeYwyyFQFdz+1EIpFIJBKJVwVJgnTC9mMn0S3Q8JN25tmYAybXY9oLjsrwdJsATh8qqPe2iga4bOgbcH8nRMOmmhizYUducMpiPexQLtle3OZ145yHp8RegOcVp7cI2zM5c3sOr6/fLhg8/azhIm+qXkNh9NNsXNv9nYyoQ+fD4KmFzWi64Z5vngqrq7+tvVuXhq0hP6LTEBqwpJSc2tZphN240SeGSLkA9R6oD4zzhx4vo1fSlJxAX7On0DDMIiFRdmA7wjgxNNuNfK9GkCiZIp4d14SHZx6xPjmH2xeZD5H8mQy/Pn8Hjd16YBhaRrR1yy5RaNTBx5BUh2IYV/VefnNDVb1PTOPgeLPIvLDnpvLImAN5XkjT+JpXS4naIWb0hjkfU1Bbm4SBAVAK2rMK85CyOnl/BZ2HKelFDBNi9X7y8DMt78RC9iwwTZ9uWXwYQOUr6ZLouWHEl3sFFUbZpJxSGJ2HTgsA9AbXsuganmbZTgA4KUuqKWPeZ+YNY55m7VLEI+RB+rzeawsfaBjcfKOP+5gLfRtzjgs78eV9bUwxL/PPyQOS+xoxqM9TIQyiRq/lI5CvRi5WKIlAfu4Yx/EseHrYq7my/G1TImgMWb2NWF1DP4wE8bC/lZSzuhihE4gdkJInBJBmGiu0Tpalnk5yDmJkEDR0dT5vgVgpoz7WXs9GpKE58zNiRAyLxgtpFQJRcyi5nkgkEolEIvGqIUkQBu4+KKtHD9fYbuzgh9j+7V4IkZhWd3sGbPdqrGg4hRizhOPpMPjanX4+DcYEiys7iAYRs417Wz1BmHRLbHFenhXU54TteTB8y7LA1ygEDzWxRbMulo83Qqpc6GK4iyaIkx4awz52+uFu93YNb3EnMyzQo1CjdX0RfQnikXqVeRBL7qnAYs26m7xdryEY2EQngE8FOIZWSj+kv8shniD1QqjPMRku7qVS4aKbkYyiI7i5Bxd36+ftLZoMumh82Bi5CCY0/CeG9JjBY6mFlSSyeWI7yE6C6O7wuvtcGk2782XDMKqCMXULNlcNvUA0VgLRMxmEagCyhk5JmmJ5ZvpWwRtLOmTrg8OMXvX+CLvwcoL+pyEIBICZZ6NO52pVw36rNBnX5kUypVNe7tFdh0J/4zEfrV2eVjrMcWC0
398JEMJk8kqC9VcIW1t25PtJ6nDUMLedBUGwTlWjpQLdQvFOwRMFQorUrWMzUVHTINqrkCNPRci2PZcXW7kvEu5m47jMJWwQrwUyoWXti8KTl45oD7Hr3UyivB2e+rU+s4eM5hCiZkTgqIsRl/5sBFKihfGwZ4DLSIu9jqkTMdrGSK566IqFsgRidSKel+u42CTQZpy0juPyKXwGwOx9d4OQngSfp4NLOcbH87gWLKQZWTrjMl/rHjseEiPeKCu9R6CpHl4Oy/uU3oZ4TiQSiUQikXhZkSSIGkvRc4KaeEkMl+xgKEFDLC6M0nTHtBLKzk6KnN5ilEP+YyK0OyFY2onE6AFwvCEeJ2bcmrHsoQZPu2Zt0Xr6gjhYW12M9PogRmavQHuPGkQnaUfZSckCTIvlyUirjP5mA911FygkFdRrHzzL7n3Tuh+YEBfNq1Ck9B8PIUTAdVcAuGHE7lauRpGFiqiBKHoZWuYWSBfrn012y3kTD4RumgkXOLFD7yGUg1CfC+FiBk/fJKyDT4MAirDwDW+TGYP6Wz8N8uNmeEDM/mBG+3kYeNZfprXiXkGn8TtbGFEkkqx81e8wHRYKfRm9JKK3h2c9sqEyo1j/5g1DgwMQ7yIlANxDJFhTMs+GngxX8lAoGSf49U40RDJNiZ+pHgjXYvQXwEMUOJzPPlc0pAOYBT3VEASU8FLPJmsjNfleLuTZYcSIHfWI3jQgIdlimIV7BygxYCFJNgeok2iuWF84sUReBwBu5LtnVpFKdhWY5QLsKhKLKsQIgCG+yUKQbE93PHl6wfGmdN6xVxFoPkSQ2TWCrA91bvPGwNOGcuoSEmGEDOS9ULeObWsgYrRWsNXu00GEoCWMZ39zgxG3JWSNaY2wX4JF3gjluWUaIvcKcyLLjPnQN1NmoUhQcHjeFtLU5gQ1EX1ewwyjbs50D2hdgkAtn+NDjpld0DL9PWa6LZEkYfJ5ekWC+Dn6d/Bcke/k/WLE3hXRadUL7zQnIMPxmO1r9QziC66ex0QikUgkEolXATeWX4lEIpFIJBKJRCKRSCQSrx7SE2TruP+pHecfLzi9JT/VB0ZVoUzfvT7NHhjlkGPtTBJO8d5x/PRhEk+QfYgR9k3PtR4vsrPb3uzAqcNSs/pO3KmP3cWQQtE9EQhyfgH4Itoex1MGPlXcUcq5oR0F/LzC0jVSZWxP5DixaAkcH9LUvZq+tG6aCYQYvRD2s3iEWEjHFLtOsns6PFlIszVYHWnsqBZcx5ebl0gn0TUxTDumY0ezn1l3PMOONQBcCvjUQacuGRBcDLOMtKoAegcOddX3zAxF6uAu/baDHFPk2m64et5MIQQUdFKCG/tIscuTl4WFvkQPAg/B2Hj2XrDLNDOQ6VLQTpMHjosk2pxg6Rvf2deMI+apM8R5xz1i5iFL8Wk748cbOv+K7tT3uY1llzAVdMkoxH14tFgfmX6Be1fFNL+6I++itxYRAjhNK2PPokPSNdwGoQjzgABAKs4bQ3zEw8jaJ2PWT8HDpEufUSMXqi3R06PKOS5oaTf3cdQd+Rq8FgDwQ/F3AB3mrSOV/f/Ze5dY3ZbtPOgbo2rO+T/W2q9zH7aJYzuKrQBCiiVQOkhA2hBEkw4SNNJLeiCMkGhGQtBCohHRJDRogGRaSEiR6TiAlHSISAgiKPa1z3OfvfZa/2POWVWDxhijqv51jn3Pte85vvecOaRz997/Yz5q1vzvHF99jxpL2q34V3UMEagoa6SmsCx0k6LkKUW+P78/xFki+wQRwn5Sw42wF1x2A5YlIq0BZQ41RQcAaMrgWMBcMI4ZwUxcU+Zq4CpCNc5ZRF/PhFumiMVAD0O2+Geu2wqsXibuY+JJPNdZtWg5c7umzgwLgtA5r7Ldr3oMleBjx9cMZ3MKmuJl3/MqOSAvqnep6UH9dkyTxUHq90UI2X6jNP68vf68fCyKGQu5v0u9IewezX1SGH3xfvgC+69jgtzIdGwblNF8eARf+L3uCSuE7v7sGGE3DK8vGN5stdVWW2211VZbfTvqOw+CxFgw/eojzscj1nfaEU3vCPGskhcAEKYa0QoooLGCkHdA3iv4IPtcG2u6BgVKFqVdhwu15A2XUXgTaJGfADQ+tE+PSWac2vkc1MZ2KEAsECKkAyBTwfhqxvdePgEAxpCRC+Pz874+qA8h43t3JwAAQ1BA+MPdC1xOI8ppQLkELLGbElGAWFB2gAwGDLgpJGCRi46IOLWebnTkVAiSoc3eyjUVR7/CwJRBsWjzl/T71QvEYkaFijbDk3aJ0oEpNDPCmVEGRjlmwMEQP/6u+dGUl9Jo7QCwsjX2pJHAQjWWFoAZgjqIgC9KSaD+HbW5976m19Lb52+Akk6yQdbw+nlpYg7Vzwmjpe5YMy/dZSpRAKZGibd9uYyr+huINsrVM6MDcqqEw2j/EkXHD4AcSwWKioNVgs74lJBnnatlQdtZPb52DHVfPSBmjX6vaPCesB5i1usng0pCyg7P5ApNyoDSjq2+XQh8dZAOzd+lSoY6+YN7ciS6kWHcdJDhGWDVAT4V2CNBnkptdsligdnAkBswrTuF6tfDouatnV+GF3cSC5fYORhXBgN5MuGUGetBQZBpTBhC1nhtFqzUQAMAGMaEEFpznzIjpYCcQk28QSHIzFjc/LYfD6DOE7BgmBIcDxWLSiESAxFy3W/ggruDureyYlwQIaw56OdxW4IGxqxrqPvQN6n+m7mALMHHwQwFSDLSwPXfgIIvdRP2/RjLzXislrSlUiD9nAMd1M33PlXF035W82kBUAEgj20mm+wOwngikEc4i590TVtCh5ah+02xwbPrVFNyHAQp7bN+7fIoathKnb8NbufiVltttdVWW2211betvvMgCEHwy6/f4ZNxxbu7IwDgfDcivmfEC9WGM4+tScojUHYFcsjgMWN/WHDcLXW1U4SQC+O6RixLxPU01saMrmYaeGH998xdBKYBBdAmPF5Ms+3a8dA8Q/KOkV4nYBDImDAeVnzv5RNeTtpM7MKKyAVDyLimiCKEQIJj1IYockbkAobgR3iJ64c780VpT8J5EqQ3SY1H3QyxM0JV51euzZiIAiXSrWDSakDOqt4lutpvzcAE5D01AClCH+LbcFiiRqlRo/2KrFwi4hNjeNAV8jSTAlIHY7OMBposNtbmfxAu3FbXZ02x6E0SuUuY0aaygQnVK6NbPXXtfxnQwK7qa4K2wurfo7YvN3ekrqH02GDAjoP0zxuvC+6/Ty1u0wCFG5NDQvXlKCM1MObLGh0yoI6APOob6b4oKNI3Vv6fjUsZBRig5rJiIMIzJobOCTuX1K10SzvPm88TOuaU+mmUkVAmM8UcSvcdO3k28MYZDl1jmYl1LtpKOmUFcAAoqFIayIhYboAm34U3+RRKbVL7Egcnih4zBml+OJOeV3ZQ8zmLoJ9T3uRGQMxriBycIvUjuRlL2PZEqgktpwA5M+azNu/zrmA4LCDCDTAQYkNXliUiZ9ZI76z3840vTtEoap+nFeDr5oLYn+thqGavwf1iRI1316k0tlEQxDs9yXFaKzC
QUtAUnUINQCh8y6zw8a5jaEBtP7RBwIP9JgQxexj/kjFTbtgudp1SMGCw3wHqZ6Rw3TU/iz4GFETxcwmhoNjvHkMBjxLoFsDxbRdWIkiCMXWknZeehQ6+A30G0npsO9DA0/Y7IQ1Mt+sIADQCpWcoPbtnNxBkq6222mqrrbb6NtZ3HgRJOeC8jvjF+0eMJgV52O1xOU5YrwG0MGS0Zssp31PBsF+xm1YELjiMK/ZDi4rZxxVFCJc04JoiHqcJ6xqRMyFNOuSZB/DVwIEL14dPN1cNV2OjJNyAIP5QmvaE9IIQjgnDmHDYzShCeJh3AIBLGHAcFjA1KvmSAz69KNATuWAXV0xRV4fTBRjeU90/FSBPhPMUUPbFDBjFmgdr6lzC4w1Sl4IBwAAH6tJZOjYATCJ0CUh31tjGjqUAaNyqNUBSCDmxvre09Il4IsQrTL6grJvVWBxlz3UsneFBqYsrtvHWpAppzIlnvSlbQ5EDgAiUsQ5Bi+M1c1CVtbQNeIRwL5+5WXEFwFfURKEaJdozRUS+sOrrUhYCgNSAJZdQAG1byoCxHnlpwEt/EGLADHlyRkYdj/UUILGdi5uKOmBQhm6bfo+0aXIDMLmkSJktbf81HaSyG2xffpwCgFROVqKBPrFJeNzktCZi+PlT+75LeDSi10A622+etEl0gKXOxw7QExbADE0L990iasIRz9xkM6ymu7V6CZABKjflY1YIAlEgJEiLax4M+DEwzuU0NebXJUD+czUDnBnDo24g7wTpPkCGAt4nHXcWyKqDnBND7DcvXFjnoEufutOoGGhpc6U32fUqT6GmsESL6fV5UIZQr1sJwPJKfxcvxwku56Js8i1jxAF6rrHOzy/BJ/rfD38pACUoE0WiKAvPZXNuqlroVv6RbXxNogRu8z3bvHAGjrAgPzdrBbAMqu+iWG4kO0RSGTH1GDtAxX/vJLExilClj/UcfW6T3SwE/X/z3D6CAJtz+r70wJvfXw4YAso87Iyc3aR2q2+myquzV3siAAAgAElEQVQjzn/1r+DwP/5vf9aH8o3U+d/5KwDwnTnfrbbaaqutfrbqOw+CyML4/Q9f4zd++aPKohi44F3MmOcBOQXEIaEUQrAV3RAKpmHFEAqKEJYc8PxRsRgbJBeuK48xCjxeJQkhR5XN0JmbzrtrHCXYM3CfPNEXC/aHGftxxRAy3l92mK9DfXsYE8aYK/16XiJKbrp2ZsHL4wXXZdCUm1gPDySagjM+MNJMKBNDRqlJLoA1s9lSDqRbtffDW4HhUcEPBx1KF6EbrwryhCurr4oltDTZUevUKavvRBmaxwZbMkqeUP1X6NSkLBKDLqAmY2n4Ne9AgBz0+yTU0lm4nYt7QJQgKJPJDGwVG9BGgVZSxo74trvm5mzMg9SuqTJ62mfimUCxrayDn1uDUDtuftb8dQCLRtm2qNw+WaVKLPxrPUBgrBAmgAzI4RXgVeo4g1QeVs+xY8jkCeb5oUyTKid55m9Sk3Ci1FVxPxZOaP4YFbBo3+UCIAMhaW9X0146cMgBNiE93ht5krSx8+1zavPSWV9l0GYzDxr724M3QnqeLTK5OzfSZrjKVPox8r7S9+9NqcvJfPXe7ytrvG+OGbhlODAgQQ8qezywsVx41eadM4E6XyJeNBWmjIy8MiS2eQxApWWzfp9nugHl2oekxsMqa6gBW34OZOBJsHQRXp+BYFkQZh843YczRfLUwJGeNVXHpsYt4yY2ugEz7fo6YaIfQzHZWAWUuZMmdj/inHQ+hLVduxqh3jOp7H6vaUNo7/l9qv4zXcxwBbU6cL2Xt/ifiSv4pz8I3QH2IFoP3lT5lp4TRZUEudxGun2IEOKYIIay5ZVVkum7Oocmc9pqq6222mqrrbb6FtV3HgThBMQfTfjkzREvdvpkPoSMgwELaw6IrEZ/zhRx2vSaGSkHpMxgFkSjRC8pVmnMmhUIESEwl2qoN764Ih8ZIsByHiELg5bmCXLDNgDwHGUpg4CPCXe7GYEEqTAulxH50bT3ibEGwWWXq6Qkz0EfrL1EYzNFCPl1hoQAXnxcLHp3BsZZn6BVttGOhbM2vt6w1qakG1u2RqgM2iyXqX0mzPq9eAboCSrr6A7P42edDcOrII9UZQrpAKz3gmQr1fFC4AW1weJT29Z6p9tLe1FfCX/WNwPRuuqcfcW/azC8URmUVj/EAu68AnI2ir6t4FKQ2pjM16AgyKr+MHnSbTeDW0GyeNAaWdqZJN40od50DXLzmvuUhJlq81VXi6Esh+rl4gakvQeGAQ/uYUNiMiFrTOOlNZXVGyWhxfLmFiddVqX56xu+f5XLaJy0nfsNgCAWg0wNCCl0YyysDb4ehzfX8IYbt8dX2QAdoEbFQZpbsM4bV5cbOeuH53p56vd7T5N6bTomis9f6fbNz+UagN5LDoKYaW0dL4LJcnxsnu0jOfsAX1ilr8yMjrkhsTW+QjpH1H+FlVHTnQPP1EAMkgaMdtdSYCwZxo2RbzMCtmtV2n3oZrN1PO3aakSrHpczn+KlPyEdC78mfg4VXLPIV2k4k42DzpsqD+tivfX8BCVSBc4k0g04C9hvTrHfr46J53X7d/vN7oDTCuCJAmHq02IvOVNpCCgBAD+LjPbPSPOv6eVTwqLn7nPd4svlBiSz3xn37QlyY/4q9X9gniQCHnAL4F7Djb/JVt9MOUOirx/Hlnj+nT8rdsWXHftX/c7GCNlqq6222uqbrO88CEIZGB8I794dsd7rk+39bq7AR7CHwF1MOAyKEKwlYE4R8xqV7ZEZOQPRqO9FgEjqYdGnI3gyAgAcpgW7mBCp4P39hOsy4DIPSKtekrQw2DTlEOgKHUl9iKVYcLxT5spljVhSvEkgoJlAIBRrGPJoGvJ+1VCAdI0Iu4Td9y647saW0iIAnwKGR8ZwamBGTYmBNsFlbOCIrK1pBGz1dNLmMx0F6SAou1Ip17QQwpUwQv1Pbhpauza6kmwr57bC781GOgrSy1RXUcvICFczooWBLLbIWgaTA7zKoH3CsNPOaBhyBbuAZpDongEpaZd1k44hbRynKYGr0SuQkm6nuInh3qQ8mZETg0c9yd6oMd1FsKdmdCa4gLKO1LOAIFlBMx7KTXMiZga5LhpbQm7Qa0BNHHJLv4H6KjBJBXJ0G0DJAescqikvm3/N8Eg1ncbBB1+NB55dc2va3ZwTUKBKdhkYBBQKeCiIMd94MEjheu5i55TnAJq5zhVnS4W5Ndo3TA1u17vOI5/udsze2JeI6iEJ6PyGWMNt7APO3eJ86M6vr2csjzLY/HRZUHe/uXTEvybZGuge8HKDSmdB9ICGSyLME0aoS9TxfVhjLGQMBzZA1c/TpDO8oLI32sUz0CS2/rqCRB2Q4kBoZWH0oF53znlHFXy7Ae4INWXJjyec/Z6lCn6AFCTJPjZAA1o7cKj0MiMxBkzHpukTeNz0uIIyDnA889DJBkrQ2ACRfr7fpDPZnzcpUJ1MyGVyFXxzBpP/lpEmjN3KuKibc7egorDcgHuU6AvGyzVxyoCRwmJsN7+Q+tn1GlBBXm
cpwX5DnjG5ttpqq6222mqrrb4t9Z0HQQBrqj7c4fGiw1G+x3h5uGhzywUpBwQutdkt5tifDAAB8IWoRPfi2MXU4h0BZAdBhhXHuGAMCcdhxrKPOK0jTot2LGsKmIaEXBhLClhTQAjtGAAghoxHk8Bk149XE0Z/6teHZEmsD8RugugP0axRlm/uT3gcUm3imQXz/YD5MKK8DVXWcruqTbXp8+arellAGQDrnaBMgnLM4F3CEIseKxTYWS+hghec9CGdPU2jehJQlcuUQVkFAFDuMsIh1cSSTEA+oJnLJmu2vOEYBRi0Cd9Na71OMTh4RRWc8HHwKM6SCWUJmvyTUX0a0l1CGHMFTXLiClwBQBwzOCglXWJGCIJ1DTVVQgrAQV/3JIqcuW4vhIJSyFI6FDAgbgCGgyQiQLG/9+/1czMLEIKART0K/H33KABWpClofGrh6qky720snKlhTaabirI3fBazK1ETk8rBjuN+RRxSBfA8QrW/d4RzJYcQ6TGlIWCtXg5c9y+xMR5qVWAANX63emVA56aDY+5nkmNb+a5yDpdRpNvvi0ul/CU3mvT3yZrTQUE/L+qMfh1AbJINuokK7tkO+oXbc3TAg0waRHaOVWbEbTveEPfSK5d2wcCjak7b7b94M2xjduPRAm/qfRypsXuqZMfBEVFDZW+6PbFJcJOGle3tfDSwa+Y6tsIWhdywXQVTGMoe8vuMGoBQTWV7ZlB3nXhRhpsnKbHFTZdBbuQsVeLCJiPqwKhq4ittTtyAbwKTJXXj+AyQu2GZ2LacUeLH5syRL/itGIjiUc5Atx3/KFOVW9VjLG2ugIzF55tlA04MrJLBPHK61Jyt/uyqZ1g8Z0x8GfviqzAyflrMiz8J++OP2s7GBtlqq6222uqbqp/ZJxwi+teJSL7kv3fPPveaiP4bIvqUiE5E9L8Q0b/0Z3XcW2211VZbbbXVt6O2Z5Gtttpqq622+vbVzwMT5G8C+D+6f1eFNxERgN8G8GsA/gaAzwH8FoC/S0R/WUR+/6vsgBOw/4iwnnXp7TRowsoQMoaYkQvhmiIWk0w8XieIEK6XETmxrvIXwsllHiy4xowQNDkmclHpdmmY02UdMKfY9sMZ+7hi7KgUgQsuacCJRghQPUcA9R1ZU8DlYQcsDFpJV/F8lXXKoKGoVny25dlnBno85hrPmwvjOC2YWY9pjBn3+yuuxwEPhwPS5yPixWQnsW3DV/6d4t7T88uhgPYJYSiYxoQYMwhq0AqY5GEowAtgNbYKBWmRwWuH0U0ZYSwYxnTDYFjXgGTTuLAgxNJYFLEo62IJwMk4/jMjzyPeX11TQzfeCmSr5uL7dq+KRIhXUgPNju2SnlhXkWGrv75i7EyRQ4GMUjX6KwA6h+qhIALIKFiDXjtfJa/HY3OrSqLsUIt9LrkUx6I4AZW1AMBqhg5yjjXNYg1ya54IaIRoLCBWVkpag7JDfJyPbeVepSu2Ar+6VIVbCo4YY+eQEe51KTzGjJIZaQnNeLFPFQL0iybfIBZQvB0HwFb/YVIG6WkZ0ChZYyCUUSp7oJfq6Oo7VdaKmlXatnsDSJN8lBb41BgON9qf7p++0u+fIWel4NZ7xRkEHZPluZzlxg+C0PxTun/XOfhc0eIsEEaNqu6P1w8BjJok0ss8XD7iepjeqNYPNsztXJRtQI1NQm5wTJUhpr8RNlcS6fWJ0rxpnDkCqGzKmR/OGgvS5kpogy794LmMw49D+nuHahJLThaxbZKbKglyk1ofUzONJRb7vFQ/pfqdOpjoKBXd37t4W70/GtuIijKTfFzLKLcMIPMoco8V3W//vrGTismdOgkh0DxRnDVSZVJdAlae+usKlYmZ2bHKxuT2Ht3qa38W+Sr102Re9PVVWBg/rX3/uO1vjJCtttpqq62+7vp5AEH+LxH5e3/Ee38NwL8K4K+KyN8FACL6XQD/FMB/BH1o+fFVgPGx0dvXhwEnIfCYsdurD8hlHquMY30ykf1K4IVrlGYK7nwHLFGfPJ8sTldEvRhC58OQU0DJhBAL9ocZuyHhOOr+ptB5TZgnxZpDlRBczyNkZYR30dJJ9KE4Ga1cdhkYURsyuvINZVqioKzaiF5XRloDXr0438h6Xu8uGA9P+HRa8FF8gfWqzTRP+qQdvFG1B/2cVXLjprCHKSFyqRIT91fx88pCkDFhN+izZC5UPVQAYM2hmvkNIWMaEt7szxUMelwmvD0dEGOpHhz7aakGtnfjgofrDu9PO8yzGs8ODwHh2mjhPBM8WhUE5NGlFG16OGDhsa43vihJO9+aNOINtN1ZeQoaIRu0OVQjWKqyIcpAHu39UTRppU/scNAjCshNIKU17dI1XuIyAY9ONT+N8YFuvQ+emV2WYDKOKFgPRRvVQRtVAKDJwDIHP9yU0001ojWbJBXUAQPF7pf5aQA/BQwXSy4R1LQcLx8jMS+DPIpup/OaeJ7g4Y2dv6f/tuPsxqGOTw9AeLPflaf3wDZDX8aTM3CiBzT6Y1EQUIEacr+Mbj/N0FTlHs8TdG6ASuhY3yR6eGqKvZbHZ5HIDsS4D0YGgoOz2c+fWsOPNsdgCUwVPOr2+2wI2vmmhvv4Z72RFkY1QHVfCzVTpmp6XAYFH/LOvj4YGBC7cSO0Zrz4C917gop+9EBV9UiS23sKUSqwUsGvL5i9AEikx51Jgajes6OaDzcgsV47wg1YQyzIN/G7us20KnjdG/nW08pSpU/FzYG7n1uPCK6+IoYlkUd/m3QMJpkRud2+sMkDpW2v30eYgXXEDeC71TfwLLLVVltttdVWW30j9fMAgvxx9dcA/IE/dACAiDwQ0f8E4N/GV3nwIDXvzGZOBwDhSqAcUYaA872xE65B01sADI9sxneaRuJ+ATUdIOsDpjdVaT+YDwBhNT8LEtOni372cT/iaZdxeanxCB57qzG7+vS6LBHZvCrkKYJnRjypYaVGmmqDDQDpjpAWhnsjDI/agN7EpEZWv5AoWO8inoaMaAACZ0YRQuSMV7sL3o4H9XFIjGHSp/L9tGKIGSmzMlPWgGlacRiNAcAFqeh2RAhD0G070ONmpAJYjK/5kQQ3DS0VEEmFEQtj7QwXRs64381gkuq1MnCpkcAMQcqs/h5FGR1hVhNWH4hwRddIoZqv1jEKvrqt/86TXeMuccIjgGtjgtYUxgtAT7Ytpmb06tOvADFA0yrMY6VPnvAmJO9I922AkzexbA3t8zhYngnxasdw0qaqrsrbajBZdy1mwpgnwnqvq95pD5RdqdcHNbmFNFrVm/F6oD5eNr+vDH7QExnfMcb3QJgFlKU26f04azoHacJKBNKOkHfA+sL8WvYFGAqEGMhQU1G+9e0gtKYbxVKE/BidHRGV/eFN4I0niAEjZKvvPRDg17Yaw+bOZBJQ41hnUFDnMSHUTtMAJKG27eemsiQKeoIE4siOb+A5QAIDtboB6NkcvfeIHiPqnxKoASXd6/28vkm/eeb5If1+5PYY/Fx51Rjsm2stABOAKyo4IYFQznaKE5nvi6BErtGz/Tj143Vr1OLHcDuoN+DvF+JsG
1hWfU8yQSgoUGnj6Gaxvnm/35zJIqGB6EKADEWBEL9fGJVZVQHLMUHs/1NqCpWNUWUMFf3LTYqQzc92vt34doyd5hdC3cm1rzljCgCQ6cbsmBPpOXzJuG/1pfWnfxb5M66vm+Xxk9QfdSx/GobInyR1Z6utttpqq29v/TyAIH+HiL4H4B2A/xnAfywi/8ze+xcB/J9f8p1/CODfI6I7EXn64zZeAnD5gSDvpDZUlJWqHK6EVAZIEMQL1/jYeOoiHu01ZRHYX4s2lM4aGN4bUyPiNj5U7OE6EPJMKJHxZA/SlymDQ9boXWNc5BQ05hZAuFgSylWbtzAD8SIVBIkXQnoK9SE3XCzRw3dvkgBeNS4yXAjn+x2Ge821FCG8u+7xuEwIJFiXWM1BnSRBBAwGmixLQFoimAU5tuVDjxFmEgRWw8/7UfdRLGnlk9MdrmtESgHMpVLamQuygRhpCVjGiPM81Pf344opJuzjqtKYEnBNEZdFEYunecTpMumYsSbTrHd6Dbxhccq+Mz+CAQfeJLnxa95rZGhxw9ludT2v2jSUKC2y0xeIL4ThEQhLW3X1lXDfjyc7hNn27wkgAIIBJmk1Q0dymYQDGQoe9Oa0ACooBngzS5CiwAdnuWGz+Jf08wSJOjmddZT9I07nB0M6GoWwNMmHHfvwwJje6ufHB1FTXTuWEvVzjmf5fcJZIAmQVcGNPDfAZo0EGQHaZWWdFGgkscuWzLBVmGoDXrpEkptVc9Lr2Cea6GdsbDtwoPaObgjbyQ5YbgGzypLRQWrgGvpt9OP+rAZ0c8Q22G+fxdgc1DEROgyAReVdovdmYz8Y2OXj7cbDxjKprCRnBHiTXg+8nZeyV/S8nV3wHINwsIjWxlJ4DqbUuer3SY3m9mPoYoR7MKaTBjlo+QU2jbN1fAi761g8xjh04+HXxYfRwACN5W73Z+kYVD6PfH5KDwqygpoSDTwLASVKvedhpq4ylU7q0rFzhCBjqdHXVebYg7OJ9FoDagRtKTA1hSeUKlur8d+FIM5OETflbXMsG1AMADLjloW1FfA1P4tstdVWW2211VbfXP0sgyAPAP5LAL8D4D2A3wTwnwD4XSL6TRH5GMAbAP/fl3z3rf35GsAXHjyI6K8D+OsAEN68QvmVC8Yh4/q5eYG81WGhRIhPAIjAS3P7d+mDNw1U2oo6YA/E9gBPRR/w40mQ9gTHB5QGDiR7UKYVCIkg783fIgbkKAAL0jEhxIy8MGArhzwTeDZ9Ptoqb7zqsYQFLVnBKNHP2Sq8anNKWRAvhPAuYu0Al2WOECHEISPNQTXxiVDs4fuauKWpLBFyDZiB6k3BrKkjyZJt1qxeE7uoy80jZwQuWFLQhJvMGMZ0o+vX+GFGWQNyEKyz7gcAzruMab/iNI41SefpMmGZ7fpZo8axIOwSQiyQN8ByGTQtB1B/jqQMEYgCVgCqZCRPCpCVXdc0d82bbkT/o53rLFC9L+gSUIagXiK2yl5GIO2l/r1S0C9UV865qc1BIjUaVr/U7btLBKnAhgEMzmxaj7hZWea1azbR5ofPI1qACGoAH7jGbQpbA8Z0IwVwyQYJgRZCPBHGxzZI6QCkvaUJDbZa7nPRpDEeReqMhLAA4/va4WMVQI5Z/UJctuJsjKyJTWVnIMjMlcliB6bNsc1d2ecbZoUAOlkMYCmw5BD/egBQpKV82Dg/T3JxYKfNldaQCwHIAHdyHeHmBSG2Hd9eBRi6VXxhqqwel8e0aFeVTD33qqjX2QGEIi3xpNspZQNGDHTwJBlPGalD2YFE9beul4rYjjU5pn/NZF3SgEIq5tFi852d4ZPb3/03zM+h9MwQAzmeg03wYZPuMwDYr4/Lwny+dVOlMiJEgTn/PPvcIdsOtc89nwuezuLHJoFaapDb4kzUQMt8y9JzRgygjI0ycE3+0i+33yyV4enfK2NtJ3XOc0Jj/vi1lg7AcYnWDeCGrVp9I88i4/7VT/3Av231kyTIfBVmyx+XuvPT2sefZvtbbbXVVlt9ffUzC4KIyD8A8A+6l36HiP5XAP87lFr6n+ILa6216Ete67f9twH8bQA4/sYvyq/98DM8zhOubxUEoY4xodKYbpUU1qx1D7+VMm8PrXlnDIJB2SUet1lGgKyRFV9t7RgHhK4ZWNho1YIs1txdA/iqT6fBABBeW0MhoVvpywAn0ZVSH5GONl2MgSJBajTj8EQoewdhbK2fBXln5q+xGJfdKhPmeQCT6PtG2U4OMJACIRq5SlgNnPD42RAK7nazyldIEIeMaUw3McCJRbdDgmFMmGVEcQr5zLieI67DpCavQgqQVLNDAR8Sxl3Cm/sTDsOKl+MFn88HvLvotX73cERZGcUYNnniSncHgDIWYK+mrESCdIlqCOoNSRCEXQZzwW6/oFhssnuU5ClgDgPSE1c5kgxSY35lMsq5ENKRkC5sLA5fIUaVWvhqeW+qWRxESwrGqc+CSknyXj+zvCydnEHUQ2bpmnNGjRPmVSU84drSlCkTytR8OyDq9+GsGDfGhQBIVBvs9aDnkA5AOgjyoSgIZ/OkSlEy6TGZsS4vxsSaVcrj8304BcyvGflY9LtmIKoDgeZVIibl6U1gAUAEUhi0MugaQEtjckgUBSSc1cJAed4I9h4V/ld7rd6DHUNDG2z5QnPsPxic5BmIgQZWufzmhumBBkT5Zrp7WkjU4JPktomtDAW7ToUgA7WIY2dE+Mf78+skXv4ZEVHDYDvPEtr+btgnpIBFL+9yAKOPtHVQDgDE43dXm9PPQY4CO0c7Hrn986Z8Fw5YwH5nO0NYNp+UL2OrqAyOGmupe70C4I579hG1DkimDmBcgUDt+4DOaT+nGzYL+W85KnNM4rPr4kCpdMBh70W0o3reN1KuLwNQCdWLp/fEEeou3He4vqlnkbvXv7xRb7baaqutttrqG6qfWRDky0pE/j4R/d8A/hV76S10BeZ5vbY/P/9x2xw4YwoJf3B5gWAsjHAhyIC6Il/ZFm5Q5w+8vgIYdHWWU6MaU0H1HSgHbWQltofU3hizeFPMrTkIV9tGAWhhZWQ8hro6X6UTQF2ZvlldX6HMFJdWiDZkbE/SJQJpD2DfVoF5AfiiT8ZijU2ZCiQThv2KGMuNsauzNwoRpt2KPGgijjNB1jUgZ0IphFIC5BRBmXA9t2l3fTFgHBPGMeMwLSabMR8IIeTCEDNOJRIwC57s+/wUwAtBiCHR/ERS19gOUhkZgQSBCu4HvZjnVZdMPT3CG+ZyLzcdFUVB3K2YpqRpNon1O97ABzV+5VBUtrMGpDnW5ptYEO5X5BCR/dhsBVxPkur+ZFeQRgEtjbHg/haUFQSp/g7eNHKTvlDRxjSPwHonKOZfUiZfBgcwFgiJmpb6pQwCJFaPm4uic8MTwIse15gVwMsj1WZKgRZ9Px2lvs6Lmr6mA7C8snnwMgNjack7ngzTX6fRfGGKgjFhYsRLY18NT4LxQT1v1mNA3uFWzlJXze2+2xXQlW+lGlEBIJWRMeK5MTfS0eROo3l5xOYX0r5v96cDD3asehIEODhVjPXji/H2
dTFmlzN0wHTL/rAi917x7wRnIHT7pi/5ntCXAyA9umGmIdUvJUAZZ34c3PZDRVk9z416i/3eueeRsjWk7QMK3niCyY0pKWuSUgWLSNOgqjyMdPwDE8gBvmcAQRnaOT33MfniOBrgW+camszJwM4veKLaT0IFTkapEq46jBkIi7GlDEy5AWbsvzKiARX9dOqAF3/v+T3tTJzg7CP54vb7uci5AW7x2mQ//We520dNEqL2/xWNyYSOCbbV8/o6nkW2+mr1VdggfxJ/k580meYn3ceWfLPVVltt9bNVP4+k137F5R9CtbjP618A8M82De5WW2211VZbbfU11PYsstVWW2211VY/p/VzxQQhon8ZwG8A+O/tpd8G8O8T0b8mIr9jn3kB4N8C8N99lW1mYXz0dI/zuz0mMxUNM4AV1aySjQniTA9Aje+oSEva6CjNPMPYF5b2sStId/oa26ouXXXlL541CYN35tbfeR04TRmiDI14smQT2wevKnnIY+/x4MyObtmR2r/jRf/MU0eZ7lYqe/8AN0+VTIix4LhbcByXyvR4Hyd8/vkdRIDhmDFEjbGdV51W6xo0BnhVLxO+miTEWRKZkNcJ15eM/XHGLia8u+wQqqdIWwJmQjU8JYvolat6lFBBi9CkbuU5E/AUcV0ZHyaV9Dy82GHggkeTw8g13HhDIJi0p/TLuirHCSFjnQJyLDfxxYB6qKyZUK5RGQhmUCgsGO4XlKHoXCkEWhh89WVmbqybXdGV8hHN4BCoj9nZjBLBPQPBqfDGHALMmLExAmh1iY9Kn2h6boIgNeY2B8Zqnh/lovuI1zanXZ5QQqPzUyZNQzJpgQQgvSgoR0saOiR9PbOyQGZlndRxD9IYFqznnkNBGfuLSZgeBOODYHgC1qOZoPplMsaA+1AsL4NJfOrXkQ66D01LAsZ3bYzSiZBHvSfKoJ4KHiFaL0M3prUqy6JdA2cnVJ+H6hOjPhllMKnIM/Y8dfPQZRYCqlGx0nu4SFvFr+kugppuVJNhMt0YxLp8z5k7wp3XDaFJZggQab9vjc5COjefMxGqXKUxQnihL7JpXMrTe6KYXwUAwLySslFo2ORgTVIkliTU9tunmvRhMc6MKpYIBNjf2ZkX5pfRy5FsfNs2UFOExCRsGg9NyFdW36WClkoE3W6Vp4y322oXGHUMq9eUO04bi6ZEZ1fd/jbreUr1HXEJS6YuEaa7xd1jpQvWuvF9qd4laAzFAmCc6Za9slWtr+NZZKutttpqq622+ubqZxYEIaK/A+CfAvj7UDf237TRJcsAACAASURBVATwWwB+BOC/so/9NoDfBfDfEtF/CKWc/hb0EfM//yr7WVPAZ2/vgJmRzeNgfE/gqz68cm986g+bkzZKnKiZjna07bCoFwivMC8Fqg+03vgwq1dHuNr3yHwcdnZgQvrQHQBkbTjVK6E7eAbSDlhfSI1FbRRmTX6BmIzBjnn3mTcp3UO1NTI3uvdRQBcGrQBKwDwpAFHk1hZE5gAIcJ0HsIEb11k/m+YImQNoZfDZfEISKr2fChBPjCQDzgZinD/f12MIUwaRqNeHAOVp0JSEvnnuJRHPaO18ZsQnhhCjPCjv/cOXE4YXczVfrb4R3qiwmXw+o/cLACLBYacDHE0WNKeAeR4gmVR649T1GkdBKIWrcShWqp4cPgbCpHNP0M6ta65rUzrqBaMoKA76FAJiAceCYkCPJPW8gHvYzFwTN0qm9t06ULbvIEAQpFcZ+U6NdwFNeonXJgNwwMyp9fEMlFUBmrzThrNMpUqGyhKAhavvhxv2NmNWuo0uNV+Osi+47s149xCw3hHG9yqNCYvcSBaqWbHJwsiSMdwomIoCJ2nfvE3KgBu5TSRtWstA+tlDN7WKggkqxWiNdJ9W4jIFTbpRSQxRm0MggEyGUn1U/Brbx8ivd/FzkG5ak0bzRtHEjyg3SUTNBLTJiKiY7M3nlEs9rNFX359bEFA/0x0fAOrcWz0W9qZD9s9WmZegeIJRB2LU6Fg0ALFQuQH1AIBMRqMmzFL3JebXUo9BLNXEAQYfP91BvSbuwYMAyGTalZUaQNj9BvQAJC2k32HR3x6g+g+VyCorE4AWbp5Ri89Hi1GmBlwBbfz9eN0cuYzdve/jLFTjt3tQww+2Jt1EUTC8HgNVvyi4vKbzr6ryo6jyLE8t4tyuQ3zfpu53ub6pZ5Gtvnp9nXG+X3dU8Ncti/nTmr1utdVWW31X6mcWBIHGzf27AP4GgAOADwH8DwD+MxH5FABEpBDRvwngvwDwXwPYQR9E/g0R+b2vshPJBDlHbf4+0K4uPQ0YzJSvBEBGXXFzgGJ5WWoDRZk0WeXaklr6eEXAfAxeCPJeKgNgjZrQAFYgZHwPlDMh7/T9tLdGkGyFedWH5ew+D/YQm+4F6WUGH1esSwBmfbpOx4D9R7qSt94D8wcFZV+Q974kCuiquDeFgjyhrtxKEAN3CARCkhGX04AL72sTw1MGnTV+dnmnB3YdC2Amo7QSwmJjcyHkvW7TbDpQovpcDI+MPI84XwLiY4v1zYegx2ENxvREkBCaaWnQRllG6w4G64DdT6OotwoAFDP+JIlIY8H9qzMAYJr0IpXCSInNv4Qr06MIYRis6SmahhNYMIa2LJszI5Mm4ZSBIQUINkZ51e6HatOmDb/7Qrgngdwl8JhRLu5s2BpIn1NuYCvdijAA8FAwTgmlkJq3rlH364BW0khOVKAs3DY3brYZFHSTQwK/aCYH1+MAvrI29cZG0bQaY4pcOkDDzodWbtchkxoEd6BbnuQLXg7VYNIjWEkQXq42FxLOrwKWzyP2n3TMAGe7GAgSZjFTYGVI+Sp/mBXoABHWCCwvBelA1VdneK/gZbjqNnhRz4cbJkggi0al2lx6yo8zDSQ4qcGTWqiCRZSAkBU8zSMgg/XsveGq/3YkqnG4N6SdbCy0bOBR11zDtuUNs8e6UsdqErbbKwMIuGVmFTImiY6TPAMb/DMoDhR0k8hddIMYEEIVwBVCMxKOAvL71YGNkashqif4CFBjXfFsV/oxaUAClQqotDifbkwSNbCUBTxlBX2jsbAS34AgDnaAAOmpP31CDomyp1jpV9KxxyQSZOabNKkvxAh3AJ57f7RBtn0HUSDHfwscwHA2Sya9d0nBDNmXxgpKCjqSjT93JsBAA2WEzD/H7+s+pedpQ0CsvpFnka222mqrrbba6purn1kQRET+FoC/9RU+9xbAf2D//Ql2pH/QVBB32nDNHwQIa+MtURudshOUe31CfPHBCdOQsKaAeY24POxAl4DgbIcChDNZ/CzqQ3J5uaKM1sFbKshyCdh9FDC+B8ZHgVgaxvzKVstXqg/F64uC9V7fV6o+QYYC2mX84vcfAADJIi0eTntc5Q6UCMv3Mj74lc/xan/Bj773EgCwLhHpGkGPURNKhqKN+oNJTlY1uPTVQcpcWS/eWJYpIqzKMKGTO77ixkBWQRBjDVjz6AaZZRSUQIhXQrgA5Ry1UavP3tqchFkbLweaaiDICKwrIx8sdUVgRrDt4T1ParToco08CSgWvD4
opeZunDFywjUPeFomXFPEmkKV4pTC2I0rzvOI6zJARFNtrsYECaTpNcOQQWOqfUY0kGTNQc1jBwVYVus8inU0ccwgFry+PyNwwSfv7jRu2IAkEWVp0MqayJGpsU1s/hZEzAaAEEOZHtyazWLMEU9hcdPdtrJsoIM11Xlk8H7FMGQb54ScGGlRgIrMWHW96DFOn7SfETEzy3imG3lHNWKMei/Ri6Um6Eg2wKSavmojTCurlAq6+h5eJaRDwmmaKkvFAbHKwpq14aOs8pf1zoCaczM4FlLjWJlyTUgZjozhiRDPaNHRNvZ+/Loj1ChsXWF30BIokzKTJAKFCTIClJsUI1ws5edqLAFjpNyAQUZ66BM/bmQU0v6rq/vPUksUZEKT9fS/8t7IF6qyjrpp8n05uGK/MR1rpbJskm3DmQ6+j9AxmqQBMJUJVQiSCBRVdkZBGRYy+lwgyKLsMrCywLC2Y6yAQGEFP/2tigT5tZF2bExAH28NaOLUUFAK6T1WumM1SVHdBut/5DHAhRTwKNQMfjugRwgoXFAqIKFgtIMkPXPHGVA9q6hKqCpb5JaVI0MBxqIpVWJfCAKy3xL9irHnFkXlyo4qgOU7qfMrd69356iAH77z9Y09i2z1naqfJO63/85P6/MbS2Srrbb6rtfPLAjyjZY9oJZsDIYfXnHdjbqCuCsY7hbc7Re8OSp74J87PuAYtaN6WPf46OU93l8nXGZd3k9rwPVxBK2M+KgSiXwouHt1wXXSz7x6ccYUE+YU8enwCvJhBH1KiGd9CB1O+vDujYxEIL9M2L3U/Q5DQkoByxIhhbC3+NdDND3Ma+DvpV/Dco04vrzi119/glfjBb/+4hMAwFoCPrre4w8fX2CMCbuY8OnTEY8PCpKEc0sPKUHlPS7ZCCblCItR84kUqLAG31f8w4IKgBRr9mRoTR1lqswBzrpKno64lRjU1VVtauOlraw7EycvCoLknXlTdCvreS8og0COub4WWCpYlAojEqMIgUkZHkPILe2SBEyCE4DlGrWp6BqiMBQQFwxDBjMQuKAI1ZXp3ZCQA4FJ31sPjFIYQ9TjuZ/0er6ezihCWHPA6TpiiUM/PZGvUa9HCg0YA0xipZ4rxeQsCALaZeyPuu1SjOGSGXllpIfB5Fpt9bxekwKUC2PlEdnkN8OUEGylP8RSJUpL1J+PZeXmyTEA8dKxPmAN+c4YO4NgPC54eXepbJs1M+Z1QCmEnBnrHFHmALoyYIlNeQwo+4Q4ZqTXK9JiDaAzDBjgIWs8cybwY4SMcuM/E58Y4aJzFlHHCAd9fxkD0l3QaN6rym5UXmOA3UCVpaHjpKDa8OTADZBXUjaVs0IGu29deuBJPsliswFIuh37Tvlx81p7Ac0Pw1KqaqIH6/2VJ2Wd6f5LhxqiAw1sX6nvchUUIfucy59A6CQioolV7kHjX18dGZV23/aAjcssrgwwW+KKaGrQUOr8EgKKaCKVyoIEJO23iJcmQSwOVqDdD8LKehIDUdTvBiADLaUQyqo5vVR9aATkQBygf3c2xlBAowLEkjvQzhKOKFFlqtTYX/+7+wINDkA8A2IyVfZVeSY/qtfoxuTEvhgEYSwoQSAr2zxQGVy7HwQ8FORsk4UFUgTk8eUmseFEwIKb30w9NtxEC2+11VZbbbXVVlt9m2oDQYQQTgy5MIrFrP7gL36G82FAKYxXhwve7M+4izP2QZkikTMCCSJlHMOCv3D/GeZjRLLl4iUH/P7jK4gQPvv0HjIz+JDwYn/FftRt/MLdI14MVzAV/CMSfDK+RN4N2H+kT528mtfC5GwUgMaCP/9Gk/ammFCE8NHTPR4e9/j06Yh1H/BqUobDnz++xV/+5d/Hx+d7BC747HrE2/mAv/TyIwDA98dHfH98xJ87vMNaAooQ3p4O9WE4Xk3PXk1GUSNR3XckXoyt0q9o5sZdd1M/91+gAmWKeEOzon6/FuFm5dojLsukq7XpdOt3EMw0EKJMkbxrG8xmMEpJmyIyuUy6RHz4mYI9HDJCUB8QMenLELLGtQLYjyvmNeJ6GVHOETRzNa8FgBS1sc9D71rY/jrtVsSYMcWMXUx4tfM5pIMQqeCURnw+H7DmgDFk8H7GYrHAwRqr+RCwrhHXMGocsDdSZi5ZWRdBEKaM3X7B/V61HlUqAAUcHoYD8hyQO9mSMyIoE3jWGGBhfX/exdo0p7HUBtLjh+UuaYNozIBsKJbLAeSYMB50HEIouN+pCa5XEUK2+2ItjOsyYEkB54c9wludDHRhyGPAep8VvBiKNqIud9kl3f6hgFlwGnaQJVQT3d1hUXDy/Qg+K5Akp1ilVDQVZczcEdZESIegbBb3rRi0GXXfESrm0dNFVnPSOSgWyZr3JmGz+ybvpRplBgPuatw20MxK0QEd/b2BBnSU0K55qSa8ug/Z6xhxEOSFK0BBmRuzZTQZR9/kOjOkA9ngZrxeUwZScw4li2+mq80FAxZcLqQmsVRlaSo/avGteS/IO0G6M7+boSiA4WyS3uuj+1O6e5A6pgiMvaDSHq7Ag1T6mhoTA1CAyJkopTMV7eKpC7jJgjq/DD0+HR9n7VSGEOnclw6QqAAJoIMTi4IWzjbpzzHbAHJ30kLtYiVCSR09SFCPrTJuWKPJK8PKfssbUNPmNpvMrQcuIU16udVWW3099cf5d3xT3iQ/rjbGyFZbbfVtre88CEIZ2H2qD8W+ohp/o+DPvXxA5ILvT08V/Hi3qsPgR9d7rMYamXPEm90Ju5DwZlSmCEMQuSWIXJYBRIKBS+0t5hRxpgFjyHizP2P8pYy3Lw542t0BAHYfM4YnaArMQOBRkK8BT6sySQoIx7hgP6w4DSMe3h3weNrhw6h6mbcvD/jnX3+ISAVv5wP+yR/8AJIJn52PAIAfGggzhYTHdcLDssf79/vKDuAZoEBtxTjrKmzeSW26sj80l271G6gghDChTAZGBKBv3AGXBaAmH0hQz4piTYGDGy6x8eakNJJENQJUhkq9qvreAm1cM7BcB21G2X1CzOg1ijL3zVxw2WXwlNXwEMBTLCjXCCQyXwwzpLSeitcAWQQlsDaEK6FMBWyN1mUfgUFwGjPmu4BXe8Kb3bkCZu/mPT5+f4freQQK4eXrEw7Tgv3Y5DaBC17sBLkw3k8TrstQpSQlMzgU5BQQYsY4JhzGFcdxwTVZSk9hDCFjDBn3U8FuSLgsA66WtrMsUQGRQsDqXgIKhgDa4FbpDBngFVElBnS/+nAiDhlpCgixIDjb5TBjP6wVWIpccFpGZGPjEAmGkDFw0f92ymBhFjytOl/DY1AZTw5IL6Bsl9T8GrKox0MYCsZpRRgK8tNQm0K+E/zCm/c43Q149+4I+mhCPKlMCQDysQCxaHLOKEhTQb6Em9V9wKQDlirCKyE+OStK51uYAczG+khqduxzJU+aMFIGQbIGn3uphzfW5OOMm7rxhAhApWI42GQpP95g55nAj6Eyt2htxIJ0pNYUe3MdFDBkTwWp9yWhLMaIMekErc3nJcyE4cnnh2437RpwyasyawDzbHEplqjJdNoTlp
d6DOlezGvDGB0kzZcEprKJz67JDZsF9YNq6swtdUUPsCWgDIS8L8BeQbXq/8GoAAfPBHq6/b/JYtdR3BfEUrkqiJJJsRfR3wRNZeoAZRJQFEhovjv15OpJomOA2Dk6BmOsD6pSHft87GhEhSALNyCJb99XjImUARc06awljOm+15dlM0bdaqutttpqq62+lcU//iNbbbXVVltttdVWW2211VZbbbXVVj//9Z1ngvCqsbFCLaLws8cj3uzPeDWeETnjkgc8rDt8fFaWxcNlhzUFSxNhfH7YYxoS3uyVCXI3zEglYBdX3E26Cn5NEXMOdfX78XqPwEdbBS/4wfEJv3T3gH8yfB8A8G58gcPvRQwnIKxAWYDwPuAPP3qlxz0UHA6z+vGFArlElCfCYit5v/dWWSt344x31z3w6YQwEz4z49NPjy8Q9wnTbsUyR6zXCH4YmlQlAWxxvJR1ATEshLQjiK36lyhVGlAlGc9Y3+vRzEgtttNXo32743upRoQlukSmyWlcb09JV3BdXgMow6RKZ0RZIby2VB5eNSKVEzC+A9YXjBJwszKcJ9133gskEPJKkDlU+j5lQpxRjQslqLwk7ywu88p6vLbIyguQkkopAEAe/dwGPL4cML8ZEN4ULMYk+uz9EfNne03FAXCaUk2gAYBrIYwx425ccJyuOAwLcmE4pygVxhRUWsIk2IUVkQtSYXz8qHMpmdHrcbdgFxMOw4oxZNxNSp1ZcsCSQjVxTWtAWgKKmd0OD+qV4ewAjbTVMQOUVRAmZaEcdwvIvFWcCeXyomUdUIRwXgacLhOKyWmIC8bRvFhIMA3qUfOD+yeMJpt59+6I9WEEX9hMYpW1UFNp1giJyuBIcwARVOZGuo8TDrjbzfiVl59jjBkfff49xBPDrH3AC6OMjHwokKmAdwkwnxdAZQbuUyELI2dCFkLe6QfCldQMeW7sJk6ArMqq8huiDEA5FI07LqiMI5/DOiD251BuJTGpk0S4/0tvmpoJNDPoMdTEquGpyddq1DcR0lGNL8vQ7kchS9hZ7B6xOV8i4DG6eUD1yyC7H+MVmD43WZEo80Ujhu2+KEA8NXaXEBBnvS/lpKyq8VG3f33DloKl7I8yqJFuHaLn0cLUjRfabxEngBIjzGrSW6PDCdWkt4yENBOSkEpjnCVhMcZgILxn7D+hm9+d9WhpXztjhIwCgTQZzaL74dWSbkK7Z/zaqVzIBqPo4LvpK3pZismKbiRJ7tXB6lckpHOThwzy+VqAMoeW1hMEPOYaDV7GUj1OsstvzJC41n26ScTZaqutvr76uuUvf9J6flzfBnnMTzrW34Zz3mqrrb5Y33kQpDbaToEGMH98wIeHe6SDJa3MO5znEU8nzcjNprF3T4THxHgE8DaolGWcVgwxYz+uuCwDpiFBhHBdhmrIeTlPKNbUsEUy/sVXn+LXP1Dj0v+HBO/SK+DDxokPVwI+Geu/T3GHsisILxajbzd6fXzH+D18H+PrqzZwLACpOSQA4MwQHnAZd5rmkelGdi4MhKtAmECiYMf4XhuptNcPzS+pPqBTphYp7IdMmqyTdxo5SqLpG9QZm4YrAFJ6PxVAElVgw68NZ2B4L7WBqQEQkxpR1tehcoR40Qf3eBWNAI7qX+LNTb+NMphvw6jHof4GGunrYx4veizrETVO2ONqOaEm6OgG1dQ2nu0YF/UMKBFY5oiZdvhkyFhXHaTlcVQgpWiDVzLjfJ1qOs0yDwgxY9kHvNgx9nHFFBNiJ+DfhYQpJKwlgKlgKREPyx6n9zpfxRrt6zRiGBOO+xljzJhCZ846KaAymlbgtI747FGlKMtybFImaedbPQ+EEGPGy+MFd+MChiAJYzY5zrvLDvM6YFkUnMgpoCRqaRnMKDlgJgFxwTxE4HDFB/szfvjmEQDw2eGMj1/c4eHhoDT/TJCgCTL1OoAgE4A5qAdMbnIp/izi07s7/OrLt/jl+3f4+PULrGlCPLl0yrwtEqNMhJwJtMs1jljcKyOISoZM5lAMDFPvDwYv2jC7FwZJmxt81mY4FSDfWzN/Y3xp/zb5Bw1mQuveKwT1uEjcfCC6xpWvjPG93mPDo4CTzn/O0r5vF3F8aiBIf23VuFWqtCxNdJsuY78RHvHMZrAZZwM5zOMkLEA+6e9HiZ2dxY7qPceLfi8sQPhc7Droe2nnshqV1LlErhgI4+bI7j1SqwAgAi9kvwX62xGvnUTPzjtPhOE9Yb0EpD23dJYodbthJgyPgvFJ6jilPWG+ENY7wnonyMcCOeYKDstIKsdJBOq9S/x2yTpvhXzM7PP+Pkv9Xa1SoIAGSERNuCLWZCoRnZPlGkFmZhyGAtpnhJj1XiCVnYXQUq8ANfEWoCXedEBcHFPzK9lqq6222mqrrbb6FtV3HgSRAFw/oJrcAADDW8anxxd4PO6QM2F9GoHEaooJbXDKvtRYQhSCrIxy1ifeK424RMH7wTTxh4RxlzSO0Rq/vDBw1c/nc8DH60ssKeDPv3wHAPgLrz/DP84BJ76vcaDkK3bQJmP8iFAGxvUHrBrzKMijPrSGa8DuDyPS+6OaDh4y0q6ALNY0zFSjfMHmjbGXugq5vmCQkIEa6oFARXD4uGA4+fGw+n0wIcxq+oiRmrQ92kqyJ0uQxoTWxjRJjQn1lVIqrcHRfZjh5Go+AyOBkp+jgiucjNEhjfkBACUQlntdueWsAE2Ym38JgJo6k++yAi4z36yGeowkrwCbUaCvoOo/bMXXkzcEGB7b6juvZhjrTJiZcXq3b0kUC6PsC8qdNr8swHweavpMmQMSR6QUcF0G7MYVY8w4DGawSgWndcIQMuYckQrjsg54+/4AMSaHR2PKJWAeIpbrgDgm7Myk9TgpQyRywf14xcvhigLCi0mNVf/xNWI9RjWWJYFcA/gcUCxxh4eMaUy4GxcUIVzSgPfXCcti6THnEWL3DqI2bIiNwiAASgKkMIgC0hJBBOxiwvd3ajbxq/ef4fv7J/zo8BKfnQ6Y50Gjhw3gyReNGRruFqQ5QmZGOkgFszgB+WHEh6cX+KXjA37wvff4JNxjeafd/PA+VA8MXgiUAnJqhrGw3weNtHXQQtT3Atqsln1G2Tn7Qb/Lc2OrxLPFRS+MlKj6q9QEJG7sBzXXZCU7eCys30arNcw2p9zzo4IfZtgqAUgHNMNO34bofRAWTX1qfidyg8kACqB4hDKgnh4AkFMzN017Qp4MjFqNkWVASh4FJRKWF8ZcOqinBhlgGy6E4SQY3zfgUhYFL4SBIRprI9oYGQuCl3as6x01IETQjiMBlKT5BkGPy2OAKQvCqqBlGdo+yqhASd4Zs2tvgMpV5+z4qOM2nAjLhbBeA2YAYoAYmXmwFNLDcbZHz/oBWloP64WpAHQs5t8BUFTGBgUBOVMlq9dHZREJwCdlr2WLYE8vEobjimlaqwnzmkPdhoMhOTFKZgVKySKLgfr7E0NPNdpqq62+6/V1M1Z+2qyLn8bx/nEGtltttdXPb33nQZA8AOdfKkrZNpPDeCGUj0fM0wBeCPv3+hBL3fPgek9IdwViNGOwg
J4MYLh4ZKg9VE8R1w/S7aqvmQu61IMujId3r/GPfkmbsr/0w4/xK68/x4+44PG0Q75EXfn1hunMOPwhgR413nH+QJAPBThqY7u8AfY/ijj8IWF5GXH9xRW0z8BOO4I0B11FXtXoE2NRQ9CrNa73Ccv3WE1JxwxJjPnNgDxEXRW14gzkqKu3ywtqVHagMTSElNYeTc7C3tC0SFExWvvwdNuEgZWlcfk+Yb3XbXtjOzwBu7eC6aEg7RoTJR2cWk+4/EJB2WddJX/LlfHTYnrtT0dupL0GoDZ60sWYSmiMG29Y++ZreC91VblEQjpqQ7Xea/OHx9iMRoMg3K+YdgtECJd3OyBxY1mYIWKWiPMccAkTOAh2e0VZmAtSCiiFsC4RkklNEy+hpe6QnV8CKAXIylhjRNrrtb4ME6bdiiFkrPuApUR8MJ3ww72yMN5+cMCSAl4fLphCwqfnIz5/ONYGiVkQQ8ZlHXCaR5wuI9bTWFNJ+MLgYoaSxWQDuTMdFTOZLBZtKoQLCz7jQ5XUfLA7YRdWfH+voMhlHDCvsR7D6Tpivgy4O16R94x5iViXiPSky/MeZfqjT1+hCOGHhydMIePTnbJdTvGgoMJFTWAdwHAAMu/EJAtSmUu8Uid1YY0BDqJmmVHZQsItijgbY4oTMLxvDXeVtNh9UXz13+YVG+giBJN6UAPnOjNLv7fWF8BsTfx610x8K7MqAfFENcrY56qyWxS87M1LXT4CaGqU3iOCMhDWO2B90e3DwY0rYXzQY0gHYHltDIRdl5pSFBAKV8L0mW4gng2oSI2ZEhbpaBR2rhlV3jMvfGOWLNR+avNej9GZW2G2e5h1vDgLhrMxwIrF6Fo0+Xok/Q35oSAdCYNJ1oYnQVgFw0kBp+FJQeflhbEr7jUOmqYCNjYPkVhcLSrjQgppylIhlak4QBEFzAUcCmIslUHl8rH8PoJWjSXXtCDB8MAYHgFHq5bTgOX7Fsc9anLSfG3ap2CgShFCWS1ByEA5QEEQ3pUKmmy11VZbbbXVVlt9m+o7D4JgENCbGeUSUWzlmk7A7lNrYBb1rbjRZBMwPBGWlwHLSsivgWG/Yj3YyvgStdF50qZAmCAUawQmYCvALikQXWkcHgnzVSU1/++Y8MHxjB/eayP6KPvaiANA4YjlnjE+EHZvdbV2TYzVVgLDywXLU8DhQ1jiSsT6ilXnDWh06GirihZ5CkC9EAD84vcfcDfOWMv/z96b9UqyXXd+vz3FlJlnriregZeURLXgbsBwowHb8IO/gz+pv4AfDD/ZL/1gGJK6xeYleacaTp0hhxj34Ie1I+IU1ZKIFiV18+YCCNY9eTIyhh1xcv3XfzCUxqNV4uPnDW9vr7H30nHYkzRjc4Svv/SSsDGnVUQFvUaPWoAOExmvFGq7Sk38NpKK3OQERdI2x2jmU51k2+NtgIuJoprkyzwwPBaYUVM+z40SjFtN+0a23/104ubzZwCenjfwVBNtTqXJbBHVyQX1XvxCVMosmYWtIv8fZhVSbgrXFIW8nTi/N+9zKfswbWG8TIRtJNmE3RvUuAI9YZPQWxVWdQAAIABJREFUWZYyjRZ9sPIZxYsFp5DkiuwdEHTilFlE6ASjRvXmk/0mgb+cm7q8rTnOc8zgV5Dz6FXCOwc2cihqtAm8ujryqj4B8JPNgYjiy+aJjRn4prjmbfa5ARgmS4yaD89bps4JU6TXn5ybOWZUBZEJzA0cCJC0NMaKnK7hOIyGNkvQ3tU76px6M0vKtI5cVLJYajfxdrpgCoaLuudm07LvS/a6kd81kam3hIeSb/01zZcjV2VHZQU0/CbBNBmm0gnrq/90ah8zU0ikCQJ8+t4u7DBzVNiTMDdilUgv2E8zWyS6RHSSVjLLb2aZ2svScyxtBixeMqNmVsecqAQriOE3wmqaLoSVoupA1YzofP3Fw0jAsnEw6INBe7Wsj7mhnvddBYUa877MwSk5OcZ0ilAm/GVEXYwLgyABfhSgbXg2xEIAIdXMN5Tc53PTnzbgk8JvZC2Zk5wfMwhYZDphhfyt+3GWxGSwY5X6yDmKVsCc4TrhN4nYrB4+87m3rcJvJeHH9gmbPZDMlNbI21t59kyXkg4FUOxFRuNOIjcqDgnbQinp5YwXBr8xTNuY10JOHZqZHBn0UCahTSKlhNZpTQEOSqJ5k1qAD+81Md/z1XuD6eVZE50wYWyX2YGZUVPsYThZpp1hrBN9HSX6Oj+bpyIu8dHLdYEFkEtRYUxaPKzOda5zneufo/4+5sbvw8L454wW/kOwQtr/7X86s0vOda5/oTqDICoRR4PqxEwQwJ7ki20ost69/x2auJKfCb1d03vHdAvk5sHvArHQ6EHhWpk4usM6iQSZTMpkVxGahB7li+y8D/v6kuNVzdX1ieOpIhwcn5RK9K8jsdBUH2TqKu+VSxpqT7yeiM7hTonNdzA9G4bbPE0ssw+HTTkSNR/fa9nBu/rEm3rPYaqIKHZ24KvNI/vLj/z2cA3A/WGDnwxTb3n1er+YYc7T+yEYHg8N4/tm0b+nnUdlKcQ0GVwzUpYCsvSDY6Ci+GjmQxSTxevI5vMD103HthgYghzj41XNc7gmOoPpZPLevYbhC2FJvPn8iZ9sDtx3Gx5zZOY84V6n50karVbh6/yz6YWXxDRPlNd/vzSTtJ0AC4vxIsJAGa/yWthKQ0oRoTciuYqQinkdJKZjwRQVqtcUB1kXMUsckkvMZqTLMNwo0izTSAIomG415Yw2A1Ob3DBnL5SlcZ6ZK/kzdFSkAUC8NGJQfL8vebwRAOFfv3mL1ZH9VBHyTXBVdnQmm+yGhmNbMT5WqFFJE69Y/DIW40qVUH2+L/YrCJK0sJEWo84IejSkk847D60tOZWRh+2EsQGb11CRY3hLE0hJcfzYwC283h2pnGfMjJlNOdKWjva+gHclv22ueXN5YFfIen+1O3EYCk46EbwhlHqZlAM0m2HxVaicX8CYx5Msmvb9BvdksmxMEV2ORrUvABGT8DsvIMuoheWj/zbAOkcUQwZB5ojbsEr2Yj410a0GtaFMxEtPuR0oCpE31cX0SSx3TDAUDrbQVQVhZkAB2kWsiRiTvUiSWsyfF9lRZutMvZhumo2nrNbFb0wk1QqlEsOFw6rE2K3PruQ1jAIWoUUCplyECzkw3+RYV69QmSUiZsfrORDGQpYJzYyVlwo1s4Ju01Uk1QHbCHoSGo0fNXiNb4T1YwZ5Ntj2BRtlIssE8zbrQNjKNrpLzXgwuL2WvxV7KI6JKoMgxSHhK8W00dl7xOSo8BUYm/2SYr7+wbAAECLHyvdHhFBHiRnOZs3VB3CtvM/XwjaLDsYdC5Dj2kS5T4RCMTUK31hUkOfYfJ79xhA2Eb2ZUGWW74T1uaJUEpbIuc51rnP9V1D/tZnH/r6AzT+037/PcZ2BknOd6w9fZxDEa4ofHPakKPbyI3dMlPvIcKHF0HL36aQWJTry4iiacj0puuDwF3ny7iJx5xmw8CAJBSrIpM69kJLMjWl/oxbpiDvI65tvNP6h4ulOJDnVQS80bxB6/nTj6beB
WIjspXxKS/N0qErU7Uh/p6jfibFfcUi440zDl2MLpXxx1l6kJPtGGpZv9pccppIpGHpv2ZUDb+oDf759z59txLz18bbh5Ev++uk1//b2O77vLhijxWa6i1aRbTHyy2MpDV8UE9hmI43n3FBW1mN05FlXxCvN5MV8QwVFcgl9PXBR94vvhcsIxc+vHvj2LyL3dzvYO5JOFLc9P7kQBsNl2fOu3XL/KK+DgBi+BpX9PcygsG3Ctuu501NapSSIt4gKwiBRUSb3ixFrv7JAQiHU//4uEq+lqTNlwACht5hWr2yC2YfBK+yzE2r+pBYZwMIUqdTKpNCJ5CCmhI6rP43pVX4/y1qLicX0M5kVQFEhm7vq9EISlBkkuSE0PaS9YzgJGPXd5pLGTTy2NSZLXxo3LVPi0VumzqFbkRslI4kZql4pDGnSqMGIcWg203wpL0teoRwZqQE1gor6E9lSKBXhpJmayFQFtI085RNVFRPBa+xHxzFuSElRumkB5IyOXFQDhzpSPBq6+4bvg+bu8risFR+l4fdOwLzSeYrM0nFmnZrvioHKTNyVJx52AhT9pXnDIe0wJ7OAaFFBLJKkvADohKsntE54Lx4mL9ldKDA2EIMhBpFKTJOYwALgNeaoP/EQiUVavCh07dlsRgrr8VEzTJZhsgvRxHthF6SosC6gc8JMnNe9jqQkv6d1wtpIWXq8N4yziW0Gn2KWU6QI3alczHe1CxgbsS7QVCNjll+pwSzXUcyChYGysE/KfCJcFFaJg1RE/IZM9ZgXSv7/DA4SBCzhhW9KMmnxckkmCeDRzm7LrJLCKoiXywUoFxnHfE+1RiSNGSy1z4ZYaWIGQXTtiWWg31h0pxkvFeWTpszmrrZL8r8+Lfd70hCKeZ/kOKatXgHJF8emvTDbZubgcCnnbgbAzJiBUYOAbRbGCwHbkpFjqB4Umx8CxSFQ7hWhkL8fU5YK+kZ8WkavCFXAWE/UfOJ31B3KT9NiznWuc53rXOc617n+SOo85jnXuc51rnOd61znOte5znWuc53rXD+K+tEzQfQIu99I6oH2sxZ+nviL3nq4Tcv0DmQ65w6K8kEiHstnUEkx5uSV8VIRLgPpZqTXBW4v5ohmXA0GVQTb54mfyhM9zQJL1fcJfwTbGomrHT6VcYRK4RtN/cWRYeNoY82uXzXhzQ+aY2XpXwXAEB8VxXN6YWqa4ytNlvvkGNfhvRzD43TNQxVRLpK84r6IvN9t6YPl81ooM7UZGbSlcRPfdxd8/XgrZpVZonC7adkVA+V2wE+WMGqUYkkrWGJgg4FgJKK1nOgu5STEoNEuUFUTh77k0TdLZCzA5zfP/JvbtxSvvuWb0zUxKS6Knv0oPhLfPV9y/LDB7A0mKqaL7M1QrkkMZlC4Z0VxQFgYUVggvpoNMVfGgopihGhPLAkzyss1C6VE6E4XkXQ3cnXZLmtsCobTvpBEh4LF7wHEA6F4fMn2YEm8gRdRoDpLp+pEQr3YpyzFebFNlWS/iv1qqLnIBZJIdMS0k/V1eGHOqHB70JkK/3Z3jW08fl8Ie6nxFPW0Ht9gSZ1BaYhFTrbI6RYAaRQmgDmK34OKCt/Mn7XuwJKOYkQpoaaVnWNGeZ/K0aM+QSwVfWYgTIWYwqoE9slyUg3ppl2MU6dgKEzAXE5Mk0b3mvGx4n32XGjeSIpGU0rCzWXZUxm/eIYcp5IhWAYvCTz7ULGzA69L8ewZbwz/IWi6Q0XcW/SoiL/LhomKMBmiScvEfTbHBFA64VzA5tSewgZigpTPT4ia47Eiei0xwVlOojM7w9hICJrTVIo3S1CfmCm/jD+dyqx/U2kxsI2DWRNMVGKsAtoKUyTlZxsvTFtTAgY5l3NCTbIWXycmnRgvR0JvUZ0wK+ZrrKJaGCHChhOmAmQ/FSUskNl/BZNQ+Tqu50qeHzEp4mhIs3lsPj56I5Hho87yotlDJzOqLCJVSgmKSFFNqEUqYsQ4ejDYZ4M7KlKrCNmLyO8MqQ4SoVwFxkvFdGUYrmbPEC3ms60YvLpOzFeL48yikfu3epIdFiZZWvxeZgbInJqlvDyHZkPp4XpllIQKpk1OtyoifZbATReGUBqqj1qMZkNOv5mfe2OiOChUUvTWMdWZqTOnVkVFsX/hMXSuc53rXOf6vesPLd35x3qlnOtc5/rbdQZBJqgeItGqha4cK9Fxh1LkDcOr7OkwNxNBMe00fqOp3itcm3DHl3IKTQ/w+QR3Az6W2JPESs4xjHOSie2zjruTuNg55UAozwrtk0RAsn45nvfbHTTeG+6ujnz4ueIUara/kSahfExMW8vw2UT3RWK81JQf9SKHmSnVehKatkgkoHiW193eMO0MfivHnWzieLL8v13BL5s7AJpiYpgsUzD86nRHei4wR82QjSBPrype3+5pqpFQeHrriEEz+tyQkOMcvSEGTQwKbRK2zLRznTAmEqOi3TekUaOPhrmt/M1g2biR//nma14VR46h5G1/wbdPVwC0326pPhqUh/5NgMsJpRNF5ZeISO813b4kfVvgDtmUs4D+bvV30YPGHhTJKGzGNhavgEr+Pe0Svkmki4m6GXEZqJm8yYkn2SNDA15h29l/RsCxaCRhJ6mcfLFE7OY18YKVrn+nMZl7WTVT5PO+vTSYfbkNPYpfRZx9SdxqjBmUvCYJH/J6+V2BbxxFBlt8rxmd+8TLwoySACTpO0r8RuY45l4tcajRiU+G36xAlIrpE2lMLAQkkESPlM+DNMw6A4mm0wSTmLUewWvx3NhEzEmMTYfBkYrsBRE1vY6U1chwB+HgUJMSYAd432wpnV+ihxs7YnOTDSzRw6eh4NCLTOypr/l8K8a7lfH87OaRD+WWB7MhZsAoRbUCC0EReyNyliANp37hZxGLRFs4dONRJjFk75NZilM6T9r2pKToO9nvGJSYGwPTaJhyHLJuDXpOuZmBtBwvG4pEGtUna0p+IQMTKpvVBiWJ2ZrFAHbxMMkeFqbNcr9ZNqEVIRuLjqmQn5ssC0JAMpVyfHCv8lpXi5dFGmQxR2dkLRdimpvcCqLMz+Hg8jP5BQD4yTEpAQNfeorINrIkzMyfpRmSSGJgBVqwkVhq0ilHET/lZ2dvmHaKUEcoI6YKUAbGJoPgr8TE2B7EI8YeRSoz+0GhckJPnzKShEijZtlSJWs/GgFXk8kJU/lvw3Ab5Z6NCrLvzGws7S7yh+zgcFXQ3TtsrzF9Tv3Jzw4zsgC6zaRJWi+pQCCbKx9/x6/mXOc617nOda5zneuPpH70IIgCfK0YLjReglnwlXxxtrkJVFG+oM5fkhOauA30dURNFpT6hEki8Zea06Zg8+bEqXakzkiTPE/fDZitbLv5IVE+J3SAKUc5LlM7lc0PqxzTOYMg2RRwuq852MBPXj3zTkOfjRrLp0T5ANPOYD7r4BLaK4d5lkuuJzEDnJvXUAIJqgfZfvGUSFYRyrl5UJiTJnUVRyNMi6OSqW4sI/bZSDrBk1omlv1Q8T7BdtdTFzm6d5LoUoAYNSnmCfSkUUHhy4jbCgIwM0r6zsHJSqMfWQCEMJb
8JZ8xBMttdWI/Vnx9f8v4rcSeVvcaMwmQpW9HXt0Ig6WyfmGjxKTYNxUfTzey30kxXkXqL2XCf12NtEPB6aFm6g2m1UsjDsIqiWWCrRi+Oif7fOqlSR0GRxyNNMRFFMPBtDZlZmBJsghVBl5YJ7BJSSMUs19GLEBl08Z5Accy5cSV3OBZ+fmnfhp5seemO730ImBli0QXZeqs9MIkmU13ZRqdML1CHdXqTWHyuo4KJmnOdFgbLv2i4U4272NuhgFSzGCBzkCihmgjya5rybZq8V5JXkEPsdKk2W/DS0ObNoGQ9ym0luEFCyJlT5qyGhlUErPh2UPnqaEtAmHXiiFo1JTWM+QEHB817ejw3tCfCtKkOZmK53y/vbo48ro5cFO3jN6w9xr95NAPL8CitAJMczqJCiuolTQkZYiFExBCQVcnTrPBbB1wlce6gNJRvENGg+pXsGkGI8R3JXvFzOyxcr5W2YskyTkNm7hcx4gwJKgijMLyiMVLhEpAleR/B3ibzV/TfBygJk0qI/ZiXKJWSxfwXuNHi+8MatDoQS9+FwJ2KZTPwFhUKFaAYAGCEyRjsnnyp89VVE6kUSqDymlhbskx5PsjgEIAjji6JTkl2SRWHTkpZ9oJgGGyMakewEUxbQ2lJjQayrj8fbCbEWMS05XBB8XYWYZb8wkQozy446pGnU2gQbyFkk35PgGiHM8MJKWdR7sopqXZhJhBYzrNpOW5s7s7cbnt2e8qht6SBrMkSQHYo8E9i3F3/V4if+OLcxSNQoeVzXWuf/qadvD9/yon/PP/64w+netc5/r96ndZImdmyLnO9fvVjx4ECQ4OX2nGq7Q2A2VA9Yb6O4NtofrB4Bu9sDTEZT+RmkBoElMQhsc8cTVjQj1DeGsZLiVKZJa6RLtKMsIWko15LCnTuRnk8DWLREVozznpwKxfjtSgsQdNaxs21cjP3nzk659+ll812BNUHzTdjaXaDrhrz5CNT0NUxEnkKaYI2ZBRM05i9GjbLJfIU2GVmybbrfuoJ00y4LfZ3y+nMsxsieqjojMVh6SIO0XKiQN+kqYtTTkJIqglOjVptbyOSqQo0ZAKaQzCJqHi3PSB+r7kl8fP+NVuIvYW99FSPc8UAzEMHO8Cb64PXJY9vXf03i6mnlolkRyUkfFSk1ygvOv46fWTnAcdORUFhfUygR8dw6lYBrg6m0BW1cQ0WYLXdG25TP/VoCWSVkmTqbwcqx5yQ+XXtBk9qRdgRL6Mc1P5IsJSe7Wsg3mqrVQilAI+fPL+vI1QiqnqJyDIi9fVbOBopfmTeF29XEc9wbRNy7TY9C/SapQiudx4p08BnLmiFUlZIr//ZD5hqqBYWFJpEikJ+kXjp9ZmXnnQCJMgZNYRQRgXqvakKsssBrOYfhLFQDPaRH+pZL2XkZQZDqkz+EHzTEMcDc8mirxkvo5aWBfGRji6BcgYTvII/X4yYqZqPNtqYNoZhieHPepVUqAlLjkWWZ7gc9Ofnxv2BO6UcnSuHK+vFKGUz5i2luHW0TVRJCKTxL3OzfkMDs6fp+ILJhHzOmIFF+brl59Jeiumra7wbOuBx+cNPhYCTL147kSVgWEgqHxdZ6BEIYspgyLNZceXV88Lm0WrxGkqOAwl7VDQnQpCb4k5fla/iCZe2EEB9PzvvE7lw8lyErUc45yMlMoo688kYVAsoGFCebkn9SRgkI45tSufRwF/BQBIJhF2gVgpQp3P81GAUDEkzgynwggzBPAXiuiiyHUMmI0nFlGSYOZjAMYZmJjP20s2Sz6PSid5Tnr9IsZWGEV4vcSsK69wB40e5EQc1IZw1VFkJlQq5Px7l9l3GkjCSjJTEhB/WoEYChiu9RkEOde5znWuc53rXH+U9aMHQWIB7Z9MmM1Emaf4SiWm0RLfN9guUX+UqEFfr1PlaacYrqVxD6XIGZwETSxRltU9PN9UKJsnk+MafSkpHRFVRoY3EqnrDqvcIuVYU+3JVGaFbxTMEoY6kIpIGhzmYPjw/RXVV/eUn8kGhtMW7SUxZvq+oLsxqMYvVG+tE6qIbDY91gSGyTGltZkYd+QYSqDIjbfOjbp+AeRUokcPRrwFojHUH3Kz0EH9QdGnkuNgUC6iTFz9BWYqu5YmKtk8zc/naGoLaW6ntemnjPj879gaTKuo3lriR4Pps7QkH0P/KpKuJy6uWgoTeOpr9m0lk/zF+4BlpJ12HlMGNvXAYZT4mBBFvmNNZFcOsIFDUzJMcuuMk82SHU0IiukkbJs5nUL7WWIjrJm5eTZ58p1UTn1IYDq57qFYAQQdZrmUWgATmYLPjKEcjeqkwTaDMEJQLHKXl7XEh9r0if6f7EGgPGvzl6NXfa2W7UUrcc7JQpwVEv7FceT7I5brdH6WWs1rwnQCqpRPaTmG6OQ+WpgiTkmEaDnLKPJnTVlKltfiEvGq5DynzLqZY1b1NHsd5Em+UvixZNp5aewzAMAkYFw4OHROxfFGPGPkGETq45uwMC60Z7nOQZW8tTuudy2lCby+OPKbqxIf3ZrSoyA2AbOTEzUFSWpJnayl4qMhfRDvHpFLxEUmItdBY1tNqAWQncGShXHjWfyDZg+jeV2AALcqy3DULNF6kV5zdXnipulwOrB1A4UJ/BAvUe9K2R6ZoVBH6dfnpt3F1ZfEBbROXG07Rm94vT3y080TOiMaY7RUZqK2E0Nleae3nKiIc7T3fD2y7EVNcs8sLIq8TiEfe1qjq5f9KzIIM+WTroC8f8omedYYuRFUUDMuuIC7SUG8yIs6CEMplZktl0sPORrbr35PJjNy/CA75vM9L3KeT/1hbBGgCOgsyzMmSmIQ4CezRNOmoHMCjkLNiVBZ4qTz8YUq3ydp9QEyY0F/sMRNWI6BfJ+DgJWxSIQaupvsGxJXKV2oFKcv0qdg6bn+2WpmhMx1Zoac61zn+n1rZoacGSHnOtffXz96EAQbqa76xaQTRKahTSRk+Yo7JYpDJJT5i6kC18oE1jfgmwRubZhUUqiYMH2ifqsZr+XLpPYsDAJx/9PEiOjJfxKYdnbx5FAZIDDZ7FSPCjPopfGYdobxJkhT6hXlD5Zv3Q3bKwFBxlqYAcUTbH8L47PFN3aRXCQrU/2Dl0ZMHcXMsTi+HN/PjaN4VyQrLI/ZNHS8CaSNp94NGBMZBsvkCsZBOtbyAdxeRrfjKJ8dHYuJ4hr3G0llQlXSzc3nSHUmN7Jzs6cIVy9+rwpMpUV/EABEhxceLoB71XG57dAqcX/Y0O0r9N7KdZqHsk7OQ9p5XDNibeTUlTw+Zm1UVKRJo2tP2xTUxURpPT6sE+txNGusaW8WlgdIkxSzXETlyTWwNlSFgGdmSLijNK3tG7XQ91MGR4p9woxJwIJibQRDJWPj6TKQtgE/aoqPM+VivZR6UjBlcMLkaf58nmemQpjfI03X/H7fpMVQclkaRVqn7y/el+xs9MsyuU4mN25B5FK2FXlBuZd7LhRyTPMUPxqRxExbxZQvw3gZMaPCHlUGELNfwcz0MAm8Wr0rYmZCzGttNuJUUEyKMVliE5fmmCKuMc4+G89GkQ
EBYiJsE8GbT+Q98ynWnWI4lBxspNod2biRn33xkYfrmsNjky+mYnvd8tOrJ4oMNFy5jlOQ++WvHt7w/sMF+kOBPWlsazCDGGzKNUT8h1oBXWeviLishezloVX2FxL2mL4VnwhrImPrYDDoXiJT09bjGgFl7pqW2+pEzEd1U7ecdgX7+3Jhm0SjiLuJajPm9ZlBoHwdnAtYE7mqOkLSVMbzONb02dCi8w6rxOdkY0eu6p6uLVeWhAFswlYebSJT5wiTJrY5JjZjEyhIRSKphB7XezFWUUCZIoicangBuALoSFF6UuWZKEWOM+W1NwMpReTmzZ5hcrT7SthCaWXDzPcQILIdP4MS+RycBOye7/lkBIAK/fr3Y6rCYvpKUgIOZ+CDbNw7g5TJJfFPWSJyxSxVe/H1iKWiv5V7pHjK5qzP4pszXmUp5wtT7XmtJJfwNRx/nham1byWYh1wr7pPZUTnOte5znWuc53rXH8kdf6KA0yjJJcsFaXJUdvIcG1QUePaFyZxSr5Qls9JmtAsIwjVi9ejQk+J4vkFvRpWKQmgeoUZDH6rYDfB5cRQ5sn1KJoFMwjYUn+M1B/BtdnLwin2PzX0d9KM2pOC3xYcZqaJWnAWqsdEcZAG2ud9TFqmyb6phG3Qyhd6Pc1T49yABvlyP7ML7Ckt3UJvE8oktE7EqChLTwyG6SJPN3uN7ZHJ9iB08pfmrvP5jNbgN5rpJixT6fkczHR/08uEu1WGkBMQ9G5C7SbGqNCdkuZ+G9jenQDYVgOjN9w/7uDJUd0bbCdSI9/khsbJ5Li+6KnLUeQuT7UAMKwyFZ4NnS051YH6tlubP6/xgxVPCyXMAr9Ja8PUaWIVSVWUie480Z6XW4J0b7AnhR7lPeN1WjwW9KhwVtJrgOw78yLZRQMNcOG5vjmiVOKhuEDv7brWvIKU5DPy58fF6yXvQwZqospMlZlNQF5HeTuJFx4WZgU55orZk2F5EwKkmVam5cXz+p7hIjeFFoITDwIVwPiUmQqzLgDCn/T4SRM/OsqPemGDzOczZYNI3Ysp5cx00NlgVkUBVqJLeT3JZD1mM+RU51SbmAhNZnoMapmMi7REQTevWRajURB2BZOmawv62qJV4t/d/hb7KvLNq2sAWl9Q2YlL11Gbia/KB/60fE+lpLt9um741Zev+cvjZ/z2cM1zV9F1BdOzIGbFR1knppfjC6XIvWZgMzYRyiAyCRdx9cSf3z1wWcpO+6j54XTBMFkOxxptItumx2QAwplA6wsOU0nvLUYlqmLi+WaEb2UfdBQw7PXFEZOB41naAhCCxgMPXYMzgee+Yn+qGPtZkwO29GybgYtqwJmAKzxhZj4kjSkDTTNws2l5amuGydJlH6LYmU8AOmwiln4BOVQRBOgqPYXzdL0jBkPIYIixkYtNT+0m7t2G7rki7a2Ad5mFYi5GPtvJDfdbfcXh40aYQvO9O98vJkG5Xv85AYe4phklK/+tJrB+vedSZ2U7ar1d5lcT5Oe+3Bv9Xd63OVFIQawT9CJPdA+JaBTtLyYGm59bUTya9CTPOt3Naz7vokvEKhGLSHnXrc/wzIbclGIM/M2L5/G5/uXqd5kh/7k6s0XOda5zvawfEyPk7Ityrv+S0v/wr5zrXOc617nOda5znetc5zrXuc51rnP9t19nJkhQpHcldlpHcctUvAn0rxTTpcIe9SdGjpIek00BjUzn/HaWmmSmglG4Y6J4Fj+RaftiUpMyRd8LXcMnR6olahGEqQGSVNOPFtspmvu4TN3rtz2mK9iPlvFSNP7FkyJambhKaolICuyQMCPYIWL7WW6z6r11kGSbpNbJ96AkBih5AAAgAElEQVQ0oRR2hc4+ESquxwZg94bYatqDI5mE2U2S9rIT7fs0OGwr56B8SsTTeuyQrTiCbHfaQusN006Lph+ZvM/a9+Iwswg0vpFzM9xq4qWHxpMuheZeFdMST/t0bOgfKor3FndU1B/kuo5X4K/kd8xuwhWeu90JrRL96FDTakI4p6iYXqFbiJ1MpU0dloNIeQqtbTaurcHkCWrwGlt46mpCq8QUDFrHJZ0mRM3xsqI/Ofo7kT+Ynx+ZT3Pwmu7kCJVDj0LdN+NK3Y8O/C5RbQcu6543zYFvnOddeUnIHgXRK3RmtjRvhaEUnVqo72QPjmhm3wiFPbHcD5IWsy5dFfK1y5Nt8TsR1oo2wlTRg/hvgLBK7AlMZhkN14pQS1QrzEwS8TNxR5H/aJ9QYY2dvrkSw517vSMeKkyrSJu0MFuCVZClODqt8pvZLBQlE/HoVn8Q80K2NF4qkVe4KKwAFyWpZWbk+N8xfNXynFi8X4KYekYcD27DsfBMV4Yv3BNvLiWV6BAqPow7vu8usDoyRksfHXdOWAf/pvyWPy/e8b9s/oaPd1ueQsO93/HL9jUAf/n4hg+PO9qTE+aDTZTbgU0pTJLX2yMbJ9KXynjuyiM/rz7y3SCR0XtfY3WkMhP3uy2FDhTGsx+FZTEEy/enhmNb4geLMomLXcftzZH7Vh4MalRYGzE6UtuJQnumYFZWSFcRgubjwxZlEuHkMM8GN8yyIghVweOm5Lgb2W56rrctT1mbNY2Wuh652bS8aQ5cly2HqeJt9s442Yq0z6k+XoFRuN24eGvEqIjBUJcjr7dHwk5zGEoOXWay6ETtJj7fPnNR9nxjr9hPO1nL+Z43JnKaCl7VRz6/2PPr0TK2BSnfT4lscDxpMYTNrJBZIZdUyswW8Rpa453nZ694imjPwlqaTWtB7sHo1velIsrPMltGjYpYR3SriU6z/S3UHxLdFwauZC10NmEPRvbrxhM7QzroRdaUrEjcKIUZY3TE6ci2kPVT6MCvHm+WVJ9znetc5zrXuc51rj+m+tGDICooiiedG3z52RwxOpaKeOVJJjINZjFMVFkiYo9KwIbZTK6YjR4TSQlo4lqRkCif5SAz0KLT0mSaTmQx01YtRpDyJVVkFeNPJo7JEUrDeCnvufqlprqf2H4fmJ417WtN0lA+ZanKDYwXUTT8hcYd1y/VkJs2L6kAQsuWONyQ5QGhVIucBitgjJhjpkVmUTxm08IAUwPDK03cekyTQZArjemsnKs+ipHkqqYRqYkH2wk4o4Kmv9ULmMQL80ZfKdxBvuxnCwXcSdO9cfhtpHjTUhUTUzA8P4sHg3lbsvko5rB6TLgu0d1oxltPcS2IVllOGJWYosYHI7G9dUBlrwhtEinCdHLoVtI4VGeIGeRQGjGbVSkDAAml16hc28jvFTZQWI8CajexsaLTiCjsTaAPjs47Pp4a/vT6gT7IokpJ4ZPm8XVNSoq2L2gfq0+SInCRygamIOvzTXMgRL3EtwIMrWPcGJq3TuI5+7W5mRuwWIAfxXvADOvrflqBhJSlYGZMizlrKLNJ5EDWYcn75/tJj+vvnz5XtJ9F0tZjsreL1pEQNNNgmB4txaPGZANZn+00tEpU1nN1dWK/LWh+0MvnzwcRNrMOJgMgJ4XLwFsy4DcsC88MIs/R0vNhBp2BzJx4tInyTMj3dCjy/bok1Ahoavx8jAoLx
NEwhZqxCvzfxc/59faW21J2YgiWj/2Grz/ckLLJpXWBwslG/sfPfsu/2rzlp+6BV3bPK7vnC/fIL8p3APy73a/5+s0rnqYapyIRxdXspAxcuxNN1v84FYhJ0caS37Q3gMhxCh24dD1fNM8AHHzJlNGw++OG/WODai06N8vPUfHVTx4wX0l2djs6jEp8OG4obWBbDkxRsy3G5Trtu4r+IGvPPZkl3ni+DnpUpFYxdYbHrePVz99SXco5GLyltB6tEq0veFUeaeyEzg+uDybwxIY0GNQoBqRVLd4iAI9tTd9pQtQUJvCqfOZQluwrAXreH7c8tjWNG7mrTlS3E//faBkeZy0j+NHw23c37C9LPtsduL04sXeBoXT5fjCko8UeNWnSxEKMTxfPHJNIOvvNaERqGSGZDGJk42wxnlaLOc9qSjpH4gpQp7xCbSfIt3NKiqqemC4M/bXBHUu230a2XxsOf5GfS1uPLyKqM9jKo5qRqSwIx/xcUQiINGkenjdoHSmKQD9HQgfN8ZsLYjiTRf9bqd9HMvP71j+FtObv27+zlOdc5/qnq9+Vivzn6nflI39oKc3vsw//UDX/+//ze2/nD/F5Lz/3XH+c9aMHQSCbhOpPPRBURBgBTcC4iCoCvpDTFb0Ss75CIl5jGWXKlz0SgpsZIoqpl7QHM4ovxpqyIEaL0Qo4oQdwUS8gSKgyuGIT9rZj+ioy3Tjcnej739007L4uuf6bCXecSMrR3ekXMb3g30yMjWG60Lij+GuolxLvqLA9zPG8vnpxDnKKiIriPeCbRGgiqQmLX4Y7GYpDEr+ODlTS9FjILAm9mRheKVCGZDRmFMAlurWpVj5hRoNrE+WznLfFt6IAXwvoEV4Le6G+T4vPQ/ko7JVxMPRFRa9LVGso72X/qnsojsIoABguFO3nifKmm0kKHPc1qZ2NH8R01VaeIk/XrYkUNjDuDKdThT84MT0c5qmwHAcmEZKSZBudCDmVRJuE7y1KJ1SOXS2riU0pB2F15E8v7/lq84gh8rRreJ4q2l66f60SV2XHz18/cOVajqHkV3d3S1N4nAqJGh0cHyeLj5rLsqeynmOejmsdoQGzG+heX+AOAgzZbnZplescR4XphdWRzJqyMwNls8+Gyl4bKs6+KizGvzrI72r/Yi1puRf6a0X700DxqqUuJ8rc/Bsd6SfLFAxHV9MXDtMLMOkziPTh446i8jTVQLj2hI8FtmdhisTM5IqWJW5ZfGRWRo8exBtFBZnC2zYt7C7byT6HSkCdcWeIBavfRpEjtHPqCknAovl+mo0xkwHbGnytue9v+GCuVyPfKL4S9tGiorBUVIA5Efr/+HzH/3n153x2+8xXu0euXMeVa/mskLjmK9Pyb5vfUOmJRg1ENJrIWy/I6EPYMuWT3saCt8Mlv2lv+Kv3b+QYosY5z3214aLs6bxj35f0o1zo9rlGP9vF+FWPkB4KDtcF/93te9lGUnx/uuQ3391yiIr9dpRY3ewDVFnP6DzH7NeiMoj50jcGBESyJ4ij44fbHT+/fgRg5wYOU8l3j5ekpPiw3XBbt4uvidGR0nlZ811BTIqmmBYGw3EoGHTksK/5RiX8hUartDCvjm2Jv6/5j881H24P/MXNB35+98CvuWE4Cltkjtp+fCjov3D85PKAaTq6vF59kCjl1AkoSlKkmFaPnCRrMEUtXjRhfd7Na2l5vs4u0C9JgkZYGrGUvw3lvWGgIG0ysFp5do3c4wDfHl9TPmp230RCBmr6V4ZURuxR41UJFxO28Uuyljpa7EGjjob0VJMUdEWizb4oKih2v9a8Hf9wjfW5znWuc53rXOc6138t9aMHQZKB4S586o4SWRIH0qDxXqHLsEzslIVEJDT5y65NMK2ml6lOxE3AK8MQhWWhJ2mU5pACPGjEMDUqiVJUgU+SRZQFUIQLQ9lM2IueizzxVJdHvr+7ojiUNO89zbsJXxfLNNF0ClsGVD0Ro2ZsnEz+5tJi4skkMZG61XIc8zBzlIm+PaolijJlI9T5y75vErZVJCWNc7GXONWxki/iqvbom4G+skwbtxg6Rrs2zzPbun6nKR+SJGCc8vajIjpFconp2uOv+SRBx51kqupOoL8RhoNtoThkWvsg0/tQSPzq8SvwXw40NnJ6L7SA6ntL/T4Jm2WjaD+3hNcjmSDApBKpHmmKKU/WRRJjDp+mTiSbFvPM6FapUQqK6iBN5RxN29aJU5ZZJJfY/0QmzkZHtm7grz+84fQkY1/tAvvLEi6hNJ7aTPx8+7BcxiEavjle8/zUkEZDfyx43tY45+m7NSPXOi9gw5/1DM+O6YOlfFzp+aaXWNY5ZtU3inGX18KQKfyjsIeizVPrF41bdCqDWpKMNJsHg4AKvoHus8jm8wObciS+0NdMweAzi6VqRkaT8Pn+U3mt6e8rhjoy3RncZmS4tdTv9BIhGz2oqPFNWiRW87WX9SzpO6FcZRnzGgRZM2ZK6KfZQFgxbtXy+36j6JOwxWYzSz29YHbNrBAPDMJ2iCeTgRK7nOfFGDjl1Kc5Vhgoni1+Y/nhquaby1fgIuXFwBc3wtp4VR/5WfPAjT2xNT2VmpiS4dtRmB5/fXiDz6yOzjs+nDYc25LpkN07E/QJDnbDD2UgdvaTZ4I5ihwrafC7CIh84ulpw9NO1uNNZrXQGezRMI4aXOQ+38gXTU9MYi4aWss4R1vP0bc5plWPAiDoCQ7f73hfyImorOfjqWH4RmKB3tmGh9cdX90JSFIaz093T/iNpvUFz0Ml7JNBmBxhjpF9KHjeO563G2zpKascS/xUUb81qGh4vnH81Z9r/oc335FuFV9HOY/TocTtFeWDofU7Pvxp5KIasEYWdGk94UJxGDXpYFdpSS4VVJa7aExOF4ouLTHPocrMEZvZI1HSZGb2jR4FzI6ZNdO8Tbijob+VD5luNGHXclcLS+b4s4LD4w3XfwUXX2fm2ZNmvNIUe0jaMNwY+s8ndGbpRZNWppSQp4RZNd/TSkDml4ky5/rx1B+SVfJf+nlndsi5zvXPV38Xc+IPyaj4x9a/1L78XZ/7d7Fn/rnqzFD5x9ePHgRBJ+zVSEpqiWZNQRGdkTQPr6FXpN4s6QEUQnOePQiANQIV0WynKhHrwGg1vhZpzMxwgMw08aBcbjpqmfrNk2XlFTpJYx2eCvqNodwO3I/SHHx5+8S/+vId/+lff8V44bj8lcedElMjn2F7aDuLayasDbCB6NeIXW0T2gRS1KSkCIVdEQmAvUyEVSJ/kVbowRIqs+5jlGjTUAmTRU/gjgoZl8N0oeFVz/a2pWsKxt7CuO4DVcAUEa0jx02Frw3VxxWI0ZN4SSivmK4Um9cnho3j1MmyNU9W0jI6RfEM7pCW5BKQKF/fSMqNbxL8SUvlAqf7hs3Xso2LX0ead8LKmHaS0dr5Yo2GDHCqag6Zrs8kk9/iQa/XMYKa01By4z032XoSYEZPQF4j00avKQ0FtKdL/uNuK03vZmR82+Da7AmzMTwlRT863m22VNZzXbaMubFu7Eg3OdJoUJ1GRUM/GPoirmkW
UREbRQyaz++e6C4dj5cbxsfclUWwrca0wgyKFqZdYpp9U04ClJle2ETJga8kXQJA94rkYpZKyZpJJmHyMcQyEpvA5q7lbnviNBZ0oyNkqr2fzPJvrRN1M6A2CQV0OVXEdBW2Mwy6RN0IG8QfCgrBB7CtsDmUV8RiBnIgZQmM7WZGloA7kqzyghmVAZzqOaJConyOmFEvIIo/ilwrVCKNSUYSPGZAMJYJcrLOHEdqgoBKJgM1KiWilfjaZOS+md8P4jFkRgEw40dLtOAbx9fXwgr6VRX499svKUuPUokmAwejl/vtcKzXlJXRCDDrFepFEpHO4GbSlqKT12cZx5ocJE26twHdadJjwX8wwia52LWM3gqrIYLuNAyaTgkI4b3G2oi1AdUADSgd0bPHTn7OBm/wR4c5aeze8P69sFmUjcTOUj1pYckFzRAbfpPBxt2249XmxEXRc1O2aJX45umKYZgzpRUpyXUoPxiSNfi64LTNrKjMcnFHcEfNgSu+3Z64LDoutwIwP3iDry3Vg6L6oDk0O4ZrR1HIM0CrhFGJzU1HVxXrsc2spNEQrEY/mRVEcCv4i8rrbZT0GDVpAXMPq1zGniTFKjkBH+sPafEU6YLh+aLhue55VR35xc09//4XNfu+4eLXcpzNvUgM9QSuizTvFU/e0X2Znwn2hVwtrDI3SZ+SZ9h4uT7HznWuc53rXOc617n+mOoMgqiEUokYXkwiFKgikLyGINNc3enlS2zYIlGuJglQEjJbIk9VTadEGlEFUhNIlyNjb1GtWYzpTJ4Wq5D9GFw2WpyBlDzJJincXpNOmvFgly/CX3eO27sD5S/2PF82ROeo7teJpApg7x3TpYYyoHSS48n7GICQnByDkuNAsU5sYaH7lw9pkTdEx2KoGd3abFJmEOTA0piOR01LxfRZwJiIqjw+uMVbJSqZSGoN5nqgDyW2NZ9Ox/fScMbC0jYlzXbgs1v5gPvLDd2+wnx0qChxkVMlbBTIUbibxHQd0NuJ11dH3n53TfO1Y/dNpu8/egEuSiNxwg/SiM5+E7ZNJK0YrgpCmc9BmSif53OUsgfGLBvKjZBZr2MspMFVU46eVatJaCgU0WhCJ2yVsbXY09p5JJfAa7qHmr4tUDrxsG2YcuPrbBCgQMm1T0hMbMqeFfPFTq1lmjTcwE8vHnm1OXJ/tVk+px0Kht4xjAbtItoEfnIpU/9DVxGjYhwtcdIonWi2A6932az0uBGjzNyUG5UwOvJwkubdmkDtPFZHusmxP1VMgyWO+SRlNlLSiWgSofBcND1KpQUEmTZJ7plO43uLrT3DjcVmoKU8yesqKIYbAb2mS4kABTAnQ7EH0yXYKUKVGC9Z5GPjhZys8dHgTgl3SstrAK5LcC9yGd+IgXDSq7QrNIkwG8R6FmaXgCZ5I2oFypIWM+D1IkljPoMowroB2ytsm6VX2pBswaTltcdCGtnZvFVFhfUzCKMWg+fQrJILFTITgwy0jhnAg8XwlZgb9DoSdwHzZEnfC8jxWJUCBtssD4pZZpf3cRorJiteRroIKAVGp0V+5pwXIMQFeiBoK/K6/eqBo7OEJjpIPuEOCh+EifKwLTjsajbNQFOOIqF6bFD5mZJcQtWetImoe41pBayaMjg0XUb6N4HoNOWDYvOt5pc3b3jzk6eF6VGUnv4ugJJI4uLeMI01YzZ8JihM47m5PFEX02IeOsdmd6MjRkVnavxOi/l2GVGZhZGyv5Rp1WIwLGysfJnmZ3iCqU70r0Q6NAN27qiYPpZ8o665rU7s7MCfvbnnP3HHYyn3XP1OAA1fK5LWNPee3W8Uycp57j+bGK8ioVbLfhBlDQHEUoDOdP6GcK5znetc5zrX36p/aZbMjykC+Z+qzl9xomI6uRUEAGlUXMoTUzF0VAPoWfrQa/EGzQ3W7KkwEynmxsJjSMWEKz3RBbxz+CIDAJ3GtvJZ0uRL4zCbjs5TWZXTUUyrlkk9gD+UfDxY3vzinqufdXxfXuP/plwaNxXB7RUqGEIlUhcd1ULPngGYUEmzgZZEghRfsEG0NEbJSPPonqKkymjZRn9lCIUca1IiP7GDTNEB+pNBT5p23BKbSFIJezQUT7MkQRNqQ6gSYRvEd7JkuQ7LsZ9ENtB3FaefWZqvhLmxrQe0TpyAHkcsRIoy+zj4bYAy0lx1XG86ntqa+tcFu2/i4hXRvrYMV5pxJxPR4kl8InT2EXFtxLaR+qNmasQ41tdqeX0GbMRYVEmSyovpaXQZCDEKO6XsjwE+x79Mm9xAV/n864S/iJI6AbjtiB8sTEauRVAcgdjPsofMrHH5PTaRZrbNYkWgUFH8Sp67isaNXJXd4pOgSRSXHpu7LK0Sx6nkTS2pJo9jg81GnL13jNFwVXR8UYtXxXf1FT7p5f2l8UzRcFudlu3tx4oPpw1PTxviyaGmNZ1FxcxOsCK16fYVpfOfJFOE24kwaDHDDIqymmivNP5BmvPyKUteTjDtZB1wN1Bkg9ppa+meKwEK85qOr4clQYfswzDeGEyrcUdN8fyy8RSAw3ZAkmdCWL00CSmDDSplTxIBNJLhBesnCZtmknSaUCVhkMymoTqzbcL6ntlEE8S3RwCOLKNZpF6rh44KLEwqYZrwqR9HQu7rCKGU9Tg/a+R3czpQq/EG2HhiIdIJEJlFKBPjq0Cqojw3R7PI+Mx+XXt+Y4kKhsrK8xRQJqJ0NhFOoBtPKiIpgygqg63TZVr35aAXlkTqFP5keHYVTzPAdTDYo7we6oRH7ofhJi1g8wwOpzLiLkbGC4PfFNQ/aKrfFLwL1xQXIoJLUaE2ntEm0keL6QWIDtk7RUVFGAzHwrOpxuXUltmjQ6mEVml5zQdN6TxFBlneP2+JH7bYk/rELHo2AU5GALpYJOIucLwQqeUM+JlW4Z40Yaz5q+oNf3bzkeuq5b//8jt+2dwB8PR2h90bQhkxvcL/2lI9JDbfymf42uKvPOk2MLVW/Fv8KodJZr4nzpKEc53rXOc617nO9cdXZ7Lruc51rnOd61znOte5znWuc53rXOf6UdSZCRIV6pT9MObJeVDEELMcRSZz0a7+AWJgl9kgaaW+vyzlxYdgKgyjdug58jYnRUSTmKzG9HPEYpRpaZ68hWwgSFToTmO00LqXeNo9mMHw4XbHz14/8OVnD3wTblE5AtGeFLbNk+Upew8kSaQAVkbJpHIixsKEB/I0divvabUiVMKCsH0SCUj+HYkizayJLmUDykz71mBGTfGsGK+MsCIClE+ZRTHOfihi4jdtk0zX83XQSqQtZkxUj5HNu4QZLPfjrfzC1Yi2CaUTYRfo62xAWmZa+27EucC2GkhA++2W2+8kFWW4FPzv+RfgPx8oNyN9b5m+rnB7tRhu+kpTKbBdojgl4qCwg6K7y34WjRxDKIRCHm1OH1kHxKggU/oYxKdkuhB5B0DYRqHKlyK/UjaibaR8kU6z77JsKcsZYmsxz3KdiyfxFRhug5izFlHWUlRLWlGKyMTeJNq25Acu6Bu3mJPGpPh8+8xdeeTWnRii5Vt1RRfccgxXRUetR7R
KvBt2+Gj4oRcfh9YXjNHQeUeIGqMjUzBsnJwEoyMf2w1PzxvSQ4kdsneGX9daMqvhqNpbnk0j1zazQVwzQp1ZW0bkNtVmpH8l+2g7I4tX5XVlodkMVDnRYywN+5+ZbLIgn1s2E6GQteIKj9aJeKmYJkPXFowPdmFJuL3CHde0GRUk9lrPRJJe5dSP7KNgEyp7U8zrOblE9GrxuolWibQlMz1UWj1m5oQqiVFd19ESG+1Ypvbz6/P9JP+R/61Wyc8sd4lG/EvQiajUQmGbGWmmz7HGB403Bsq4GI4We2EV+VYTN/OOpU+kgGZEfGZOwkQJpUi95oplxLtsnltJHHWa419fyhKBpDShTi+OUWEU6IMiKb2wepZjnMTDJmwT6bOecTAi1ZkJDTnS+urqRFdPtHpD84OmeGuZZtlRGcFF7GZiAuLeiInri6QUNSi6x5pxY9E6EoPBWNlJpRLWRv709iONHdEIM+T/Z+9NfibL0jSv33uGO5jZN7iHD5FDRVZmVYlG3VKzYIOEhMSCDVLDigVCLBDqDWokFtCwhSV/A2ILEmIBK1YtikUvWkAjaOgiu6syKyMjI8Ldv9GGO5yBxXvOveZZ2VlZVFVWVpa9i3APN/vM7j333PPd9znPUO+3ITge006lTJWV5DLTt8pESNBej3ReY7VzScB5PCn1aP/ljv7HDn8QTvma7yfht1594NubBz791jMAd683/Oj5lsZGnoaWu+tbrv+xY/ulrvDbzw17Z2EXyNuZHI1KQuspBlHZ4oUIcqlLXepSl7rUr2z9cbKci1zmn10XEERYdSxnTYWZTGka9GE9dnmh72u6gXYUFVwAVpNUXxjhxTgwz57YKMAhpUGnjWSfCN4uFHVM1hQa9EE9V312Y0m9IR0NyVWvCqVS80XHj+QFt9dHrj45cOq125g3nnzviqRE1ijR4k8QoKSVqKdA9h/HWOY2IV1k6lQTM73UNAd3NIvRY/JK0XdHbVLU30KQW21Mk1Pvi+1XCX80hF6lJBVpcYMmcqQ9uMGw/6yk5ZRjdEcpCTIKQPlDYvd5XKjxp9cdYZcRC3Gb1D8DkOJr4n3EmMRhbNh/2LD7Q4sbEvPWcHxbJAC/eeS7b+5wkrg7bXg/OKYbizutPgnDo6W9z7jjOmbTtR7j9KIYgtbpNBcApTRMJqgPyHQN060wbzPxOuCvC0CQRadPFuLRkKMQozCXOTlLRo5WpQEnNVq0z47NT4o84akCSgoyhY3VpqpPsKt5yZlsDGITMRj2zx3H41lXChwnz9Ou47oZmKLjbtjw1cPVMo63mxNbP3HTnnh32rEfWw6DXihjMtNkCZMjR1HriwzttpyjTcyzJZ0cpkjMQJZx0+Z79dgxg8F+1apco03L/eK7gHhFCIbR07Yzu2+rZOfZXhE2Dnsqvis/RePftBPu00fu5luae/V/iVGwrqRpuIhIpu206Tx2MwffUbHN+eBp3lma51Xqco4a2hHkzhTfHF0zKiBQ76swGr0nTlI8RwQ7rYBZ8tUriAJeFLlK9SbRwBaiF+arFfBYgI9iuJmtAh25+JacAwAq71OTVpmlJAKVtUHy4i+ygBlPltgncrlWoa/nK0i2xRBaliW0pu3UBByJZ2sV+m/VBDcblQWmNi1yGY2PkoIOsKRvhV15uYwJo14H8mrkWz/fzEIaDf3ridgapsaRxtV/ZnpoyRm2/UT8xpHxtMU/y5kfhhBbQ7RZ5WiNJe09y2TICijKYIihIZqMDHY11vaZsYvcbTZMjaWzASOJx0kH7zR55muNW84uIW3CtYHffqOpT6fZc9udeN3tGaPjYep52R6ZdnoOvyeZ0/0t/Tth90PDIV7zbjNw2xx52yoI8t2bd/yNqy+I2fC/P/wG9rPM+/ASd9Kx37zTyXScO+bbiGwCvg2YEqs9nTzsfyrX+FKXutSlLnWpS13q16QuIIhk1banVQ+dRP8jCTK6Y5qbRFpyNbVx0IdwWYwOaycQ7bo7KQlkNOSgngOL5UaTNG62TZqYAmqgVx7ErdWmUEwmmkxuhOgtcVu073uDmVWrHuKGdy8b/G5aHmLddiYUBoA9FeNJ0V1YUPbKsttsWJgGi0DKJ1w3Y6Kd9aoAACAASURBVDaZlIR8bTAuMgfLaSjTZhZkNPhHPRYTSn8w68OzmdVLpLvPS5MU+jWmNLZSEjH0xeRy2Tmu/188Wzth3grDC8P268jV59pRtY+GaaeJHcdPrTbNlsXk9mhaZfIcHLsfONr7zLwV5p0w3ep39JuR+2PP4dQyn4rB6M1MUJIDtokM3xRODw3tB4vMCoKEwuTIb0acj8zHBqZq8KlAGCgLI3mNYZYXE00XuOlWhsJXH26Ie43c3dwZbSIFYtcsY9IcNfHE7XWuuSNsvi7+HUHHT5JdDHZDL0wvLEMFAlzxwSgXN0chzbIYYgLcPzU8bjcr+yka3Bd6DMdt5tBulYGxnclJ1CC1sI6yVyaATLJYCGRRk1eA3CjTBZNJ12Fp5k2z0qd8mbdhtiRxtJ87miSktsz31jLf2GV+TqVx/+7rDwA8thNfX12TP7S0dwrcnU4NqdxPXQPX3cjw9sAp7TCDEJ5bQgEdc1YwR/0cwJnE9mpYrlO4Mdyba2LnsNPqqXNuZNzd5cX/JdsC8g1nIEgH40tZTIbtlJEHlt33mPQ1O6k5rxvqYJa5ZtSQVYoBb/WXqcADUuZ/9QExeYnMPi81XxU1Xd2feZZ4NelNjXrU2JOmPtnBLOauqS1eHiXetp5bnUqxzYRdAW/Gyjr7mClXxy1bZdDwbBemSGryyp6p63GXSRXQK6BOmo1GkmcUoClglhycgi6jYRj8ajtbDtAeDWYU4sny9Mqy2w08vZkxc/ORpxNJCM6RmkjTz8xnoFqaFbiWwSoDxqi/jRnPxnEWvvxwg7ERWxhZ41EvVD453KuR3Xbg5fbIi/bIm26PK4N0N23ZzzogWzexDy3H4BdPkU+vnvl/X17hjo7NV5ndj4Svv3mNN4njpqwbWfjEH/Bm5nHq2DYTp28/83TQhe3l/y1sv4zY0bD/zDL/RljYUAApGuYX88Imu9SlflXqzzu+9xLNe6lLXerXqX6aKfKnZYb8SQxhf9VZKBcQxGSkq0kw1SBQdyNlMGUXXU1LK4BAhjybVUKTRBvNM4eV3CRiKgyMlJcUBVNiQ1LSHc7lITOjIEgxagxZ2SBilh4IadJixhqdIc4GMzmaB8EOnrCzxKu4vFe6iNhMbK0eo6hcQt+gx5RmA8GspniluSQIcS4MBMlYF+nbmXZ3WqjxIRrmaDnetkpjT6LATv2M2eAeLeMLgzuVWNKbVXaUpQAlg8HOBQSp7BsgXpXG+CYzvTJl191yXZpDf0zYORcgREheisFgGeMng5mheRQ2XyXmrTBdiwIxxTx1/7DBftng90IzwnSdmV8k5Ea3530TeLE7sr9qeb7aIPdeAYWaohMNczDK0qiUeSmRvIDxxRz1xcSnrx9pbGSKloeD7grnr1v694buLtM8JWKjjXBtXsPGaLNrtClOVk1Z5119ECysgpiRCc
w+EwZTzGr1Q1KTiX0mO53LkgUzqNmifoJKU9KTLcCfNqhVOiUJzL2DBNk55uuMuIwtKTapy5oskUtTXCQbdjGrhCz6M76faZqAtxFXgI85WOaoO+lNGzgmIVvH9icrOym2wnByeh4GUmuYguHDVhNuvnX1yMbP/KS75rTrkIMl7j2nmgrS6ge9vjrwxeSIP+kxz5ZcdsbHjUF8IkaDtQnvIp0PbBudB0Yy8Y1hv+mYZgMnixnN2uifBB7VONXMSW+vAnTUkmiIvbKCkldgwE5rekw2Ki9LTpCnjCQFHGpSERT52WPCjrLITGpCTfJC6HTOgX6/OUuPochzqvzGjMokWgAKUYbHdC2Mr6ICvRGaD4UVAoy3+j4JCspkW+6FMxlOdMokS32ZC7IyqxZgo7BUTAJ/EDWJRgFPiRoZrtIddGKVddI2iZyUQUG3fqbUdXRQ42aJEO9bne9JFlmTGQV30ntsMg0nF7F9ZHoRF7mLRF0LzWCITw2pD4hZ1y2krMuu6JZcIkjGNCtITob00JCSsu7MKLjy+XYUmk+f+Z1P3vGqPfC6eWZjJn44qMzvae74/PGG3xvfcLM9YYuBsS0ojZFM82JgiD2SLO1dxn7e8Xl8ycOtSmZ+crjmph3o7MzDUdea635g/p7eB0/HK7Y/NoX1KHp+QCipU9YlXn/jnnt/FpF0qUv9EuvPG+z48/jeP28A5Rc5tguIc6lLXern1S8z1eZnfdevEjByAUHKDqHYRC45jmIyZCGVHesF7KiNb4Zs4/paOANESknx/iCJghizIUe76spFpTSVZr5IcaZ1tz7DIsdBUClAeVgVn7Th3yUkGswE/tGskpzWwDZgbUT6f/YvxWwzBJBFvlGAoNmSRkOsDb/NTJ2n7Wdc0b47k2j9jL1OWJOWXfRQ6O4xC4erjuMLhzkVvb07kw/1UYGT2eCejQ5fifcEyJuojVmrshYkc3zcLkwTOyoTYt7orrSZ9edrQ2WC7uo2e/V3ma6EsFFQoNLz/Q9arn8/4w8REzPDjeX0xnJ6o83EaeeJ0dC1M+12Ytw7zLjKi2Tf4A+yRJtmqTGqVT6g7AdrMw+HnpyF02OHvdNOdfOV0L/LtM+KPrjxbCccmCf1YqnNdOgUEDh+VnZsPfgngx21AddI3+q9UqagE8JWVCLhWRhA1ZNDsjZpZuYsslmBkzo33VEjPDV6tXjILD4MCujFLi33iMxmkQTZk9HI6DYRfcL3I42LzGWeTMFyOrQYm2m7CeMT023C/v76HXEEkhB7bfznKzCj5b3VnW2RzIvuxDdunxh2R774w0+Q2SweE/NkeZgt2zczt9dH3j222GeLLccYo5CbzBQNxkdSK1iTOExF8iOZl9sjfTMTk1lifkMBLdPBYiZl8rijHnfYlPup4mWtEBsYPg16Hxe/n4/sqct7m3vD/GAVbCirtASd7/1dpruLCrQ4OQOKDNOusEWKD00WHTNYJSqgc1SC/rwp/+hOKjnzByH2ZR0S9cPpP9S5UIAvV2VMhXlVz2Eo3z1Y9buxWUG4c1+TMm9zmY92Ur+V+npyOv8rM4wEFDlLrFKZxAogB7PIn8wsi4TIPZqFUVLZLjWNS5Lg9oa5bVWKcxWIZe2VkrhkRsE+W/LRrOs0aqeSXfGM8gnjI7Qr1SWNVoHlUSWEdgL/vP5yyAIxGVIWvh527EPD09TzxV41do/7numxpfvC8363Jb2eEJuVHYj+fjIm414OHH1DbD3tB5DY8Fzm47Pd8YVLGJ/wTSAlQSTzybUmNv3ktz1h1+L2wnSbyNFwOrQK7qMsratPxvNfaZe61KUudalLXepSvzZ1AUEyiyFl3U1UEEQlKKSzHcx1812lKjZjfCJFBQAWY1VBdwvPfiy5VIznyoN2ZJXS2HUnNZeGxIxGm1QpO7lGgZGF9l0kDnGbVkPJIIsvBihbJYhVfwGrpox5OR9ZjFclrKav9c8aOQnVMwSSdQxbt4JBhSEjNtN08/KQniuwJBnnI9xG4pUhTZZ8sguAcM68ydUg0rAARamwbSJWvVAycJU5fGMdQzuqMWht4CWtRq3NXhkSZBhuDbEtDb5dqfrbH2Xap4jbR7IVNnPGnyztXWmerxzT9Y79y0TqEu5oNOK3DIHfF6PXSqDpVI5Sd77nnbIQ0miJH3okCjf3ClYAmDkrw8MJ45XGo8Z2lRdkI0jS3WyNHVUj2+mNogPd7cDpuYXJaFToSc9NsoJEUBrCo15rbT7L8BfZUf0uKcaiiDaJ9fUaVSwBJBWZxLzS/82szfDsReevKPOjjomat6q5bjgZ7kerZpihXMeDxtJmlznuHNIm8m3g9EmD39e5ps24KVIOjajNuHtdwt7lW46vGr5180jnAv5qYn5oF8NOZiEPDfe7nm074W9G5tQi+yrdEt3AF0OaDFM0hNlhbDW7BK6OdC6QsgILo4tMTsGs4DInZxlmNSOGAl6kddnIxXNj+/bArhuZgiUmw36vgJuYTN9PeBt53vcMTw0ymtWQeVb/kLC1dO8N7XPEHRNm0jckJ/jDem+omXMmFkmRmTOxFaYrw3Sl983xG3kBPpsn6N5D85zZfGGYrllMa0ORq1RjWFP9OLx8JMmRAH4v2CFz+tQSNhp1W4GcxUdFCtPDZOarM2PVEVIBKlOX1aBzlAVQS35lddR1y8wscpoaN0sua8Pim1LmuoXoM2bS8bRPjtQnchfXNdgW5l8yGkt+/DhELfZq4JyLkXWORhmFhakhLoOLEHSMGWWJGgeYrzN58PzTu1ecRq/Sk6dmAYolgA9C9w42XwjHfUfsMmFbJwJknzCbwO71gWPfYv+fnvZOMJNbjrFOvOkbI00/M0x+Oca3rx8ZbjxPzz2+CYzve+zJrL/vjOOfmDcM02qOfKlLXern108zNf4sWBl/UmbK+fsvrJBLXepSv2r1Zy3P+dPUBQRJop4cJrOkJJTdRvGJPNr1wbCKxmuDmvXfnFeWgtTnR5OJpcFLQdkZOQs0iVSZHKMp9PQKhJRd9PpdkzZ75NKAleYhl6f6nMvx+kS61d3IlFZac5otTIZ8cmAy0SeYzx5yaxXpwrmvAIAriTeSdTc7Bd14taNdGgs7lYd7kwmbhsmxNDewNhOmD4r9iCanVPDDHdfpp022aDNRzSbPgBiyNvapyUwvC9jiMuaklO7qO6D07vr9ELyasc5bbUiyU1lIBXnckBivLKFV00p/yvjnSPehGATeOLKB00vDdO3IFtozjxMzQ3NIiwGlRJWnVAnDdCWER21ubn4/FoPXuBjcho1h2hrCBsZbBSnm67Tu/s+rseSySw6LbKnxAXurTJzDVadeGcXEsbkvjJlBj7N61GSrYFDsyjg5yDMLILfsvteeK7Cacjai4MqgzXK5NKRGMKMl9jqOkliAoOZRfzY5ITVCuG8UaCmb55W5k62UpKAEryb2302455Wh1N5rI1+TZbJd2SzuznGQDfftxBwsbTcze68JIlB25WF/t2HcOvp+IgZDuU0xg1n/NMpuiS4TTRlwA++j4Xp3WkA+a5OCfCjgJ9uJtp3JW
XAlIQdWI9AwW2xhlLzsjgzR0djI9+fXAHTNzHdu79n5keOt527Y8jw2nMZm+fkQDU9vHYcni39y9F9l/KHcL3Pxh4m6FknWe6l50kGSlJFsGV4oQyI2MP/GhBRgdXj2TDeO/ivo7hOSDKmB8QbGF5XVUwFX8MesYN5Zgy9Bj6N5Tki26r9zfWaCW3HgLCoTFCFuE1NZVNxBCJtcmCbFa+YcpA2yAB3uUBJ7hnUuzzuVuilLZmXRVckRRiV3NcHJHYUUDXlvFHShrBHLDV6+N67Lfypm0znJKv0TVmNUm6ErrD2ncrDUsI7Bywlmw8O7HXJw2JOweTynEarR63QNmy+h/yoTO2F8UX6neEAM4WSZfOT6+sTjNxv6H9szRo2udxIgbD1mO2FMZpx1oX+5OfHXXnzN4ZOG/dzye198htufAW6TkO97zOmnfl9c6lKXutSlLnWpS/0alPnj33KpS13qUpe61KUudalLXepSl7rUpS71l78uTJAMMgvZGD6KtsioiWiRi8CqzdetSCAYDZVxqei00/q5onTqNNvVbNTlJQoy26heIkGW3UpMXiQ5qVHK//LdRaJQd+dzieiUJmGbiG+C7kZXIslkmUNbvrvIekz++ByL/CF7oEhZYvn8Kak0g1TYAyXas6ZigHoISAQThdBJ0fGzUtBLXGjcWJXUON2Zrjuq9nyXUVYmzkJ3jyzSmcrumG4gvKiGHspwkdmQnbJqdCdTfyZ26o8QO73ONfWi0uUBpmuVaSSrO6fuAP17oX2kHEvGjgk7ZMKDYXihTIglarhTCcW59qnZZygSBTsJ5iv9zv6rEUSva7I1VgPmnUaexg6mVxFzOy2+KzFYxkElTUyG5p3FDkL/uU7G53iN3Ey0/Yx1EeczZaiYl3kr2JOQq4fHufyFMqXq8WdRM0lWPw57Wq9tbEsc7Jn3ip1U8uD3yriJnc6BaqYpOUMEk1Qm0TysrBRQn4vQV3aQyg/2W4v/xpH4phibzob5XUNzb3BniSvnc01OhoenDTGY1SizqRItyMng3nvCyXJ8abAukTd6kkksRME9qxmwGc0iQ6tjlibDY5TFn6HKv8pJsu1HPrt5YOMmnIkMUXfda9LH++OWEA1D0AhiPa55YRCkLEzJMiWNVf10+8Sr3vD1UaOK52JI3NqILUyTH7+7JT/oZHTPBncwi1TFThQ/l7yOlcDwqkq1MrvbI9+40ljV02vP3dsNj394xfX3De6UCSIMbxLclov50IBk7GBo7oX2IS8Gr1DWgwChN7ijfreZZDHyjV1heJiy7mZlfYSbMt97vY/NXFhvZ1IgnauZ5FdfH3kU/DHTlqjogzF6j5mVeUGG1K1SEkqctx10XbGj4J9huinsrZtMnlb2SdhohHg1ElZmlCzSrCUhKK5zsRoaS1bJIm3ElrnYtjPDocHslQVSo9eX3y8J5pvI/GkkbBv6r3WtXtdLZdSZUZik5+mFxb8+cco93Vd2OZYs+pn+yTC+8NxcH2nKutLawJv2mZfbA//g4TukLhFHWSR0koT2Pi/X9VKXutSfvP6izF1/0e+/yGUudalL/UXXL2zU+t//d3/m330BQVAPhZTPDAqLp0GmxDCWZJX6QP5RCsxsyEWykovEISXla6coEGQ1O2U1jrR9JIWkRnTF74Eki3adVr0+cijfnwvdujZ3on+vVPYYDeYMiLE2M1d5TQFXxKZqSaJNYTX3NBnrzho6IHRxNSGcS/QrBjuxRKvmamCIplaozwmrCQLASUhHlXmEjZok1vGrcZjVdwCKdr88iJPKuMrawH3EXSrmiDln4iYvqQ6xWz0DkFwaciE22syYSZSyDgyfKPgRej02CcJ0Y/DP+kV+n2kOBol6AZIzhNuVfl8jbet5m6nKNPQY/DFjB6Xlz9ee5IVYpDcAyWpaTaq+JrMQ926JhkUyttfs4TgZmmdN0mhK09c+WE5veqZtS+pLkpEATdLoZyAYoz4oocgAiryl+qJkWRNCqlym/h1Y0mrquVaDzeGTClyweDO4IRfPjtWnIdyqhEGBE8EeSwObanMuiyeMP2TcEU5vHeblwPVWkRZnI89XHYf3G/x7VzxJhLAr83+TsEfDfN8is/rpGANpU06sjEt7J8SjZZo7jeutc7VJxWBT55qEgsnVcckKssTckHwidZHoznwgJDNHS8gGIxkrmRs/kM5iiPeu5Tm03D3sMDZiTGbTrV3mNDt+9HCLkcymnUq6hybpnNerfs+3Nw981t7x/s2O3z++AuBHz7c8nTpiNKQkDLPD+cDz4JdjTKPVNWSwyDboMlLWjFf9nk+3T/ygnfiQPuHqBxo/zYuJt68VFXy+6vA2Mkye423HfOewR6F5LgdXfEJCD90ddB8S/Ye8euRs1U+lGgenrGtMvFUwKvcZ2VtNJsrlPs0rCIroupF9Yr4RzGQxQRbjVr/X+ZcaNbtdkmxcNXTVNSB1ifxssUUm1r9PCxiRfPESKXMhbjLczKRyT8udX8Bc9cdZz72WmUSTjARSn/FdWOLLp8mRZ4MpJsTzVSK9gFxAEjMYzNXMZ5/e8fX1jkNzjTuuYEvyumabWejeWeZjh/udJ9w395zMTqfzewXzktP0nfmhYewnPi2A18ZNjMnxHDs+DFvs1UxwiXioiVJ6X6aLJcilLnWpS13qUpf6NawLCILuMlZvDihacg811jZbFh05lIa8sipKNGxm9etYqjBKlkYrysrEkIz1+jCfo6yMkeLZIT5hmqgpMVnUvPXMtwRQYMNkUhRStOQsa1NWAJJc0mvEJqxLy4M4qE9JigbnI9aquWv1O2ja0pRkmCfHfPSa8CBmidwMPQpKFF+RJdVk8cuQJaFFm2tRb4iyQzvfRDVzbfIK7phMWgwC9cOqNwBAatMCKmmTkhdAqF6XJb62jnkUciOrh0iQFYRockl0yeQ+Ik3i9ApO5Ri6rx3jUZY43/Fl1ua++sfYckxBwIIZhLAxy46qfxbsJIuHRWr+aGNR/98E2P1QO68KssQG5puEGYXuUei/ythipgrQ3Wf6D4ZpK8zXunM+72C+yYSddk2pS2QvpFGTKiQKJq1MD1N8Fkp687LrXoGimgQSthk7FnNVgf1vF6+JUdkHbm+wQ5lbBoa3pWuzqMFl0F1p19cb4ux+Qr0d/EGvpx1guOuI1/riy5sD37h5Yt8PfNXc4n7S4J+EsCsf5RP2ZPHPDilGsOqvUprvm6Rxr0+ZOApmMkyTX+ZiutLElrRJOqfM4lS8jImdgIMheVFzVJfW6NY2cjy0/Ehueeh6ej+zdRPpDBFMWZijJT006tGTBPcm0TY6juPk2N9tIAr7TWC8dmyaGVdAipSFKVo+DFucSezsyG91X/OimIJ8b/Oep9AzZ8OUHHOyfKt/WICYOVse557vP7zm8dSxbSeeTy0/+PASgNYHXm6PfHP3xPA9x7PckA20/Uzn9Bg318/0Trv+p+uOxzcdz489Q/H3kSSY2wnfBJ5/uIVs6N+nhanhhkzolBkSO4gBjIXU1Mmn97rfS0kqKuthWbZiZeZlIV5FxjeR5AwV
3XUH9UWJrXrQZAd2WEGY2Irew9uo177E5WYD3YMe4/hS44drhG82EDpzxuLLan5bvFHOwY9aZkKBcJ8xB8ts/ULBktFgS8JUajQ9qtlNNGUeDKeGOBtiMnzz9okf/oZjuG+xxaC1xlDX+O/2Xjjc97x8+0R6o6DhNG+xgxplZxHck+Hotux3+npjIr+/f8XD0PNhv6FpZ3ITmHu9Z8NsmN8WFsulLnWpX8v6WUyRCzvkUpe61F+VuoAgsKSKLP+fNB63MguyZG3kKn4RlBVRk1qq0efyMJw//sWSa7RtMMtrabIYn5S54RR8yMlQnRqz1Qd9scXkj4SxZ7+cJGPOJTcJ3cmurzdpSbzJUT8/RrMmt5ik5omlmdfXIMWVgeDK+VXgJPtMzCtgkV0GfxZ/UR0L61iWmEj/LMWoLy9GqgCyC5DBN3FhohiTGDul96coGJuxJunxT1Yf/u9WFKGyCHJb3VTzYg4rNpGN0fd4vT5mtDqeZ8kolflA1p9p2kDe6EkNplPavlemSW6jGubOlc4iev2iIE0ktoZ4vbJ/xqOyZ8wkS/TsOVYmnDVSWbj+ItE8x2VMQ2eYdgZ/ykjSGN1shdDqG7pjYvPlROeEsNUTGV5YxqNw+JZ+UbyKSzMERtNjpnXO+2c1ck2NyntqpGhtQMNWzVLDVSJbZZVkC28/u1vOIybD475jfOjU9Fcy7Te04RLJxGiI0TDsPDKaMndWuYJMBrs3NE9FhmA0+SWV9JavTp7TywMvNieubo88Pzn8k8Udyv3UWQWcBmUluYPeN/NeX987s4ypmcEJ8CgL2DQ5S+5qSkhWIHQ+k6NFkGRwJ0gTpGCJm5VRlGwmD5anacuha3Eu4n3E28imUdBgKkapBNFY61lISdh1+nqIZmFdpdFyHBpiMuw6RdS8SYzB8XjqeBpa3p12fHV1jTclsrpQBbwkAhlTLvCr4pbpJUILh9Cwa0ZyFp5PLaf7HoATsN92fPv1Pd++eeTxnx+JSZvxUzHUvGpHQjJs3MS3dw/8jZcn/vD2BU+jDuScDN+9vsNI5v9w3+SZa7I1C3PJjplmr8yjMAu2UdCuSj0EWRp8EtjClqhLqovg7jLTSRiiJbyemd4kKAa2mx8L7pQ1PnsuTIgjS8qQxucKw2vLfJ2wo0UinF4b+neFRTfq+5JTmY87CnHvGT+pWsTKelOQJdZY6Pr7ocjyJKlU0D8LafAL6FjlPqmBeQtEWUBoAN8E4v2WH/3wFZtPjmw3I4+joy7NdX1NHUiydO8F/7Xnod3Sb5Uyk7YRCbYw47ICse8tn3cKeD3cKmPo+NCDybTbCWsT262imI2LvN09M3QnLnWpS13qUpe61KV+3eoCguRCfz8DQcyoVHuJ+gBpZiFalqdcmQWZhNyKSgmqjMWtn1nlLWTAZcRlspSEFoDREbuokY+VVu8yuUgECOpTUbQ5QHn2rQ/aFmJpGCrbwQxmjYLE6LHVj5iMMk7qg3RhskhJsjkHUmrNBeAQW447o01ipW37hKnRkJLxPuJsWnaeRTLj6JlvPPloizyFJcbXSMZ6Zaec+ys4XxgGjSZwVInPIXUfpfVIkcsApGy00cdo0glFnlSZKUbHVMrO7gKCGJXnyGiQwZKSMA4O0+sxmO2MGLBFS58KkJRKJ5Inu/i65KRMF9fNpMJ2SDshPfq1mbYFsDEfAwAkBWpCLzRP4J/0JJr7TFEOcPh2x7zTqN/k1zFoATMlmsdA7CzNPmFmTVoBGGzxRLCZ1CfSIORhjQ91g45DyELe8rGcCb0HktfjjkDcKAA2zqXxbCdlCuzgMQthdItMDMD7SOtLtGw3k5I2fdf9UK6dMM6O/aHj+Nxo7G0uDeh9AQU/NBxeeJ7f9GyuRridmQtoouNqPvIZyQ7MWJglaCM73STmK2WraDMMuUSv+idDiELqo7LAXIJk1hhnpwwxexLcKKQZspgVBDHqKSKTIc5CFM8ImE3gWHb4rU04m8i7gHl2kIRp9EwFJDEmQxt1uvpESrKAEAAbP7NtJg5Tw2nyfDl53u23dOV+aWykLRG+c7RM0fKPecvG6+d/un3ixp+YoqOzgYexx9m0rimTISTPl/6az17e86+8/SfM2fIP3n+H+6MCJSJ63RsXedkfedXu+c3tHYcCXB5CgzMRL4nfevmB7yfhqd/RvFcAqL0rPiJZGRpm1lSkpQq7okqpUk1EqiDIQSOpzazA4r5z5NuZ+VYv/Ckq6OpOa5KSpIwrDKXuMeIGZVvt/1pkuhWVuG2y+kKh88KOeix2gP5dUkCwXIewzUtyV+zUZ4jMEjVsxrysOyaqz0bzuHqMVJBRGWCCmR2n3J+Byerv0t5bxrtrjm8nlS9VsF0KMN8kwk4Ie0t7Lwym4NV1rQAAIABJREFU4/CmIrr6+fakzC1Jhb3yI71O+0evMbuDIXWJyXjEZjZbBdxaF3jZHjBnzMNLXepSv/7183xEvvm7+RfyObmwSS51qUv9Zai/8iDImR/nWTyg7iarTEJ3/czM4vmhGnUhF4mH+loI6RxJyag3QYTc5MV/w5z0QdoOQpxUChJ9UuaGOZOFBFG2QZZykJSI2/LxTh+8KZr5GmkqBSDIPsNsVlJKkCUGVE+2YCs2F6nHT7FZBPJcmDCNWaVATcS2CggYyaQiqalMhuRW0KJrAn4TYTNw2jWMgycVNgdAGhy5jeTBMeaz7y8AgWsjMRTQIQnp6BRwqECP6H8kFWDqjD6vY7Du1Ks3QInJtXn1XrHKXpGs4Jcc9fjCrjTfOz3fXOKHU9mlX7xJorIFss1Qdl5DPrutTFa5SfXaqNf4zIOlxusGa3j6nmG69vSlafTHhH+OzFeWx9+yKkGSNUY4OcO8EdyQ8aektP2ssoP+Xb2YhulaFvPJ83kOa8O5xPJWA9WF8YN62czruUoUnr9/C8Bj8VqhSsYi2NEwH7YAjJuI2QSMyXT9RNcElW6Vo+tcoLUKlOy7lsN4RXNvsCc58z7JhC+F413H4TsOfzMyvw6YwlBoHoTpmgXkktJ8urKRrea+mfFlpv2wAkDVz8EdBf8kzFdCbCFuCxunTkubCRsKS0GWn633dbZ2IUJhRA0zZyEFYTTFk6OL7G6P9DcDp3mDiIH7hofyGU034/uZnMzCCgjBcCgRuSKZjZ/ZtdqonibP8dAx2BrTC00TSGUNmGdLDJZ3k86lLzbX9N1M4xQseRpaZYb5AuglXRSG55Z37RZe6j1+mBrGqXhFZGE4NWTg6djxOHa86E5snE6mY2j4Yn+DM4mNn/idV+95vz3x4Y3OheevNoQvLO6kIBWi12XxBJKMiEpWFg+MJi+ARvvB0D7qvd7dZ2JvODQWisHt6AzTrRrESlkT7U4WE183GJqnyPZLOHzHEq8jcZsw25kjymbZ/aFgTysQ0t3rXJhuynWsoLRjAbyzASrToyzJ2UEyGVNkaHXtlqzMKzMryOKOgh39avLrVBLUPGX8szA+t4sXCuj9mlplK2WbCbuM/0ro3gsjeozhqgDVQbEbNXLOC3PKTqvkBwwJBeUOcZ07h7Hhvhj
4XupSl7rUpS51qUv9OtVfeRAEWBv/n5KxqDlcoTVPQqJqwlH2QGlu687e4joK+vA9FWnIJAvjpHpFmKlQor2QGlFqvTtDZEBZGQndma6Mj9o7R0GyLGZ69bgq5Xp5f908rCkGZ+corKCJnm9Wfw5YJSvVlLV4bxi/uhyoFMeoeWsUEjAnWYCco080m5ndZuB6MzD6wOHUEEaddvnoyNEtGnmKsmbxHLlShoREbShN0nFf8BKvYEBGQRBTdvVrM5EyiJElKQGB2OoYy7ya1dZzlETR6oMpjJg5CbEzxHMvlsEu8hsp567GoHqcyLobWw0/69+TAINZPCeyzSwpPS4zfWdkfOM4PuhnuL3DDo7pNjN9Y1J502CRcq6hM4SNqGfFqKCXGzJ+D+1jRZv02AYUvHGH0oBN+vrwSbnGlsXg9DzlR0oahj+YxTxSIuw+XxGzbNW/JHndea5zGzQpZL5yZAf760aZRAke6tz0CgB2/cQ8W8xg6N5rksyyg3+nE9sfHOA4fi/jr0bGt/oh7YNjuk0ry6b49dgip/GPBknC/CJgJqeAV2L5fP+kPitho0lHwytH2Kal8cw+kzeRqTOYScFNmVcgx47q+5KaTM7KyJIguMkuzXzsDMemZbsZma4n4lND/7kjFBPe8ROLv5qIs0p3clBZUfUJPjUtm+3AdTfibKRxwuzswrjJSQjBEqMgoubIEEn70hgfe558C03C+LQAe8sNVefiLBxOLf/b/W9wnBvu7reLjC4ESzw5mAwnGk4fer7cBna7odxPcDx05Ayb7chff/Ml37t5z29eq3TqD65e8kX3EnfnF7PP5M4At7Yw5rqEnKyuO23CdcUvwzdIdjSP0DxmmofMeGuRF2WUOpXvhWCJz+rDIUEW8NkEQ2/BnTKbH1sO/1xg83KgcZGHWedK/nGDGzJmUiPX1BTgYCxylWcFD8Km+OnUpK7KBAl6z6S2+Euh57iYDxemS3LruuSfWRKnsqhMa74SuneZ9lFBk1lxJKYbTZMKWUFXBYkK0HNX10Dzkals7FYQDxSE93shlXs+T8oETAVwm21DHAU5/JTP1aUudam/svWLpt38aVNxLkySS13qUr+MujzhXOpSl7rUpS51qUtd6lKXutSlLnWpvxL1S2eCiMi3gb8L/IvA3wR64Ls55x/81PteAP8l8G+W9/x94D/KOf+fP/W+DvgvgH8HuAX+IfB3c86/+4scTzXAE1b2g2qoS2JIiTs180qDj8U4sRrgLfrvykSQEhk7rwwMM6/vq2Wm1UCPbFQjXpNNsu40AyQbz2Qx1QuiIO1RFvZI9unjhJpC1cZksmRSb1i2jcvPLsfss5rpVamLyYUez+rBUUxH665wLF4WRFl2XO1xlewAzFvHw62l3040LtC2Ydl5nmc1gq3xqZWNUffXpch/ZBZMUN+VZNcxyl53rrPNGMzq+VGGpmr067/VaE0JssZhRrOQb5TNoPGt9Vq7Z1T+Iuh3leu+GMiWkqxjaSZZjBXrNaieGvqFYCezSE7qnMgGUpeR64TrT8w7vTVDFHI0tLuRJgsxWGKQ1bTUZ1JbmAnFcFF9AIT+vV6I5vAxY8MWich8pZ8xfqJxzHV3uJYdVuq8ROjeZ+yo42OnzOYrlUCYIYIRYmuJnSH0Zk3UKOM6F++O2Lk1gvcs8jMLjJ+0kJXWv/k6YafVRFYykDL9u5lsPanxDJ9lpHi3hN5hT0LYJqRJtP2Mc5HptqSW3O2wR4FXgfG14Iqpaqy+JaI7+N1dIvSCZMNJpCociG3Cb9TrJSeVZ3Hm9WIm0QSSk7IEYq+78HaQhf0lD8JROg5vhLabONLQ3iuzAMCePOPrIi0z0DwYlW/VC2Jgv23Y72b14ymGs4skJxpmWDxqTB/0Hq4/Pgp5tph7pylNJSp2pbMUVohPxGD5wfuXjM8tMqwStujyytzKOnd5cDxd14ijjEwqn9tPli+317zq97xodNL91s17puC4a7cMo9XPjit7rBoPuyYyl8/XCO/iyXM1c/KZ9IUnG5UuZZt1XUElQ30zY03izm6JoTBlXurnP/uO0Bt2nye2X2QO37HYF4nWB3w/l/uiwcyCPyRGLzx95vD7vKwrfq+sODtpAk0diyorA/AHyEdWCSWrDG25J67Wtdg/f5walV1mvFXWVveYMFNaEm7MpAlXZizpMo3eQ+6UFyZgcsL4AnL1BXGQm0zY6neaKPhHvdyxJD4hkMb6HSolW5iFv6b1q/Y8cqlLXepSl7rUpX459Rchh/lt4N8C/lfgfwH+tZ9+g4gI8D8A3wX+DnAP/GfA3xORfyHn/PnZ2/8r4F8H/mPg94H/APifRORfyjn/wz/2aEwmXik9vz7oV+NOTFY5RhIY9KETtEFOvlDuz+UlVRMuLBTpTF7BjrjGoWbD0lib4r1hZkqCR/n6WJtpqz4W5w/J1aQ1aNMlWb0aKq1csizvQVDT1SZw9vHkKEq9B3AJv5uW10UyKRqsS8SwdqIpWtJi7mq1eTGApIXG7UqUo5nAzIY4tRx2juFqpmkCzmtDkzeiHhvekjZFulMSeYDVq8CbFVepkbiwynRMMf7kYwBmMcmUMjZN/ljGVK6ljluhrrd6ndxhfV07MQVVqkzELEhLOYZcaehZ02jOEmdSC7EvMbrl3GpDoxIl9Q+IM8ymZdxFTBmjpp+LtCFx+HqrcbQJUr+m4aTrsBh4yqDymOwNlejVPGbsBN0H9VaInUb+nt7qZ8Q+gQE7WB2HctzNcx0/lXF19xkz5wJaZUKvJ2lFSI1R01ejYxu61XdDIjTPCqDYOSNR6f1STICT1feOX2vTb2aNOZWYl/thvLEkJ/TvZvr3gdh4JDZLYkfYZK7/APbBkxoYbi2mDzRdldEomLN/azFXM8HqoIftOkfGW9Ex8ip9MJNgyznEnZCKV4dvA8YkjlUKBkhSw972ESRmpusiCwrrXOruE2Y0HKeO+TNdc0zIS6yw34MdLbHTxn77uUa6LhYzCb1XvCU1JfXj7HZxJbbVTAXQ6x0klsZXShRy90FjYUMnagxaY4K74iPRRHKC8anFvfe6fBSgJpf5n4023zpvQO6LlKTIByXB7Cxf3F1zvPJ8aHSgr5qRXTuSbiFEy37fkU5uAXLFJ0xJyzJNJI2WPFjGpAiDmEz/4sRpFmJvCTcB6TSFB3Tdqmaeu83Aqcg7brYKwhw2I8/tFZIcm68y7tGy3/S0n8RlXRreRLJRzeP0AsbXkc0f2sVfpsZTm4D60ET9e51L9Z7xe5VYzbuq3yv3Q13rS8qSLh2r74mUn88GxltongxNTot8rXtMmGCYd8L4Qten5BWYtMXnJxs9tuQylgKOjkblgOWedEf9ux11nsVuPUYTdHMg/foLZn+1nkcudalL/anlNLX+/8hq/iTffZHtXOpSf7nrL+IR53dzzm8BROTf52c8dAB/C/iXgX815/z3ynv/PvAHwH8C/Ifl3/4m8G8D/17O+b8u//Y/A/8I+M/L5/z8MsDVvLImAOtUL5+DNjk5C9EKSkPQB8TKoJBUd6lX5kFt/mKbEa
Y8bfHKTbeWExBym4zU+Kgrje98SrSI+JKAoN7xR4aMafECCImYUQ4feM0SpiNZBjIKzSWqKf3VFJ98LCh9U7+2hdEyASBQ/FeMJJhBsGkzZEwXwQca3erURzyBbO3hd5HPQj98AIctE7OjcaAgJeO7P2QgI79cfxbtH7tUBkAQwh5jASYnw9dVw7HnbBAhdBsMVvoj9eNXt8P8RGe/jpJY+PUaBIBscIxgmJUqpMICJYxlU+lqo5PNo1dH9nD9HPIqHe2kETKIg6Hj8namm+gzX337zlycgZKrv+xoDIxMgMtVU/3Q1gSCkhs8zStAQE1NcQFe+d+If8IdkathpmoMZYkpHL9GhlBny/DKrUuRhfnG3e9U3c3kmXDdDnCJaQJEMpPgyEkcv0jKbmsCT7AcyigyNWl7gVYr6jSYSD5r9ISVNNJb6xHJnvuXObMdJUbOZF/1M9K4uOBxSo9XqYaZSDd4pEgWrekBIN/Iy3y2TseJM0Z4o2pWmvIoUt5HVR4GoJXHj7z34Em+8cclPzT/g7msbvnHyOt+4fsCTawEgDusStTUSQ3tQKYVEw6UM211Rsl5IJKurOs5XOz4szzkthO1idaALmpeHBY037BuXwIEGc7oBoH2oWe8rbm8r1KVj8bGmuIms3k3xresFLx4UVCc18/M9O69QwfXXye4gNyrRSEPUtYruTmp8KwGGojLg8+y06n0coolQJvOBhCIEHeU4gWg0KIU50Ptq6A7UIY9dGSvtxtDNDN3ccagCh1WHS+at1nqc85RFJ31OVDQz25te+laim8M8RSgngCIzOdxGy78TgJOvt0mfx7z7IY0zk3xzehPUKA1ZnrXODI6eCaLQOzGTtVsxk3VrOa2DHwN9xDB5Vj03h9CDRrI++v3NY9XUwioIpaabGepTT3fSMJsJ0+P+3VtCVFyfzlhfVTRnhsO5o7qUFVdXFfP3FDy/hM0G/UyjjcYtxH8mrBaUlwt2DxzNiaKbW7q5lfjgDNDkPt9FOhs5lBWq8r1BsnXCsjA6MC8btIr93wB1a6mKlvP5nsPKsqsLtvuCdp/Q16BS0kq6V7PnRJ9ABbf7isKKh0x+juVUmeHCiOFx9gIJYfAVyWBGNwJJrPO9uWqwGm0Czok/TasYjF/zRc8EhzbR5RSoHEHuSKatidGW75fRWMkMoZzKFRl6/7HvjQnpqyMwLeqBpRH1MYuk98VIP+dbMt/rus0LDuOsB7bj8PtDQMtIbOT53JEA4cyYSr9veoZVM2xL9kMRrRrGdDxmPaEFfIn5eo3MT6M9Xk6OWZJoNLr/Jaa8AOiqU8fsj/5+ioS1+w7D2qmmmmqqqaaaaqrPQk0gSJTEgDH9X6ekicZYggbtPMrEfgaUqOTFX0XCUtMsLe1eKOogTAkBB9KLrIkEE4YZQYSirLxK4EGK4g1DtCqAUQKsRAWhkO/G1O/4IiYJTAQzvND3s+Nx9H/QxyXGBHL4TvGylUbnYrZjZlsWrqZKJqoHb9m2JZu2YFcLONK2wibo8oxlBkcaLaBMbnLLNFPsAt0ZtOeW/dpQvRjAEIAPPrzL/zX/Yf6lO7/Hj1Yf8075lB9b3OPx3XMAPtxf8Hh7ysfPz2kvS+xawBC7TdfJK2FCFDKML+cLXlroVumgS7moaiufq04Rlh269CxXgiJcLHacnl1zWDqerxZszIrVexq3TvT/RlHvC/YPNeWdPXrR0S0NNrFVxikTMUjjrw1kPYxSHkyQpgad4kjjkLqgYjIAlZ9jACzE1H14kxooJ4agPQDRj9WIOcgYLhII4wtDt9C0yySzmAcB9Ao5L0rLtkLax+h130nGvC9a0o/kC+k+abPU5LhxyuNOd4ipZyvH2DOjojRp/RgZU/ll1dLsak1Uugd8ogZfpmVSnGdw0hSPo1BhYKn0jWkCBfvGNpm+mkbYW93W0m41m/OUenRXIm6z/Ol2OaM5L9it5RwVN47TxQUnf+TQz6+J2y1hvUNtZDCqm1tm2zPc5pTmrKSba+qVppuPzqOcdnwhzwVfGkJpJeYVqIsEdLrAtqqOkl3kPCZQSnsWs0YSnyrHeiYnqekMXWfEYDXH4MZ0TRBpym5bslcl1omhr7X+yPtSZ3aHDuioCFERkoEqCCAC4FN6jdKBsvRYO+6k02XWwvYIIQ4pL4oBXOl0pkT0+ItKYyQGJT+7hHZlgCA/e6wABTolKYX0XFWdyKaC54jh0BMk2uE6BNPjjn1MMAhYlI17YwPKx4EJA306WNSqB+rGYEoej/JMj70ptlbDgM/pN/KzOnbnCgJmRBMHbNQMQEhUStQyUfXpMeNtAwODsL8YAshnxkxmeKhXWSY9yA7FpZkicqf6XNWUHjPVD1L9cckyE0Nkqqn+ZPW5B0FUpE/s6L0LsvTAa7ra4WeG4KI0aqlaGzBFkEal6vDO09VyOv3eYm+MsDvalDyTmr48sy2z5ZHYpcbZyrJ51nE8405IzZtWfWPpK/FfiCnCUalXXsK70Sx6Si8Yywv0QRO947qYE6NiUTQsi5rTQsCBhW24U+6og+HgHQfvaLzpZ48B2mBSDKij9ZpD46hr1zdVxnqsDbCq6S4024uS+oWjuE7N3DPHb1Rv0gXNX7nzh/xo9TFfrT7ip6v3AbhdVVyHOb968SV+5dnbPPn0nPjC9QwFewCzjxQR3DY1KgqaVfLrmIMvBhmJ6sBXBcHB9lRiRm/PlhQnNVXZohSYewf2+xnxeZK77KG8gqgtB1uhSk+sIqFOjZxLXjCJqeCrRLVPY8k3kqqDBpykZvTAB6Nmr+/yUmOT0yxsxFtFqERSoxJDw6TG2haKuAHTxASoRawBt1YUiabvyxTTnHxHRKoiM+Uwws3U0DyFEesoFHEA1dJ6jpq+1KkFPUi+dKv6CFydGAoZBIkj8E7uhbTNKtKqgC/F9yTqEZDikn+ClXPVJ9L4ofnN1zjLbpQfGCLjdBrdQrEG02jaJK26Citu5x2rxaGPZd7NG7ouMaMOlua8pD49ZfnpguKywb7coHZyv8T9nni7xoaAWc8J8wJ3WlBfONpZuud8ZksIm8qXMn58SowKLjfYkZC8cJoqoFJMsAAKik+DFjaTisxci1uktKTO0nhD3VrqwtLWtgdAcoVakly8tmAD2obeywhA69AzUiQ9RuJ8MwADMcXoRqIKfTrUODa7bi2tl5QiY70AIDnVKo2J/p7oAD8YKAmwII19JIEDr3b4CvEWQQCLaEcpJkHha53GYY4XB5IsKkdWK5/sRfJ41P0i+DzGCzAu+fMoCFlaZemZJPm+EFbF8U6aOvYgu7EQ8i2eo2gVA+gwukw6Aj4Dh/1p79lj2ftFRwapz/FlHr6Tt2WiSCbz9rz4+vSgpxotj/xOKm7VMbNkqqmmmmqqqaaa6jNS0ysO2ZRyBBAkI8riRij/vjQiTckNVcyNHLSzQFx2mNJj00y715G4lchZk2QL0ahhJpv0/VHEYijSzF/SsedmMhp6HwnlwYzM+3Ksb+89MipzoG98QxEJVvUNqCwQUY2i3RRcR8WudOwqR5dmepeuZuVqSu2Z
mZYuGGzKPHUJLQppo20wbLuCy3rOuilZH2RmuusMSomx4mreMitbtvOS/VwAiPK5IT6u+K3DF3nxhSV/8e4dvjJ/wlvuOQAn+sCX3RU/5J7xzuw5//fyK/yjxUP2C+kqu2uN3ShsYkK4fTyib+sO2oUct24FIChv5FxmhkE7d7Qnjt0q4qsAJx3xzIt8BXAbhdnL+dRrQwgIHT3N3rdpO7mRCmVqfBIrKMYEgCWjW+0GTwVIM+vJeFLpiPeJ6ZBnrgFVCpsidlpYE42mS+azba1wMy37WZMavIhp4sAqSg2OytG02bOj93lgFPOsEguEvgHq5nFo0lTsx/4AgmT2SJSGMct20vqjjim+NuElmbqfwQ8rYzSmP76I+AVHy6BBNaP9TU10P7Odmj1lFCrGo/tHVpR2P2RpTGLPXOWZ8QJfOa7PHbtTAcXmZYudJ0+Q08j6tOTl2YrbS0t56Zg9nzN7KfdE+bLGPr+FQ4O63WL2NWUzQ4U55iT5/GTzTCXnKziFd68Ao1ZYX8GCrwy+0n0csp8HvLE0seK2EGbPfFGzTEbBMapeSjMrWnaFo23tIGUJii4ibLBOgTcCivTXUfCINrGGVDJl7scQA4jhnMekh9E4QjdLdwRAMdgEgsQw3A/9cqUXP51OjyJ2EXAsS3oUYr48fr4pBtDERChCL59RKhLn4nujtOy/2pneeDVq1UvK8rryM9hnAKCM/TUJxcjwNB1msPSGxj12GUCN2CS+BFOoZGgcsXtFSNsMLgOO9NHYPUtKTg69P8d3AzfSvqgEZJDieWOWX4IgKZlx5ROYVIT+evq0DTFHTed8xCSMJgoAqON32fpUU33261XjVJjYIVP9YNTkGzLVVH+y+tyDIFHJ7D0j6UgGIuwB2GcjO9W/8I5foMWUUtMtA36emgIXiGXEd+LAn+U2MHiC5JfgmMCVaBAwJIEkKlGso1cy+67zDHvadJ71TvtxBHAAdi/7LpKCkXFnzyRJjexe0wWHbwxN7djVYmAwK1qWZU1pOpauRqvIwop/Qsdx06NVYGZaLsodVofeaHHtJT3Ce0VnpSEqqxZ/njwKbivsXmEeF3x8uM/T6xW/efEFHi1uAHhQ3vL15Qd8tXzMPzv7Fo9eu+KX5z/ENx88AODxzSk313NYO+xaYzf6CASJGtoTASzMTjxFZs8ibgtFkrtUV4HwRIl/ydywf03TXAT8IgE9hXzP7BP1vtREO/q8UsLQyGMnNfniFQJxr9LsvibOPD4oAaRyBYV2AWUCNiVveK/7GfwYFcZ6mXmPCq3l816e4A2HhaO7NthDYmJ0AgyZQ2JitGBaYYvkmeu+GUfG9nhGHC2NXpZxqFYN9PyUNHEEgiDbDIUYKeqRmWlev8hbpKHMKTG54wpWPHS6efb8GJnFjnuwmNfznY3ZUSKIjumavNJB6pikZcLGMHW6T4DyEqJVtDtHszHcnnSYeUdVyQ23rGrurzasqprt6wXrzYztdUHxMnmGPF+w+rhi9qzGrA+oQ4vaNxQvwdSCuPnSjEw2Y5Jx6P55E+zIJNnQp490Mzm4bqHxlRjM+jLiK8tmYdktZP1aR1zR4axIe5wRqUvnBwBC6Yi3sTdVPTJWDYmF0RpCo+nlbXAs1wDiosU4GZdNZ3sgpPMaHzRGRToV0TpiTESlpJyuk7SaflwbCNaIPwgQu8SEIhDRic0xPHujjmmfRhKSoIZmXYsHTrAhATCKrjB0VTKXLQz6oPp7JQPIwojI4NwAcovsSvWeN3mMyfN42G7UQzpYNMkwtVG4LZQ1mDqiU9BQcHKd/WwAqaN9ZQxn1l5mi8TR830g5aAzo8OFHmzN50HpiDKhNwcWWUw6xpSOEyMyBlwQeePoOrd/DAAz1VRTTTXVVFNN9YNek+J3qqmmmmqqqaaaaqqppppqqqmm+lzU554JghpkKeOZvDx71seC1kM6QJ/gEsFpMVpsF4Z2lWdsI7EIdCeeUOhRzOmQIKO80PFjSjmQmcBhhrun8SemSrfIMgvV74Nu1RA1mo6jp/2nuFHdxpSSMfgOgMxSdjMgaoKX+Nau1n1E7s6W3LhZH7daWE9pfC+XyWV1oDSdxGWqSBtMnzZhTBCjxsbSHWSWWaVZznye7BbMTlE9NXSbBR88n/F+cV+OofL88p0v8QuvvccvrP6IHy6e8tbFSx6fngLw+MEFv797yLubu1zu56wPJSEqmkaOIUY4WRxYlQ0vNgsO+4JuWeHWCreW81FeRcrbQLHxBKewe8O2NbSLJNUoI34mMhvtITTCitALGQQxKEJt0Dsj3i0pZtYcMq88yUyswtdq8N3IH2vwlSdUiliCteKzkOUnwWtiZ3qPhhgVRdHhkhQhRsXWljS2wO+NzJy3iq6ReF1IRqrJnyaPFTFkHLEtRpKZzFbI+2ga8TOIQUEzMKbGZRqZDTeHkWwgjeHgEHZJnmHP8oIsP/NArTBaxm10iREQh9l3kQfI/ROgn9HujX9jmrnPLBIN0cchXjXfWkmSFnUUqUO6L4o16CSrsntNs3P4mWU7E2bUtpxxe3rgdLHnYrFjVdU054ab+yLNunlYsn/gmD+ZM3tRUV51lC8P6F2DvRJakCks0RmIEb1OOjlr5P+A6ExiqhgwimB1kikl6dNCU59qgou0S2Ev+b3Cb5JcxkW6omBfenalx1ifYmqTSa8OWBsoio5QqcTS0oQmbT+xAuh0YiLIWOojjVPpVtE1ijaZuDZOvEVA7nmtI0UyXlVKzFeLxARpjKWuhcEky0eM6fApDanDokwAB0RPaJNcJ1/HFCPN2Ouk1cJiIF//IMcSFdqITDE6GShdaekOhrA1mHp0Pyh6047+XoeenRc1RyyznqmRGIKZ/SH7kCWIiRW2VSJPS0wQ3cU+IldYVQqfk8Bg2FZm+KXDHhsJ90cfhNGhQo5JzwsNejqV0mHGrKrsBROjbN/YIGqY7EXkFbEarW+qqaaajFOn+oGr72aeOklkpppKagJBohhNjt/yhZ4f8VH1fhx6pCPPNOWopGlza0nocBt5YWyXisM9CAtPOOkIMw1BzEh1op7rJqVs5PVmoKLXZEPWhccSiRjVkTDLHgapQVFDIxjscBjdQuFuFSYlz+hWvCJs6r10ikbUjcK34Dsj0pyMcehIayytKdgVg4FifCUyUWmk0bD/H3tv9mtLlt95fdYQ0x7OfMe8NytrcrmqXGW3bcnubkxLqNUPYHhA6id45YEnEEL8BYhHwysCBDwYCfGERAt1I4SQbRncXaZs1+CqzKqc807nnmHPEbEGHn5rReyTbXc3UttUZe6fdPPePHuIiBUr4sTvu75DoLAeo8Nokpjo8NpEfK+hN3fUDaqIAsQgQEixUNhV1gZBNAU3H1f8zy+O+bM3H/P3H3+H3579iC8W1zLO1Wt+s/kpb8/vc+MnbEKFR7FJhh99NMzNjkr3fNCec9VN+bPTRyw2NcuVvGf9uqB5ZqiuNcUmUq4j7rUkTAC4iaTymF2aC07hAng7NhlkaZJX2I0a0mLyXIkp9cfsMgg2+gxEK1HCvjb0E4tr3OC
7AEiDGhQ+x1joiK48zUQ6qsJIgkicKnoTh8Se4BRuui+7yT4Co99AnivZMFQ52T/fJBNSk0E/lVJZxCMnpxgNIIaLQ6KLyTIPM0Z8ir9Ikng08v+hjINcJScgKa/QMULaD+KYeiFgQGoenZL+MAGUYyVz3BhHj4X9FBs3Hnu04t0zvBbEv8HsxAy3jIqwlvQhkEa4XVien1TUs46m6phVHY9PRbq1mRXcHE+4uV+zurbUr0smzwsmL3qqK/HsUJ1Dtb2Yud4soG2JPqBMAimsBaWxZQFagxXABJ/jYy3uYk5/UrE9t7QnYiDrkgFuqCQ1KhQGXwdcGe42sSbiG0fddEyqnlD2aAU+AQpGR5zXbNsC50SiEnoNTkx5gQEUMRsNuyTnsRFf5TQjkWD4UlPVvXiY6kBTuDRf5X3bjREAEZHx5JjgEGSiFoUfpF8xqiHOOZd3WvwtnIZejWBXr6DTA1jmi4iaOGxK2SnrHm8DrgiErRHvnqyk6UfQUDmFsskb5NPqq3QtZS8geX28XkDmsgBtci8OVmG3CWRJRtxmF0VqaRIYl+VppF8D6R/qzg8Z37MHXCoHOmrCcONRxBiIMXujaAG4ht9zmlHrxCCvG74/iFSKT93vD3WoQx3qUIc61KE+C/W5B0GUh3KRAIj0nC2pDZFQRJRWyRNkT5OdmCPRMEQhajcyRUwH0WhaIBw5SOkOQY8JAarThFKSC+w2ReX6vZ4lrwImkEMlBsKQGlIHYpX086lhRMfR49BE3JER4GUnTb1p1WDUmhs+3SXz193oEzLsQDb7zKawew1prqgiwUBvIrvaY+ueJvkoGC26fK0DLSVeIQ1KFptHiGXEKdmOSeMwrO73klBQ3pS8f/WY/2pbw5fhtybvADDXkcfGU5TP2ezFGOxDLbto0Srw1fI5u1jw9ekzNqHkNqEvH25P+d6LR7y+nFC8tjQvpOvIbB+7AqsUdgvKiSeGWwpTAEaDRNOSYpEZ4l3lPDD4YAgbI68Ej/4DdqPwpRITzKlOHgGpYYoK1Y1eHwoIhWU9le2ricPYgDYBXQSJzQzy/bHK81Ve00Zej0GhdRz6m9BrMadMAImd9/jOjLGmWyPNWK8IG40pEmiwl6YkTWEcGkMVJPkISMBf8ozYQTdX9HNFsHtNY9x7bxy/c1jYtuDt3nvD6D2S51LQCuMTe8Zw19RRjSvsAgQJqJM9ePo5hDZFmTr5PtOOwGfsFHarcOuSblKwm9Ys5z0nR8LysMZz72jFbrJjd9+yXtZsHlc0zwUMAahvPHblMX2gCAG12sJySexlI3G7QylFNOki1Bq8J3YCeEXn0C+mNKcnlPdO2D2e0B4Z+skIvoYqma1WGl9lM9A0BjoSOs3WCbBQWM+sbikTq2hayHZab2mdpQ+atk/MsJ0wYrpdQb+xMhe75JXh1OBrQorO7XcGpSNV1QuDKU9FHZhWkrojLJURCAEBVEF8PbSKlCl61+0Zq5rkO+ScxvUWv7bE4Z4xevSYXfLe2Gn6ZC5rpk4ScCqPQ+7JAmqo0f6kV+hAMuLdm0P7czT9/wDEfSoyegQ3xOtGleIvBAzeIMVGQBCzy3Nzj4GVgJZgGS6EuJ8Ok+/B2RDYqZROMzJi8IY4xF6nyPYMtITRhBsd5fqP8nOZTBqVrvtDHepQ/3T9Rcapn64DW+RQP4v1l0Xr/vPqwCA51GetDiBIgPJWVo1zIoDyuSll+LmvuWMwNyS3hPRQq8T8DmRVrroBFTStL/BTPzysDkyPMuCNrBCqIA/LJuwxCGKSExh56C6vJaY3p5KEOo4rj5kuHhhAEl16dN0TvMH3GtdqVKcxa3lILtYCiNiNGGZarXATNcbqwmgOmCN+IwMwAgmgyeaWBoJXeBNwiXquVcRqYZFoHfGVpt0VhLVsRLVaHtDT+KIguHEMooJyKSajsw80q/ac/zL8Fj944zEAT+srvlV/xES3hKjRKrAJFRMtK+8hahahplSeWvWcmzX3zZJ57kIATeRPLt7ge9snfO/2Md/76DF+WWAXqWnaqkFWZHZg+kC8hSrF/Ppa4as0TmoEy0ICIIIR00MVRpAns0zkDYl90ELYgtlJnK2v03kucuMuYJXsqVkO1QAAIABJREFUuiIsk0RiJqa85qgbjRDVXjOFUN/LylEWbpAqNWWPSQ2Rj4rOWUKUlfqzZsPVdsKmk+a9nRSSLNIbXGNwTuGmepBmFcsEHLkU8Zzmsd2N14PpIsVWTGlNYkTlaNg8dgOraX/Beo8ZNQB0Ia9+j+MoYEWSLWl1d96yD+4l1ktitAySnTIxWOzIkroj+0n7VqzEKNftFH5juEzyMVM7ppOW0nrOJlvOJls2ZwU3DyZsnkoaUnlpKZYW00KxqilXgfrKYbYCgphFi/Jelvh7h3IeegdOXg+LJWGzIWx36NsFk+UF1dmM/kQm2+7U0k+FGeImCt2MDBoZAwEIQlew7TVt7eh6S5lYGrvKUhlPYTyN7ZnpgG4iE9ux6OQYNn3Jzlm2XcFuWxKSHGwop1BB7jV9NiMtPJ1L46QDTdlTlz0uaPreoPYuB5vuFQAhKrQSRl5OlcnAXFX0VAXsTGAXkSYeiFYPkbzKG5E3LRUhSeR8p/FlgCLTjyIYAXPzdeODTkDcOD9hvC/JDxNwsX9PV+P7dJdNbiXtiEpYUCDfKyC6GuK7dX/3+zMIGAphVcmxjfsTTJR7cU42SwynHB8etUogYE4kIkln0us+Aa2KEW3cPxif2HCfjic+1KEOdahDHepQh/oM1AEEicKKIILKmu60ABhKhsa/n0ZIsZ3o8QFZOTCpQRuTH0B3kXKh0E7RbyzRCmMiP1DnB9BowFepUY5qXHkO8gAck09CsRW2Sm4cfT0mCgy+IIm1AeB7TZz1KBPRpYciEAO4WdLebw12aaiuxB/DdHGI3YWx6cz7HI3aA0X2GssxjIDgFPsrhz4KRGKVNN0AhfVsjawq94uUD+lHxgnF3up/EQmVxOBW1zD5RLHWp/wv198G4Ph0zdcuvsBXpq84TnEsL7sj+jTIy74moJiajqf1FV8oL3lavKZWnrR4ziM74yvFJX938hFXp/C/3ftFfrh+zJ9eCdByuZyyvpzQvzJUNwq7ltSg3OBn5oprFJh0zuzIMJDYUzlXvo5Dc6XSec7sHJEr7bFF0vxyE6HUZ28C1StJeknxt8ppeq9wpcXUDl0kWUG315gidPcQFVZFmsJxVO2YpLQfrSIuaGrjqIyjD4bWS6MLMKk7XNCUlYMZQ8RvSKvzu22JNp4YtMgUWgO9GpJT7FZ8EeJ1pFwFqkVI0dMJQCuyX4hK18PIsBl9EsbYW9J1kscQ5BoeQBE1fv7T8bO5qRMZmho8EHxiPoVS5p1KbJB9Oc7g6ZP+rR34XsbI14ZFU6BKz2rSM2taZlXL6f0t3YWMw+XjKdtNlZg3CrU12FWJ3QiIUSymmDYOciPbRkkVSQyz8ranuFyhFmvCYgkvX2NXG8z1NH1+Rn9U0s0N7bGmm8sx7Y+zNhA78J0llIZdVbBLY7Cw4tljSj94AU3KHmoGtk
htNmgV0Cqy6is2fclyVw2Mka4t8J0mbg3sDH0U8GwASnSkn7dUhRt8QvL8BGGEFMbTezOkIPWdFYZTql4HqkpRWkdpPa7wxExLqkCbQFF4NtMKtywoFmZM1VpoohVflWjleow2go2Qrlk/C0SjMSldK5SjLAyE8URA4phzgkv6PUH6ZwbNtFYEHYUFmIBNX8cRBNmN0daD3DIIQ1D7KLIyO96Lh/ltc+rUyNJTe4wUkZipAdfI+zT48QTE90PHwdck++ykC+JubO+hDnWo/8/1L8IW2a8Dc+RQP8v1z2KQHFgih/p5rM89CBINuHpP3016+PRjoxpMNjvdWxXzo6dG1PKwOTxgllBEhXKR8jbRso0amj0QanSwkZBYBEGnh+MsFQmIQWSWWjhZ2MsSALtTd1bGs5QmAzHhVtMdGUIVxCjSRlTpMUmaE4uAqwPRWkKp0wp3lAd8BISJTqEHSUxq8AN7DYmsIIYiSqNfBsy+n0XQaB3onKFMfiFN2WNTQ7WIKskuDCpFPeaVWYBoZR/9VBMKTX2paF4o3EKaxuV5wR9dT/nJxQUPZktK7bjaTXl2fQRAvy1ARXQReHC24On8hm/PP+ZJ+ZppYoP8YvmCByZwYaZcGPjy8fu8nP2QPzk9B+Dt9iHfXT7l7dt7PHt9jFuUmJWmvE0+Do6BuaH2gas9Rk/218jsHR/Hht230gjp5N2S2UX71PtQ5fFNTJOdGqJdswmr22lioTCFpyg8zoTBR0GpOJiqagWVdZTaYxOYMTUdWkWO7BatIj9aPmDZVnSpsS0SU6Aqek6aHafVhof1kip1litfcZTMZvpoWPY1t33Nn7+SKOPtpiSsLdtrS/3KUL+O2F2kWIdhjHK5SolcZibze5BGBTWwrrLkaH+cTB/xxQiiZGaJ8XllHFQRiVZW4TOjKUsQdD+CilGR2AHcqUGOkM9Nr4YYYuW1mOZqQ7so2DUVt7OOk/mWi8kagKcnN4Rj+dLSeLauoHWWdQKblusatyvE66LV6FZjdnq4D9mNpb6qqa/PmbxoKS43sN6idjKX7YtbzKKinFYU5zXdXBOsGKgCuDrdgwqFSX+HYgQuM6AaDXRFZNsErqvA88ZRJk+Nad1hjee42lEaT4w9uom0pVy0G+sx88B6W9EtS2IyLVUbk84jbDtNN++TVEZANZ3vvSpiTcAFjVKREDQum6OmCshJipVKn42D9EmpSFN1XEw2dDPD1XzCopqi1onZtZHxtOm+PQDRTSBW2XslEHQEJUa1ceJBxSHGV/VyvQ5+TollMTBBMmCZzX2dQodIGMyphcXhpgJwxnUCQvZjto38v+kjMYHEOn1ehUgohIEWeiUgqUpAIXvfsT9/Mxskjm/IbMbMUNz3lCKxUA7GqIc61KEOdahDHeqzWJ97ECRY2J2ru0aPHmF7JGBBBWmAYpkekoMSZkVUxFKB0tiNGpt3A65Jq/xdHGQyIpdID8KlrAT2U/CTRMUvR5bFfmIHiDRH+fEhVbnkWxEznXrchrxBqPu+MtKkVxE3MSLNAVQVMLUjXAS2E0t/aygXamQY+LHZ1H1K2VCJjp72ISBNpySoBKjGBBO4a7bnfKRzhqpwgznibLqjqwxbVYlxn1PcSX2ICqyH0tNZQ7SW+pWiXKR9DBq3KXm9OOXqbEpV9zin4T1ZGa+2eYUTnp1XPD855v2LU473WBCn5ZYnzTXfbj7kG+VzvlJUPLIzHlkZiH+l/hE38+9z+9Dw/e4hP9494iebe/zwShr81a4CrylNoGstMYiZZExNn10YTAfaK7wngV5xWDUW0CSiajEK9bX4amRae56TUcscdEPyhLxudgKe2JXBGWErlaWnqvqBqaF1SABIHOQwW1fQZUSuFFkQCMD14c0Jm12JTyBKCCp5NIhvxP16xdP6ilMrzb0hYFSgVj1aBQyRdSj54vQpAFfdlLUveb2b8uHrE9afTKleaaob2XyxjhSbiGkDRiuKDYOXylBK2FXajVKB/fmuosjRhPmRm/27AIt2ihCFbRSSv04GFZVT4ONw3SnP4P0jG2KUluxtdGCj5GYXUFtFXGniVcGrec3VsczHyaRlUvZMSwGdGttzWm0GhtnuqGDrCnpvaL2h7S29Nzgn56nrDZuNRa0t1euGyYuayctAfZlAkMUOtevRtxvqzlG9toRCE6qUHlNqfKPpJ5p+IkwcAWPTIZh0X/EiwQilTuNY0CV5124SiCbyIvn/KAVF4akKAcRK6zlrNpxOtnwYTnGtFbPedF9TvcIsLN4rNrVFl5JikyUwxgS0MuL9kZJmemMHQ+ZszhyCpmstRelG7xrAe01vDS5qzus1p9WGT8qe6+VExnBRoXYiCxQJSgYWNC79O9YebCSk+72uxXfHZ8mN1/idETlfvk73fIAygKBUHME7p4a55hObyFfiO+VrmbPDXEpMppBZYl5YUDrusZj6ZKpq5fwJSD0aLiecKF1HcVS85F3M48kemyXcva5iFf7CJKhDHepQfzX1z2KOHFgih/pZrr+MJXJgiBzqZ7n0P/8thzrUoQ51qEMd6lCHOtShDnWoQx3qUD//9blngsQisn0YMNsxFlWSPuT1TLvXnRIaNrJ6mFfzo474SUB3Gp1WiLOhHUn3ndMmtIuoJDeRlcO0LqeTpCQZoeYakkYUhEa+Y4wl3ZMDRNAmrTSm/TZ9xNyKXtwn2rtvFH2KTfVNxM81euIwxx2uNviZxazkGO1OiS59j5UA8v1qT1ceypEFoguf6Ot7x5Bicn1UeK/RajxAayRON6Wa0ncW7zQhrbiS4jmjiaja05+D7i3VdTpPXZIkbA39pmY3KYllZHI7rs6KVwTo1uBua17cljyvPSp5Z0QvGoDZyS/z9OSGv3PxNr/UfMg3iksAHtuKJ7bmCfDN8hY/vebjkw3fPb8PwHN3wsrXeBS3bsKL9ogP1yd8eH0CwEZPUVdmkLyE7Si7kB1Iq7Vp5TnOHUGB3yX5QKsHxo+wWuJguirzIDNBFESL7zUbr7CVw2STXC0MnRAVm65g2xW4FD0K4tMCDKkbi8upGEtm5lOhUEqYPMuu4tIIdeHKyt9zs2PpawyBSjsuiiW16nmrljF8s7qiUI5a97x6MOdPnjzlh6/vc301k2N8XUpE8UKjPRIhm5RR+ya8oRTqv+7j4J+QySxRC+tKJxPWuFH0M8ihQb6JQzSp+P3I9Rvy9bJL0qU4KgZUYoSNk5mBqZRNcIfrNclvcpqTaeX6KW4t/jJJRSY1y8yYKsQwuKr6IQGlSIkouarC0ZT9sHpvtchEXNCsnlZcXTcsXhU0r8Rxs76saV57ytses+3Rm07YAyliV7UdsbCEWYM7qeinFjfRdPmeUOc5tTfeBtAKX6bxLky6r1lCURINbJrI+ih5/tQOazyzsuN4vmVlK7rWQjIF9RuLXhvMNjGmrKEvgnhyIIbOvQkoHbHWY0zA2JGOUBSeEJQwvtL8NSYMc9n1lu1G8yJo2onlollzMVkPnibLumO9qegnBWptxKQ33SPsNn2H0sQYB5ZIcFq8lTJLz3i8ikQrC
TzKKzGEzSyKZECqFeAiKiqRLA4R63uSx8Q28lUcPamSRMXXisKKobL8LkhjlOSPKgBd9jUBn64PSNeFBl8ktkixN7HZm9dx/F2n94xZlUr/OUTkHupQhzrUoQ51qM9gfe5BEHREnbW4zuDaJB/oxIxTp6jFqER2YHb71GcxrAtWvB7k4TObZYo0xhvw5agbz74P8ClZS5vo+HW8Y1yXjRhVBJ+d/NPn/NCpyc/yvuYHWtMq7Ebo/trJ36aTB2qQBsetNN2JwZ914hVSBlyTqPNbg1lrkRls1R0TysFDwCbzWBMhp0QEhc/UdyONd1XJQWeNeu9lGyHFXVZFj9GR1gSc1/QpycEZQ9xYVCd+F6oMdCeBbIaSU1WKtRy/rQz9URy8UkIljarpEhgSFGZn8I3Q/EHiae0Ggql45+iYP3/8kIuLJb949gKAX5l/xN+d/YBvl9IhGqV50854ZJZyLPGGQMATCTHyiVd8Z/eU/2v+FQD+cfUml8UR4dpi1xrTKtR2NOzMHgyhkCbGNxpVO9Q0mY+WhrAx4kGTG3AzzpOYUofsVuad3xncVuMmBX0CMbraSeMaFLETYElv9eBL0iWufE6YKHvZH3fkh7kWg8U7Q9tZXi5maB1HT5GqY92W+KiwOnBUtxxVOx7UMkZT23Jst3yxesXXqmd87d4znp+e8KwToOjt9X3eubng6nZKvyoxC4PZyPnKppSDgXARRHbSKfFaycknOmLXiupGJF12G0Ep3CTNubnIYIYGVCdZUpoHnnTdDjmpjKBJrjhetxn02H8tm2x6JYaTqlcpCSafNyWmnHYESfsy0qZjDFUU/x4Th1hjW/jBj6Mpe5HRFD1nzYbtfMXtvZrbhRzk6qakurSU15bmsqK5dJg2YG9F2mUWK7hdoi+vqeqKctoQ6wp3LghFNy/wtRIzTgOukus7GDWMw+jRksZSifFqv5Du280Lnm8LqlnLtGklkcgEZrXsw3ZWsF7X+GUxxNnizDCwoRYjUUzElwab/GhsBoqMR9lIq6yAqnvzEMD1Bu80m1VFuyvojg3zqqVKJqzF1DOpOvojw2Jd021K3NZgNno0u/YK0v1CeQjBCiiZridVBLmfV14kkkGJ98keuKG8IvYKgyKoiI4KlUKpBo+XrRpMsbPpNCTppSYlgWnxA+nHuRsKkmGunA/tIvTyHp9+v8j3KXSRwK0Usbt/34gmjtfDnt/UOKcPfiCHOtTPSv1FUpmDROZQP+t1kMkc6me5DiBIlIQI2zhCjuwMiq606FYejIeHzT3T0pwSEQpFHyVpIOx7BezFfvrkM6D7HHGaHjhjYlo4wMm+5BW6T0d06j55cuT4yGSaGgsBIGIR9xN4cU6aE92PCSami9IcAmyhWIJda3ZthTv2qInD1LLRYAOulFVft9PSrJCZMWmctABAeJWa6jSkaVaFMuB7TQuUpcNajzVhGCcfNL0zaB0H3wqlJN0BwKpIv5OkEbVLrhVVoD/KzYAYztptZoUAKNwsj1FENdLI2O3eauxWQcgeBdKUVMtIdQP965rFUcUfzMQY9feOfoHfffDrfP38Jb9x/C5frZ7zBXvNUysnaqbrO9Pp1MAT+yFvlcKCeFJd80+O3uTd63NuXswxSyNA1J7xqW5BtwpjZCXbTwwhe7cUQTwKvCJ6NSag5FVnGyXRx42moaZVhKUhVLKPPnlCaKcEENpJys24Ms1wbuX9Ah7o5EXRz/Rg6Bq8wvVq8BgAuK2CNIzJXPimiMQq8KNjaXzLUpJA7k3XfGF2xVcmL7lnl/zG9CcA/Pr0p3x8esZlP+f93RnvLc95tpzjvWaeALRZ1XJabTgtxRH2tq9ZdjUqNc9aRV6tZ1y+PKJ4UTB5JiBg3kk3UWJEWQUBOjJzIxv5lgJaZnaVirKafwcEgQEoUslrZgBF0j0hqGSCW6RI1D13StOB6kYWiYyhGs2Mi5wuFAegpK8DXSMnaFN5lIaidMyalqboOZ1sOZ3ImKzPSm4fNCzXJevLgup1SbGG+koAisl5TfVqg75dE29uiYsl0TmKjwVEKY9mxGmDO24IlcHXhn6m6Rs9gEluou70xsqTIqTlh3arcKuCfmq5Pi2xlaOsHE3yDJlXLWG25kUzZ7ct8VsDTqMy2yykMXeSNOStsDBiuud4rzEm4JwhBoXXkdK6IdikbjratkigtuFm2dD2liqBKaXxGBU5nqyZVy3rWclqV7FZVvhNRiblfOJB9VoioAGfzF9DKfM7VmI0rYBYBEI24U3XarQalB6MkTPzb/B2igks0VG8Tvb8Z7JhayiFRRfs3d8H0aohQSYmAJyYks5Ic9FGYpdSkJqUKJOYInl/9o1Qx5jcBACF0bvoUIc61KEOdahDHeqzVAcQxCv8ooQyoKx0gcYGwrwnTjQuJRvoTg3yApUkCLoH6dcT9T4zEPYYG/IwKytw2oFOI65dYlcMTI7MMMmvy88H2UTufdX4IB2KSNBqfDjei8iNpaziKpeo+b0Y7WUQxrRxiHq1W0W7sHSnGj/LUZMeVQZM6fGlJniN0lGwgyxX8fLQbJaGYqVRfdqHHDVZGTH+84pdoylqh1FxoP9vO2F9BK+IQaFyQ5oX63UUA1qv0K2GVqfY3CRlySvrJdi1HKPpocsr6xaog4A1lU4r86lRSefBzSP9kXzebqBYxYFZIlXg6wv++PSCP3jjF5g/XPKNey/4G0cfAvD1+mPesDdMlOOxVRzrhmPd8LfTPr5lv8NvTN/h7YuH/KPTb/D+7Sm3i6k0f4BqDcWNTnIsYQ0Eq+hOZPv9PEIjRo0DeyGoQZbhMwNJjWwdFYQZkk0b7UqkUsplMA/KVRTZCeMKcAZB+skoNYKUbpRlHgPYNFawRubaXkJGtAZ3JYO8NbBVkevylB9NH/H7J1/i8cmCb548A+DL9SuOzZpfnrzPLzUfcnM04YU7pg0Fx0Ya/DO7Yq63nJgNJ3rHLhpu/IQ+6VEMkefumO8+epM/ePYlXk/OmL+nh/SW8iaN50lKedqTF0HabxuJURrAmGVfeyCIRJCmmNz8J+69FsR8NobRrDLOooAhaRxVlrGpzKYYgZThHtArTJqjvtX4NrO3DBHY2YJdWaFrTzPpmDeSzFNbR3OygBO4PatZrBo264LVQsaouK1oXpXU18fM3zvBXC7h2Uv89bXswPU1aIM9O0HVNVhDOJ7i5hXdiXTPu1MxWnaNGuKgCeOcyAw2s1P0bUl/bPAzzY2RQZhUHY+mC5qznlVXse5Kdr1lsxKNl9YQIxLxHIFeE31kIKK1exEmSWIHI1PE6gBVjzGBri0I3tB2kT4BelrHxDwLlFpSbmrruNKBXSnHmCV5MSiCAt3rJGfLE0GJ9KTS+LlG1R5tAzqnBxmZP9FGggLVK0JQw+vaJdZeAkfk5O6xA72S+2xmx9iIUmqQIaKhN2AKmXMqgbvKiQwSGGN3eyDE9PtC4TJpRimUH5kgGdTcN9b+p8yJD3WoQ/1M1b9IBO+BLXKon8X6ixgiB3bIof6663MPgigH1QubvC3kAdLVAdU4lBVgJKiIrzQh
syFSRKJp88MsdyJ2yX4BWsAP8Q6IiAokd/gRTXrYzYyQjjtymPwgKskTDNIYgOBkZS+mVBtJD2HQ7kebwIhEtVZVYoYMIEhq+teR6kY8UYq1pjuRHeiONaEJeCUyHF14TCHaoJySEHqD2hqRlKwZIhdHWnlqBFtDWGu6icXNDJOZMARilOSRsE2SFyCnuQC4Iog0wIhfhup12kZqXKuAKyK+kSQLs0n+K0PiQ6J/24ifiowilHLu8jZ8E4hTj7sAWk31ymK3UF3JNqpFpHzmiO8pup9otuenfPfilD+6+CoA5b0Np/MNk6LnrdkVv3r0Pr/Z/IRvp1XjN+2MN63nV8u3+VL5krcvHvKnqyc82x4DcLWd8OzVMf1NSXmtaV4qykUc/An6lQAibhKkeTdCQ8hJRbK6K54NumcE2/alV4G0aixeAaGAfjoyeoZElDTHYlp1Lpe5ocrnMw4rzTFHbiKNk5ukpKC8Yu0YWSvss4cMvin46XzOT07FV2V2suHRfMlXj17xheaSx8UNX6ueDR48AJogoEe0lNYz0T0P7XJgFRUq8AV7zbeqj/iV6Qf8o9Nv8ofNV2g+EL1LsYLqWhGsxp34YV5l4DN6PUpagkL1WqQ2e6yHmK5D3Sv58Z5fgly38Q6gCQzxySByHqK6G5/sx3Ea2AEJHMmyCj3IKDLTTMYxWsu6LthMBECwlaOpe2Z1y3GzY1Z1rOcF7bmMgfea60WFWhuWT2c0r6bMPjqjfraSr71ZEq5viG0HXU9oW9TrksJayllKuDmbE5qC9rxid2LkvO81ylHJf6JJ7Ctt8F5x08v1sKwdPmhO6i0n1Zb7E5FMXR8J1WTTF/ig2XTFkAATohpOg3d7shOniZ1m22t0Oo/axCFtpijdcK/KYIlLgETnLFaHgSGiAFskFlzQ6CJQlA7XGPqiIKwMOl0HdqMod4qoFG6r8I3BT0ZfE6x4migTiIUagGufmIahS5HYexHn+2CYoBF7vL59CRYQTLqvZ5lSYjWpFJct+5h+5uPANlQhDvsCoPei3aMS0G/4/aFHWeGhDnWoQx3qUIc61GetVNzPF/wcVvPgafzKv/MfETX4ZDgZSuiOA2ESoAiyWhz3/AI6LXThXh5C7VYe+u+soiWpSjRp1TmBGaQHeJNYGXlV0LTSqO5XzN4Bca9B2tdtp+3sM0iy14XQ8dODbKb/s7famBrl8laaXd3Le7qZvLE7FhaCryOxkqZbtPBRIi8Beo1dGIrlGK27X6Fg2L6K0iC7acSdS3duGi9xlxuL2pm0mjk+pMfktxJzw9ppMRPMq6c2DtGb9Cn2slWjtj4xcEIznkPlFXqnhhVQXwfizFNOO4rCs92UhLXF3go+WCwUkxcCFBWrgPYST+saGYPthaafCQjRnUT8w5ZfeusT/u0HfwzAb0/f5SIZiQJc+w0/6Gve6+8B8KI/5p/cfoF3F2e8eHVM8UHF5LlCdyMrxjUSpeymUeKUi0gc6APppIbkkVGk1V0PZiP7mCUwKsgcD7m536Pfi++AzEkVpImyu8x8GueOdgnY+tR895WMQZ5/+zWAd7npT3Mh+6KECvp5wJ86js7WvHF8y9PpNYUKLJxclKu+4pPVMVYHLpo19+oVJ8WGKml6zuyaJ+Vrvly84kx33IaC/+7qb/O/vvMNOcR3pzTPReazuxcITcDMe4rkt9H3ckAhSRnYJSaC2bs/pmtetwLGZUmbzNVxPLJ/gxgkR9xkPJf53GWzzOznM4xVYproNn3e7AExeaX+U7KJcRyDxJo2nqrpqcueENXAvJpVLb03+Kh4fTulX1QUV5bqUr6wuYxMnzvsqsese8ztGjZb/PUNsU9ggVYoa9GnJ4T7p3TnE3wtBqEgbINuquiOhB3nG/FL8tV4zcaJo551zJqWB7Ml9+oVZTqP190EFzRdOigXNK23dMlHqPeGbVcQgqLdlfhW5HL75wgTBxYbcMdYNQSF1hHXW4JT6CIIeyeqIdI7tLKt+fmaynpChNWmptvI5DZXBeW1plil7ywkEj2kY/RlOhfNHkqm9+ZRL/If3cl9KEsfdZbTqD2gV2cAY/z9IxK4vbmXwcfM/CCBIUmiZXeZEcIA/gYzsj9CyQDW54WAmPysPv4v/nPaDz/cG+BD/VXVr/9yHf/oHz79/3s3DvUZrwMz5FA/z3Vgi3x+673/8D/+Tozx1/9lfufnngmSG4p9mr+KYFqNrzWukfSX/VVhFUTHHesINtLV0oAOK+v5YTU1MLpLjA89ruB7LbKH7NkRlcLocQWZMHoE5AYy7r0++DhEBmq+7kdpQ2ylucpARNTiUzCAJY0wX3ytcBNFsRIzSdMm+cBCYTqhffsafKOHB/Nh5Tp7bSDmezGZSeZ9y3Rq7YXVRSmmAAAgAElEQVTlYtci9+i3iVp/30EtbI9YBQIaHfco2k6hd+khXUXRzscRwBjaChuh8nikCbuzsp7Pl0rjD0Q9siTsRhNaTddp1NmWs5M14Rh4LK/33vD6aop9VdC8MDSvIvW1p76SLyiXmmBVAis0u9OKH334Rf7TLz4C4I+/9j3+teMf8rXiJV8vJ5yaCd9gw0PzPgA3ZcmD4pb35xf84PgRfzx5wu3RlOq1NGLljZwX3SkxUqwVroljCkQlLBk0MrdsJJagTMCVcnn7LsmJSH4GZRhMHkHGRelIdCI50q0YA2dDz2Ilq8g5ySZY7qwofxpgG5r1VJk1kudvnh+ZVVKsoLzR+Jclu1nBj5tjfjh9AjYBX4DeKoqFJhp4UQmzJzYek/wyqrrj/tGKb51+wt85+hG/Un3Cv3f+e0NCzf949Gu8MveprhR2oyQBZApFYgLoZK7Zd5Zg9MAguAOCdOovBSQiI+gWjcx306kkDUnvUcmKJt91w54nA6mRVXEYo7iXUDXUHvikBmZK+nyviVtNWBvasmBXBbABWyWD25miKhyTomd27xp9L7L9QsHNVnxtrjYVl69qypuCYgnV7RHVTWD6SYu9WstGXr6G3uFevEIvltQvZmAtcSLfEZsSd1Szu1eyemTovaKfjqyG4EF1Be3KsitrrucTPpkd8eT4Vs4DkS4YSu2pbc/9aomLhj7FAGkVWLuKLhhe76ZcbxqWqwafgNm4tqidhp0WT6MyEAqFSUwRpaOwz5wi7gy+M3iT5v8A7ipUVOy2JZPjNU9nt7gjw7qXi+7Z/IjtdEJ4aShvkyfRjsHzQ0Af8VMJZSTUQUDwIV0mglZ3gAw0g6H0HfaRSfNgHwsLSkDQfR8RNfrIQGIfJWNXN0nG3nupZ0O6WJJ8ZeZh/sKo4phAc6hDHeozU3+ZhOYAjhzq56EORquH+pdZB8XvoQ51qEMd6lCHOtShDnWoQx3qUIf6XNTnngkSDXRHDEanIPThYgVmK/rvvtWJjZA+pJLzvk1SjcZLGkD2ouj1SD32iCGnS54dmeZciKwhBHm/KcD7Pf+F9P6Y2AvAnX1QYW+VLpI8Su4mftgdxC6t3hs1JE+AyBdCFXHzgG8UbqbEHFR8KMVktM0r2uB347L3EKWYGCpuIoyEaP4CJkg
22FNC8ze7vZVra+hPlaSfVJ5oA96N0a0qRZZKMo2WlU8dZXwR+nj0kehjMg5N0p0iM1VUYtDclXxFE4mMsqSihbC0dLsptw8Vs0nL46MFAI+aBeGh4uPNMR9cnfLyckL1wtK8lEunug2JRRSx68B86WkuDdt3ZWX8H7z8Nf6Pt77Kbz5+n3/z7P/hTXtNpcAkJsWJ7vjF8hlPi9d8pX7Om80V37v3mLdfilxm+XxC/dJImks2423VoN0PpbBDfJWOPcg4YBjYHtEofBVG74AiYOvR3VTriEmJPH1vcJ2layx+ktNlUkpSNlfNspa9VWLlZbPKpfOd/sBItc/GvXlehGzcupF5US4i5W2i/itDMCmhApnXdhfEA8XKSruvDS6F87i65oPpnPfPz/nuwyf8zfvv8m8cf5d/fSarBmdfXPFf69/iox/fxy7FiLZfFeySD0RRpOQiG8BBbJxcz3tTJ2olK/g2JrbXHjMkkg0xRMbGuN86HacKEHqG+Tl8dyYA7Kd/qKSgiHfZIvnN2cRyX6eQWWGmVbBTRC1MBFfLibhuDabyFIVnPtnxeLbgwWRBcypzodKOl1+Y88nqmJttze2mwi1KylcN9ZV4dkyen1NsAtN3F+jllrjaEG9eE32mGCistRydnWK/+YjtuWF3PqbL+EqlOSDXdLyx3NQ1iwt5Q910eK+Z1C1NYuk0pmeaaEMnxYaH1QJNZDWpuJlP+Hh+zG0rE+H1Ykq3KYkbg2rFZyg4RSiS6XW1d6/OzLbk4ZEHOmKIPfTbgkVRc9ZsOCm3Q+Tzeb3m5dGcj49OWL+sKW80ds3A+MnR3aZVEh88k3ttNnQmM6PseC4/XTntRZgdYmI6zCMPOqqBaajSPBl8oBDGF0nqGrykwoTdyIDLxqmwd53uk7sCo4nvoQ51qEMd6lCHOtRnrD73niDVm0/jo//kP0A5kX8AmI2AARlQ8JVoqIf0lyIOUo9QRokzVdzxpxAfDIXqFGYrhp3RjNr40ERiNv4EcNLV6G3ycWhT2sc+XbocH5yzT0hM+IBKUb7ZGC+DGPtASbTjMfhS9N9umkxHk9+B2ebGdEzAyZ4jOT5U7zVrvhz18HeAIrIMIu+vyALsRgxZQcCT7iQO8bxKJ8lFMjGMTg3xmRIliXSGCWwaTFB1lHNiExiS98HvP9Xv+WB4hUryELtRFEvxwggFdKcRd+SZ3BP6/xfOrnlrdsXUtLTB8rKd89HyhJfXczmGl42MVaeorqF5FWhee+xKuo3d/YrVI8Piy4Hzr73mm+fP+erkJY9LSeR4WrxmrndMlaOPmkWseO6O+f72CQDfuX6Td15e0L2cUNxo7EbdkZTk8yq+BGluFRAmgWj3my6hz2dwTpfjSdRazCSLwqOSP0LbWnwyswzLAr1LCRl9AtvS/IIMfKWmPwNgajSCHEARlYDAKl0HOQmplblRrEX6YzdxuPYyvT8qNXqRZFBFMcTLRp18cCrxsnH3O771pY/5dx/9IQBvFZf8oH2D3/34N3jnpw+xKbnGHckO61lP1fR4r/FeS2Rz0PhOxiBG0nWtBs+VQf5GahidGHhmoEcaSbUn/1GDifFfVPtympxAoz4lmYk6fqpZVUPjmz8L43UbbGqIkXtHlvWpxjM/2XDc7LhoxNziyeSGSjvaYHHpi266hmebI26TZObmZkrcWCbvW5rLyOSlZ/LxBnMpoGG8ucXfiLTFPnxAPD2iP5/iZnIi2xPD5p6mPduTzwUG3xSX0qlikskVk56ychynGODzZsO9SvxgGtNjVGAXCrZevv+qm3K5m/JiOefm9YycMDMMYhHlXguoMqBMoKgc1gaaUq7Zzhm2uwK3K0BHJrOWk8mWh1M5xnnR0piey3bKh8sT8Ve5riluU1LRTuaz2eZ7PklSOAIU4hmVrs98T8/nNYByGpwiFiHNITXc24ffDXsSs6jS76XBEyoO8yyPs96bi7pTI5Aex7kyApcRu1G899/8DtvnB0+Qv446eIIc6mepDvKYQ31W6iCV+WzUwRPkr6JMhOOeGBV9ejjue41ZmcFQMnt87Dcj+QFSO0VwVhqfbP6350kxmB5GNZrTIc2LrxEjwyqgUlMabGq6CoPNsbOywIz2DGyTO94EOgoYs2dMKUanaohOHFJscjrMVtgduhdPEF9JdKjLTJFaQCGzU0OySMzeJGYcj2hG4z7lRYt+p8lT6aG/CISJws0VxSI9zG/l+6PV+EKjKi/a/ZzYUSrxkl0VsNOJ2aEGZkdUCp0SNJSXZjDGOHqXeLXHAklLpjagCob39IUiGjOwYOqXinBj6W6PAPjh6YQPLk55dLzg0eSWh/WCN+obtudCxfjJ/QsWXYUPmuvbKauXNdMPCk5+mswmn+9onsP0Rc31q3v8n0/O+L8ffIE3TqVR/PbJx/zy9APeKK6Z6x0PzZqHZs2b9kpebz7kT0+f8p37b/Le1RmbVUXcWGyKPbUrhd1BeSsxvyoIY8g1ZjBvFR+Q0VMlqkjYjScp2IizFl876qbD6oBtOnwtx9CWHtcZ+nWRgDItHi+b3JTlcwAgCTI6MLA4hvma5o9vFK6W+T/M4WTEqBL7InsW9MlTNhRiDhvN+F37zAmdGEjFEspbhb+s+MHVW/zO12cA/FtP/oxfn/yUv//4O/xP/BrvmAfYl8XQuDqv2PZGvCGCIiaGSNykKGOXaBd7sbl3GEY2pb6kZtNr8WhQJjIaWiZwwqkx/SmbLsuEHlbvo0nbCpF9nDrmfj6ZzUY9gklDhLJgNSNwmF/ulZiIBkXcaha7OYtqwrNKkorenZ5zMtnS2J5Z0XJabrhfL7lfLynSYK8fVaxcyU/euuDV9ZyrVzXN8yOaVwIKTp89ZPLeAt7/GPf8BTx/gQaqSgxuJ48fUn7jPgtn6WdqaNAHALoz6X6WgFBdsDOR9UQmwrPGU007jqfbIWFmblumVhC5N5sr3pq85mY+4bvVGyx2FZtNJZG7pPtnmmMxgtYMsblvzJMviQqs+4qPbo7ZrivWNw27bcliJ8dw0ux4OF1wWm44Pd+wPSl4du+YF0uZa9tNxW5RUr2wA/NNElrkGLOBsDcCclBkJko6x04TCaioB8ZILCIqyJioACaz5NI8UFGunZh9rYrxPp29Q6IZE1+ykWpONlM+MUtiZi2NrJFDHepQhzrUoQ51qM9afe6ZIPWX34hP/rN//+4PVcS1ltgZ1FZLnKFTIzM+yzLSg6Jy6k6k4JDWsh8dSQZN8g+kaQ8F+KmHxqOLMDRYoTOYGysU6L2GT++zG1JTGcr0PU0YVqeVT4kgLu2jHw1KgcEAdTQ+BVfHIeEgWmFl5GPP8ZCouCfJGZNtclPn6j2adzLR9JNALJJMQ0dUSl+oXhkxgC0j/XEg1gHduEGaoU3A2sBuW+LXVtgbezR2GRdhYYhJpUgRBnlBPg8KkR/pCGUYYlGHYew1qjXYhaa6ElbI0CyU0M8i/WlAn7acHa+5P11xr5bVcx8VRkVK7QhR82o348ev7tG/I03h2fdh/mGL2Tnas4r1A8v2gWJ3Lxk1PtrxN978kG
8ffcyb5SW/WD2jVh6zp8NYhpL3+gve7y647Ge8aOe8fSNymVdXR/BJzfQjRbGJw777SlJlIK1CVyPTgNR4Dia5Vs5ZaALmqMOYQFWNHZDVgc5ZnNO43uI7LQydBKTYtU7GvAK62U1q/FLCTWauaA+my7HRCtek7Rci1coSgDEqlFFGUUo6TjTj/FL9yJJQSUpm2rz9iC8Vm8eJofLlFX/vS3/O35q/w0/b+/zvL7/GT9+7T3FZDNsSIDGyHztqF+kYt5mJEocV+AHYIQFN+frv1SiV03tgSWZsdcm8N+brbLxmB7NLlcduT5ITx9cHOdKn42nVKIkYDC9TDd+1zyJTIyAYTUpSqjy28jSTlnndMi9bZoWADMfFDq0iWgW6YHndTnm+mnOzkBPlXtfUzwxH7wfOf/8T3Psfwqd+x9inT/D3j9k8mbI9M2l+prla7V2zZmQxxATXRx1xDfi5R9UeWzmqumdayY3trNnwxuSWe+WSF+0R113D1W7KuhPQcr0raXcFYWeEKVZEdOmpm46npzcATGxHaTyfrI55vZqwuW3uJKvo0lNWjvP5motmzePJLZpImxJt2mB4sTniJ8/vES4r6kuN3XCHwRNK6I6S6XYVhKEyvIhcX/0oeUGRZIGgd3pg6u3f6+5UlkvFcRx9E++whiDf39MYf4o4V94o3vkffofdJwcmyF9HHZggh/p5qwNb5FA/73Vgifz81IEJ8ldZKqJTs2KtNN++1viJwXdaGr/8lLj3QKw6hV3pu9r/1GCGMoqMJkW9EhhSOswuJqaFrH66TkuKQKZI6xQPm1Z0NSObI28jr37HncIXoIIeKdAmiuSgioM3hu7HFdeolTBd2igSh7Sy7ybyuksPzNGIj0nMq4l2j2kRpKFDp7jfJJGI/SiDUGUkak108rlYjp/3ZcRmNojR+F4l/XrS79dukGowBa/skBYix5DiYlM6C06l1c3ULGRpBhA9RKNkEThkwwVQRZCo3trTFxaUpViMiR52LeNSLgz95YTX85pXR8cUM2m6ZpOWSdlzVO140Cz5xtEz3pq95vunkg7zztEjtvdqjt/1VNc9R+8HJq8MuzPpmDfPJvzj5Zf4yeNzvnb2is1pxZldcW4EZDnRW+a645erj/nV+iMAdtHw5+cPAPjTR2/yD4+/zk04Z/qxomojZhfF1yYFerjUZMbsC/OpxlIShBS+Vbi+wttIP7Wo1Ig3044QFGUpsgEaUCrik2yp3RXCnkjXiGo1eqspb/c8DJI8o1jneRgHJon46+RzmueZnNMBNARQapDT5GjoYS5Y8FG8JoIdI6gnz2Qbu27OP2h/iZuvNHxr/jG/evYhrbN87C4AKK6MyNa6fK1IB5n30a6zdEAJ06IYm0yAfqIkIvnE3ZVhwQhoVIFYIUkmTqF6PaRF5espx2gPaR1unwYiG8wskbgvC0ubEWmSGoCELKuR7+dOc6wHSU/6ng6C1URjCWVkU9Ss6sgn5XhfspXHWM/pfMN5s+Go2PGt8xWcyzau32i4/OqMTy5PaI+fcPzuA6Z//AFxJxdU2O5wH34EH37E/JOH1F98QHte0R6NEbtDXGulcFNkTLo8BNL8d0oTdxpnLb2uWKXbwovihHemF9w7WXHebJjYjsfTW0iMopuuYdVVAm5cN+AUobdsneY9fyZzofAcNTtCVEyqnn4iPjkx3XvCqmC3KPj4tuL55IhPjo+4mKy5SMDoWbnhXrliXu549+iMq/qY6oXFrhPbZZfOWVT41uBrLZ5KGSzLTKc8HzIDKd/bbZQ4d8UAeks01t41ERMby8u9T4CUcV4Gg9wDYwJEEjg2bCOm+8PBOv1QhzrUoQ51qEN9ButzD4LECDGqAQAB8F6jEiiiS4exilBpQmpugkvNng1QK3pr0J1GZ4+EISI3ex9EMf9U0vgDxEJjNskjYyVMC18rfJ0etKdeQIMmCtPEqESnls+bThp6paWptEmSkuUwEtOYAJjEwghO4fe8FsxWYlfzSqDdM4fVrSJUSSaTYzlVAjIGQz8xIQ2lQu8UxXoEQwDiIFUR0AUVEziUxsmPK9p2pTCtwnWKkMbA94ptadA2oE1ElWJqOEToRkU04n2hrERcRq9QKapS7bSwb5KMKGogSMxqZpNErwiVR5mIqjz9OfjaYJep+d2maEkH5QKKtcZfl4RCBnrRTLmpAx9XgXfmHY/Pb3lzfsXXj58DUH3D8ePT+7SnE07ermguHcVK/gA0rwzl0rJ+cc4fPjli8aWat6ZXfGXyAoB7dsk9s+C+WfFA98y1pVElXysk+vVv1R/zpLzivze/ycvyHv4TTbkAu2WQo5hOoZ2MvS/z2MXBTyPHKJudwmwNUYObjYDaujPCiqgdSgeqylFaR924NFcVPmh6Z9A6YHRkuanZvhaqh3JqWMUWvwQx4c2eIvmaUT6ObCkX75j/iudBuqZKNfhm5H3MTWEoxV/CTQTIKKQvpboG9XbD7/W/wOUXZ3z96DlfP33BMkkclu4IsxaZjwoCgqm4x8JQya9km+ZwkQEIeUNZKLRXrOcKVSdWF2LKmT1ulAnS3+oEDCqSh02+nhBwJOtZYv7hOEaCYjACK3vAaI7jVqmxzdIZtQckZSbQAODkP4z3APl+BUoJiKvN+PkE6r6aTnk+9xRHLWdHGy4mgrgdFTu+fvqcLx9f8v2Th7z//JjZN79M80o2cvR+i/397xH7DvfsOXa7pTg+YjpNtCCliIUhlAY3L1m9UdJPEiiGAHpyzOleGBU67N0TnAFV8Lye8Oys4/hkw/3ZinvJ9+SNyS2hURxVc97ljN22JOwMsdW0G9mHLihW9QRVBqbzHVXyDHFFkk61lrgx6IWFW8vlZcWr2THNkQA9F/M1D6cLHtZLzh+s+XHd8n5zjnshc628HRlLpoWwUcKGSr8bQjGeZonSjQMAnSuUEbSCEPeiktVdNmJSSg3MPcfA6FMmmdMmkCXPlX35lGvuskMOdahDHWq//ts3f+/ABjnUz3X9q3/z+wc2yOe4PvcgyP5TXkz/9k4PBpFKh5GNQDZJjMReoVREFZE4DwSnCX1eRtv73izdyJtJK79BR6LRoHVagRbAIDM1OoWwJmyQvwslSTJVAkn2ABezSWaVe4kdkrARCUl2k5uimB60+ySZCGXadiTtg3zebiF2GQyR1XoQICZk87yccNN4YqFBaex61JJrN/6R45fXcrJJ1ImtEaT5in3aXlp99xPZVqgCrkkmsjqOnitOCbhSB2k6i0AMEHKzYiO6E0NP1WfAJRusJgCgV8ReSyqIAlUGwizSJ28W3yRj2340irWbkfUjZpSGaAy+KvjopOGDe2c8uC/+Al88uuKX3viEt+t7XFXHTD4pmLw0VNcplWTZc/ojz+RVwepFyQ82b/KTBxe8d09Wpd+cXvOkuuZBccsbxTWFcjw0Kx6nefXANPz27EdMvtjyu+Vv8uOTB9hXJeWNGpKGBolJXlVO83MwQdyTHdhN+nfQA2PHOTGI9LUh2oBrLG3h6VLCTFP2+CDXQ2k9582G0niutUxG7zXBa5SOu
N7Q9xq1tuhk9KiCGD2aZJBqtyKlyak7eZ7HQaEz/iw38L5UIgGaKvqp+n/be/dgW66zsPP3re7ez/O6b0lXunrYRrbAz7HBj2DLsQdPhWCSKZMMAabGLpJAZpgMjyljGGYIOPbUZBwSSGowlYRkKk4wA2aiVAJ4HCLbgJ3YCIMty7Le1pWu7uu897O71zd/rNW99zn33KsrId179j7fr2rXOad77e719eru861vfQ/yBaXoeHw0+mRbQmML5NEGD4xvZuO2Fi9ZvsCNsQrQ8FjGuNkk2UhiBRwBH3LcAOSL4RxpX4IBKQ2GpcpTpLnlScaCSsrgpMDhEa1WjvdCPg6vWV9K/Y6RNFY1qhLWQrRYSP2uqI1Au8IU3IjQLoYg1bmHk4nRo/pKVbEpfHliOBKZmvDKpC1CnSeCqbzN9XulVNRVk3ZH0cm4sNjh7NIhANJuztJinxML23zHya+xfrzDV265kadXQ46d1Se73HDktSx//jR+YzMkUY2JVHcgQnNlhbR3C6PDTYpYqSjvOHwK2XbMZdQCn2ht/EWqZ9Tht5psbGRsrHQ4vxJcQY51exxp9TjW2sYfFtaGbTb7LYb9Bn47HMSNBAbhnt8uHFlnTJp6skawJqVZybiRUm42SHqOxrpDNx3jtTDOT3Y7PLO8xB0nLnDH4kVeffgplhtDvt4JIWyD9RbJRkrzggtVZGI1meperRMATyUT1oSQKJpwz/uG1km2pQTnptySYhsAH/8P1N5x1TM0lhiqNUnk65OpQ0i493dWJjIMwzAMw5gPzNnVMAzDMAzDMAzDMIwDwTX1BBGR9wDfC7weOA58A/gE8CFV3YptbgMeu8whDqnq+tTxWsDPA98PrABfAt6vqp95Lv1SFdQLPq7SailxqRTwCd4pTpQdi2K5C6tsqSdpeJJmCc1YbjOugBfjkFdBvaCDtE6gGhqBNj15qpTNKtHdVDnNYfD88C0XS+kGT4Uqh0DRkrpSRdFOSPtS50GoCMcLOUG0WmGOi4k+Vcq2R52rK3FIMcmFUVU0SIfgS/DpTtdpiAktmyH0gCSsThYKSdWHQahcsqOKx5g6bMg3mOQuiKExDiCW0PXDkPDUpwlF11F2FN+cyqFQQtJzlBLzpTZCGIKrco6kLlaZSEliEleXy46KGlIIOpba3b5sxxXS6Hrum5Ws0asllx0eM24MbqD1NfRnhfGZFheOhS+u3t7lpsMb3LC0xZmXCpvLHQbHU9rnw6pz55mEhSeHdL/Ro3U+I+236N+4yP03B9f8x04c5uaVDZabA5azISOfcKK5xUta5wB4WfMZbksH3N15nKVTQ3578ZV8ZfVGzq0uUQzS+l4l5kqpKpNUK8MQ7iFNiJ4XElaGp8utluGZcD6EZenIkacpeRKTTbZK1AsiIWnkcJxReDfJsZMUKJCI4puCE2XQzSiLSZiFVyEfOxgkJH1H2nOkA2pvFpeHleqQd0ZJ8hCKUnmKZANPmQllP3prlML40KT0qiaQbYTyzN3HU86MjrNxS5ujCyGM4+jyNlvNnO2sg45iDiBAF8Lqv8s840Ea8mv48CzJyJHFsKnO0wlLTxY0vurZ6Kf0TrUY3gitVk4jJpkdDRshPwOEULo6q+vUODiNpY0dWoK4yfNWJZhNYnJVl8exqZ5HH/M4pNRJZn0GVF4S8ZmZTuha5QiBGKbmwTkghsgR29d9kOBJ5WKlqbQHjXVXh/GVjYytdpvV5RXyOxNu7GzwhqNP4I+Ek5w9tcR9t9/Mxm23cvjBgu4D5/FPPIXmUy8uAFXKtTWSr3m63S40w72mVfWsTpPR8Q6DIynDQ45BSJFD0fX4RvCuSLeFVp5QbjnW18L31xYXaS2OuGFlk0PNPjd2cxYaYzZaLXqd0GbQCxWYXN8h2wl53qRo+jrEqdka0+mMyLOSUScjd41YRau6VxN8r83X8xNsHGvxqiNP87LFcyw1QpnfC0cWeGZrkbWFJdKNlCyGx9S5W6oEwTGMSdNJGeSwIb6rWjHUMr7/6v9Z01RhUzG8rHqeqvetSjiej6GK015DmnDp8eaI/aqPGMYs8aunPnvJNguRMWaJt77p/svus1CZ+eZah8P8BEHR+CngNPBa4GeBt4vIm1V1Osf9h4F7dn1/a9ff/xT4TuB/Bh4F/nvgd0XkTar6pavtlC8ETzIpP6uCJJOuhHwSu1yNPUjh0FxCKEBV5hCQBJxTylhuE5W6tGjlVu6zUBmAzOMzz7jlcMMQulGdoyqv69WFSXmitX/69CRKUyVvOsqp71dx86ESxVS8d5VHrwFlJ4S0+CoxXkvrKg1VaEKVD8PFcIodHtdj0L6Q9BPKttYu3OVUzg8/ncxVw0SrVrSrPu2a5FVVRVwhiFdUQr6RfEEYr1BXsKn6iSb4WOrXNUJ+D4Ak82jmKUN9SNxISMZan7s+pw+TfFGQPAlhPlVsfZU8MuYy8aniUkI8PdWkXMh6isshGylZT2nEMsDb4y6Pn2ywcLjPcmdA+2TO1qEmmyeCkWRwLMOnbRZPj8hWBxx+oKR7tknvmfBobt+yzNdv7OK6OUnqKUtHqz1mqXMHALcsrvPmQ4/w7Z2vc1fjLCtH+ty/cJIHj97A6jiEAGzmoRbtuEwYFhlehUGeUcRcFeMiQQTyPGE0SkkbBajUVa7nvTgAACAASURBVHpGW81wvYopY0rukCqnRy+p8syiLmO93QhVgFphoJPU4xJPkoTQMpeUIbwglqF1ojSzAidK6R2jIqHfazHqx4pAEHKKxJwXSczTkoyEtB9zcmxpNNppSPY7drhCGIWoInym5EvAltDYgMWBYzBY4smT4WY9fnSTpdaI9HCohDMeJzinLHWDVbDbGNNOc25sbzIoM1JXsjluszoMVVGefOoI0ODYl3qkvZLGVoPNYZveiZTWcrhQznmQYATKGiHpr/eTEJmyDKWiIb5zSgmG1snjAoVD04SkH0O7dCrng4+hIenkeaurrcQNUiUPngolQqaOr4Q8FG7q2ZgynkIsrxqrQiWjaAiMNoxGrARTNlMeHZ7kkcPHOXXTRU4thpLPp9qrvOmuR/jEymt59GUnWLjzRhZPn2DlT8N+GY5DMovhCB2OKNfW9g6XAVoryzRvvpHhzYskoyD0eNlRdKnD/yCEtzSi0U83M8os4/EjLS4c7bHcHtJMCw63+xxuB+urrgib4yarm13Gm82QjLlwlPEmH+SOtFnQbo9pNXJ6WRnyhAwn5ZTdSJCLDZ4ZH2JcJLziyDlONEPo1c2tdfpLDb62cIIzm0tsnlsg2UjrHDlSvYtiGGE9vnXekzho0VDh0xjWkumOHDp1rg8fjFdhzOO7USf/FySWdUZ1UtK9MnzNsRGEfaqPGIZhGIbx4nOtjSDfparnp/7+tIisAv8CuBv4val9j6rq5y93IBF5NfDXgPep6q/GbZ8G7gd+Dnj31XZK4+SOHROOkClOEg2T9XISq19n64/JSBWHFlIrq+PM4VKPHyd1/oX6qzEmOynCQcoOIU9Iq8Rnvl7pdEMXcnwUEpL4p7IzKWlVOhGCYaTh8Ymi
6WTS6EbUuQ2qygF1bH8R5JmU0wyrjZUBQbOQODMkZJ26VjplxPCQxgR/viehCklbpzxFlEInOUKkWpmeqlZR5SFAw+qyFASvFSb9dUW18i+oc+TL4QCVkp71BB04yoGjbKYTb5GGx2Ul0ijxXULekr7b4ZlSlRGtrkuVRHV6dV7iCnvlHeHj9an2u3FIDlsZjaoPQOcMSNGg10sYn0g5vNTjyFIPloIHwupSl4uNLuPlFkuPp7TP9Gg/3aN5IdwHrbUO2+dTxsspZSsM9bjV4mw7lOB9ZvEQT920zLkTS7xx4WFOpWu8of0oL2mcoxfdWNbLDq06GQAkeHq+yVCDi8B22SLXhFwTNoo2NzY2KNWRxYH/8tZJxj5lc9xie9xkY9BiMGhQ5HFVfhASp1JGT5NxqOKig5ibpSG4LEzYs6zEq9SVZ+IlxImy3BxypNWjneSsjdtsjNusxWSV/WED7wVfJozGDnKHjEMVGgjeCNl2MIY0tpXWhicdCS56dIwPCUVLKbqhIlHropKMoD8Kxz8n0F0YstQasdAcU3bCM7vUDEaQhis51V3ldQtPhGsjBX4qmvCxG47xsdYbyPodlh8bsvzoiHTQYHO7Qf9UaOcWciQJBpBua0wrDYaf3FfGqBQf812WPiSbVair8EiYpzLqZuT9lKKfkAxdbWR0xcS7apLIeJcRJA3vhsoIMmXbmOSRQCEJ7zZX7DQohETL4fcyGqIqrzGInlK5kvaVxcccxbkmT67dwBPLoQpPZ3nAX7zjfr7npj/i9NHD/OnLT/LY6mEuvCaUl8liRaHmutJaU1buO4+ePoPv99lNub6BG+d0Li7RfiTWUs5SxscX6J9o0LvRkXdBG0w8IryQ9ABN6Y0X6S22aLZzOq0R7SwIcdPCBqcW1thaavLI2lHWNrqU/anKVNspeT+hyBPa3RHt9hjXGTHKY+4XL4yHGWxmuO2U1WKF+0YN7jh6EYATrS1ONDd5/eFvsLnU4sHFE5xeX2awHZ5XHYfyvTKOBvQyvHcm5c3jdR4BCJJBXba98vBKqHP6IGHctPLuoPL8CoaQamyrv6vv1DmE5pd9qY8Yxqyzl3cImIeIMXtcyUsEzFNk1rmmRpBdCkfFF+LPk8/xcO8GcuDjU8cvROTXgJ8Ukaaqji777QrR4FWhgmo1C5BLFEAtXa1Iu8yjzRLVJKzaRW+PetWsSrRZWUWcok1PkSluGCdE4+DDLONQCUIzH9pViVOFkHy18kTIo4FgqvxsWMYLCm5VZUKjy7YmwWCgSUyYWk1Spsr4Vl4jdTfHQrWkX5XHrVzf6/KgMrVaGCuvBLd4nVR3qUqvxuo0ZRKTN/qdRhLRnZdZCupqMhBDcspJQlIplWxb6socZVvxjdC/ZBAqgfhUKGPy2LKllJ0EbXmk4dFUQ7LUvptEIpTBSFSVNa5XXXXnz6nbJXiFuMoLJ1QA8s0YJlMI6SAYhiCUgm2uBxf5YdnhXJ7Qao9Z6QbX+BPLW6y9tGB9aYHRSsbKwiLt8znZavRAeLJHY6PJeCUlbwtlQ0IFlHZMFLnQ4Ez/GP9u2OTsDUu8eflhbskusuL6rLjQiZWkR0tyupJzOMnpiJAgZDKZxOdx0TNHOeLaJOIo47anlv6EUmFLU86XXZ7Mj/DE6CibRfAwOTNcpp3kbOQtLg67bA6bqArDcZgtiyitRs5ic8xSc0gqJcMyY1SGGyUvE0oVsqTkls4aR7MtNooOm0WLByXEOZyrj1cizfCcaAf8YhiwQTel2EzIF4RiQ2iue7K+0jkXS7sOhfGykHdD1YuyCVkf2qEID2WzxdahFD0udJtjGklJlpR1iNywTFkdd3lidBQf79jlZMBN2RoA37H4ZU6+do1fyN7B4AuLrDxc0r5YkOQJEh+IwQ2OcrEgd75OvNtKi/r+T5Nw46sKiYPSK+WUp4iIkjpPp5lTdB3jImE8ysinwp6kkNrIGMLMJs+TxLiJHWWHufSeB+pJde0cFyfPPp0k0oRgOK2MtTDxKnBjyHpKtim43OGfacTr3OA3R6/h7S95iP9i8XG+5YbTDE9k3HfLrQA83jtCwxU801tidbvDxW8+TufMCZafCEa85sURbnuM29jGr2/gt7YuMZAkX4VDJ2+i9YobGR7JGB4ShsfiOB4N3mHBkK1QCqNeg+FGsy5tfG55gVuPrXGivcXLDp1ntdPjYr/LVj8YKUbbTaSfIKsN+r0UaZekrbwO/2o2CtrNnK20Rbmd4foJw7NdHhiE5+GxzmGOLPR52fJ5TjQ3uXP5LMvNARsrwSA3KlNGZcK4SFAVxkUIqxxV3i69Bm4zDSGQw0mVJXWTsfVZHKsshClWFbGmy2KXLa0T60oJSSmTUtW7xn0e2Zf6iGEYhmEY14T9UB3mbfHnA7u2f1hEfhnoAZ8GflpVvzy1/5uBx1R19xLh/UADeGn8/YqIQNosdpTILYokeHFM42PjSNLwlEQPkTIaQeqY7vh3dcw0GFrSRoEv4+r4MIGxC20LQaqQl7QyYpRoIxxb8rgiOOXCLiXBbV2icSS6sddEw0vZhNJPXKnruPXxVA6SNK7G5xMZq1XDsuPxxc5zu0a9ZBw8RfJYSnaotSECoPDUpR+1ilmvjDaAr+bg0bAQrt9kHKr+VZVZkmH4mfUmLttlWyla0BiHfA91iA1QtoSinVAsOIrlArLgMaOFUDk6i4vhHfF4lYdNFRpVyV0ZncRPPH6q60QCRVtxGaCKb0hdiraxFYxjaR9a5xzjcYtBu8FgKRgQOktDVjoDGjeVrLa7XOi0aJ9rsvBUeDTbF3KSQUFrXJJ1Usqmw6cScsIAeU+QMmGrXOazw4yzNyxy5+JZ7mifpxXdjvq+SdPldN2Ik+kai27IihtzNIb0LEiTBReMHsmUYaTiVLow9VdJ3jrDhc6jVL4lPe9YcZ5173i6XOTJ/AirxQKPD8Pq/sinNF3B8cYWx7NNvApDzVgrQrjOdtHkqeEKDVdwItvkRLZBx41puS5PN5YBWEvb0RvExRLVIIlOStEuFOQuVodZdORdR+uiksVwmea6kgzBHRKKDgwPC41NxZVhf+tCqOKyrQsMFnOarTHNrCCpjIIKq4MOj28epvCO0jvaWc5NCyFU49sPPcR/2f0aN7xmnX946J08cfwmVr6W0b7o6ZytHlrHaJRRjBLWmw0kDf2vvGIk5hLS6AFShcRUJXaR0L7RLGg1cjrNnGZWMGyEyXVZOopxgh8lIQ8MgJfodcYk704MLUKDQaMe8qoSTbRMTpdLrT0IXPUu0dA+VvOty8gQtkkRvNeqELc02PzItoCvdfhU7y4evu0or1g5y62ti3zb4qMAfEv3KZbcABdds/7oJbfzyPZRHjgXjGH9c11azyyw8OQhFp66gc6D5yge/8Yl92zx1NM01zdotZr4Uzdy8bWhOs14BeTYmDSbuIONBhnJekZjLVbeamQ8vN7izPElXnrkAic7Gxxt9dheDEaQp7tLrG508ReapFsJ2nMUzbT2Dsu7OZ2FEYvdIcOsZOiaSC+FizEEbr3Bk1mXC0e73HJ
onUOtPql4TnRCuEwmHjdlkSriQAzKMM7nBwucWV9ieLYLa7EiVzEZ32oMfBIMpuOliSFEp/4nKSGPjPjwP6YuhV7dDrG0+gHjuuojhjHPXM5D5IXGPE6Ma4V5isw219UIIiInCa6in1LVL8bNI+CjwCeB88DLCTG7fygi36qqlXJyGFjb47CrU/uviiRRGjEnAUCRlYwkw3sXYu6nwyMghEAkHmnGmP7ChTbR0KCOEOIiIWC/zhUiIakeEMpL5gll7tDc1SuTSfQEkZiIVavjlzIxuMQ+1IlO42Rkx1JuEpJ4SBU+E3dVXhI+d7hBKHmrMMnLUSVmHYcEmT6T4EqtIT8H7DReVJOjsiVk2yEkpOpGMgyGlbKUOvdHVQ4VwmpkbXSojjUV5lOmIamrb1Q/ieVqw/5sW2IsfEiA6cZCMp54vYRSq8FlX4oU3wzJYOuV8mqs4jkrr54qlCDsD94v6urhDNfJV8akGOaTam2/0XSSt6QoJ4lU00FYJfdpgl8NF26wnDE42qDVGdNs54xugc2VjNHhsL99tkX7oicdxMl+Fowqrk6gC801Rbxj3Oty/0aTRw4f5djSNp0s3GtlnEQl4umkY1pJwUpjwGIavE0Opz0yKck1oePGrBZdRj6l78Pq/Ss7p2m5nJWkx6Ib0gDGLNRGliMx023XeW5iiyOuT6+RcWfraYAQeuMzGlKSSUkiHoePSWtD/55oHqXvG5zINuJ5Biy6AW45XMfjrW0e7xxmY9Rio99mvMtI6RqeUhSfJxTtkrKdki8IzfUqXCYYRJprYexGh2GYSp3E1xWQ9gS9kFD0Hf1ORj/zwUsMJmFiZXgeiaVHn+4EI80Tm4fYvrnFt3Ue4X23/AG/3Xklf3jsDloPtWjElBZuDI0NQXsJPk1iEtTJPTVdolTi/FMqL6zqfk0gb8Co7dGmR7JJbqAk8yRZibj4migFbXh8fGBdfLDUa7zXg6dV5QG3Y/U/ekXVYTBTBpEw2Y6hMlU4W9U/wjOtSZh8QwyzqMIAR5D1wD2a8Y2tG3l85RitxRG3x1CRTjomdZ4bWpucbK7xivbTvKL9NG8+FAxxT922woNbJ3js4mEuXOzQOHuS7tMnaa0FGdKBp3V+RPaNCxSnn4JeD9nuccS9NJ5/kc3bOwyPl+hCSdoqQuhiQ4nRYeGZOpsyGC7y1XHCkeUeC40x3TTcLMe727TSgvPZAsPNJoyCMbvKX+N9xnae0FwYkaae5tKIkQMdVqGKjmRbGI67PNQPz367OWapFZ6jdpqzkI1oJTmZeNpJTirl5Hlt9DnS6vFVdwO9RodyIyHbdiE3y1R58hA2E95hZTu8h32jCneM9Y9TUNEQbhnf9UEIkO4ul6E5Z7/oI4ZhGIZhvPhcNyOIiCwA/wYogPdW21X1DPBDU00/KyK/Q1hF+WlC5nWYrFlecuirOPffAP4GQHp0OUzAoV6RbcXY8DxPKKoKFonWhoTKPT3NyqBYCvhSak8MTYOnSG1A8YKOHTlpfY4sK2lkBaUPVWZEgqHB1QYTrZMiqgpF4cIqeJz8qcYcAVXozu4rUidp9Uwv7vtkogSXSYL2Xcy1ERIqJlW4TAEulA4ICnJ1VacMAZXnhGZK2QoVVtLtycQyGYKMNBw/JkwVBR/PEVy1Y6JFF93sK8MO1N4h1QpmFZ5TJR1N+6AijJeVsgn5IvjhpDqNFCEcJRkFuYqWkC9KML5MV1uIKWHCKnk1+Zt4m6gTRCezU/ETr5+kJCbJnbSvQ5iIE9sqUaWn9lapSLcdo2GL/qEM183JmgXaKBl3q0SPDUbnHM0NRzLQepW9MoKohAo+4pVkKKT9jPFWwpOLLWSqko4qE28lCVVcqvs4ST3iPOodSVoy2m5CLkgRbpx/f+Qusqyk08w51t2uJ2gLcVJ4ezt4lXfcmJbkHEm3w08X8p4suiHniyXWyw4bZQcnnmPpFitJ2H8s6bPoBqz7DktuSMeN6MqYI8k2r2o+BUBvIeOhxRt4eHiCr22d4Fx/kUGeMZzKw6DeoWUIDVNXMG4LRTd6XrUcnWegsa2IwuiQUCwoZbyO1Wp62gvJhMuhoGlC2azCw3zwJCplkqAyF3zMe/LMRoOPjV7PkycP886V+/n+45/jts5F7ln+FnoPrgDQWI8JXbeq0KudxlVNwj1S5bqpvChq26ZSGz99I6FsJiG0pxUNbkslyfKYJC3RBvg8CYmXq9CtGC7j05AguAprq+5VzabyRlTP9vTzyFTYS+VdprpzvxDChRwxj84khwiE5yvbFtIedM44/PkGvtHgwZXgFeSbHlFB2yWtxREvOXaBU901TjSCl8Rdnaf5tsVH4CYY+ozVcoEz4xUe2joGwP1nbkQe7HLiP59koSgonjmLjkbInz4EwKGvNzn0kltYfdUyW7ckDG9KcCtjWMkZRgNBupGQDIVsw5H7Ds+st3DdnIXFYIQ4vrjN0fY2Ny+sc2ZliYu9Dr3tFn47hn+NHDJyjAqhWMjpdEakK7423OWDDN9LSXoOXzQYtFIGzRYb7fB/J8vK6OkzJnOehUZ43jrxeVvJBtzaWWV4JON0WrDR7jBab4TEz4Pq3RiuscvDO0d8qBRWVu+l6HmoqUIViplRG/20cLS6473/w84h10sfmdZFTp3cD465hjH7PFePE/McMV4spj1FzCtk/3Gp7/s1IJaSuwe4A3iXqp6+UntVfRL4feANU5tX2Xt15dDU/ssd71dU9fWq+vpkqfuc+m4YhmEYxnxwPfWRaV3k2JE5TsBiGIZhGPuMa770ICIZ8JvAtwLv3BVXe8WvsnOl5X7gL4tIZ1cc7l3AGHj4ag6qOvHsmCQgDDHZzunkpKLIHos6EkMnRKZCJaTyKpFJh2M4i68qPTQKEhfO452QOE/pXR2S4zWsplYeIqFvvi7NKlMdUC91upI6l8Vl1p9kKsZbndZu71XFm2mnEonx4qrhOlxunUtdkN+nUpfJJbYXPwlPqau9VB41VflGBa1+6mUW1KJr/nS1C4mJWYXJirMrBK0SQVYJVgvQMSHZZB6r7EwLOk3MmbijbOiV0LiSXscxMFmxr/otU+Oh4Eqt+6ZJWL0t2w7fSKBZkKYlGtIHkHcSyk5IOFtlqZzOF1Bfh+j14schWaKmSZ3Uc0c/i9BXP3b1qn6RxtgLFQqnuO1kRwjD2LXIM8+w1aAoHZ1mqGzSjeE2DVfgVVhIRizELI1H0u361El0mxlqxkbZxqEsuiGlCwOZScmSG5KT4PA0KOnEm+ZoEjpxmJxcL9DzTc42lugXDUqVusxvrskkt4YoJB6c4GPuFN8MSR9FweVBVhUmiSBj+ejqWmoueBRXp9aQSbWNCk/9TpBc2N5sc/rQCsOljFsaq7y0dZYTi7fxSDvEhehWEkufVu+LXTfflOdTSFapO/MAaTyngs/ZUc4UoByHsDyXhTwf4jSEclXeX5XLmw/eTTp128IuDy835QUy3c34XtuROHjaO0yDV5dOhetV75nQh0ni5Crhsc9B44X2WTh2MRKGhXCmtUgqnnZdgxeOJ1scS3q0pKRE2P
IZX27fDEChCV+5cCvjJQedNrgEfInm4fuaj0lOn6V9ssvwcMZoFKoWpVlJ0Y7DOqxCSyR4xonDS8qoOUnkmzY9hxt9xj4hLxPG45RRTHbLyNVlpH30psqSMmSGAMoiqcPnXCEwdoRoynD8ulQ7kCaexHkK70hjzhivjkxKOumYTiNn0CwYNlI8k2e68uYR1To5bgiBmoS7TIcF1p6DldeQELwSddc9OmfsN33EMAzDMIxrwzU1goiIAz4GvAP4ziuVnNv1vVPAW4Dfmtp8D/B3gO8hlLRDRFLgrwKffC6Z2GXXpFI1GC90xyRS6omz7GjLpYqiyqWTZzcxmEyfp1QJuUemzju9H6Krv1Z92DU7V4nn2y3DpX3d+b3Jp6rSsrvTWhl2nkUPrsrwSjR6TFenqI0rcZI5nWeA6ePvdY4dF3oy4apzb1SGkWp/dW4/+Q5MGU+qxIBXkkcu8/NKyOSYtcHjCt+rkqlC7FN0S68nICp1Al0KqRPDujJMTCvD0Y5jujCx9UkMQ8h0kqxSNIRkOZ0kz80mBjXiuVVBEsUXAs4h1f5Wics8zVZw0V/IxrTTiXt+ZfjouDGdZETL5WRS1Mkty2gVy6SkKQWJ+PoDYVI3JiHXlCxObHMcJUI/3u+5OtZ9h+2yxaDMyH0SQslqA+alF1ljyV7YOQGsJ4iw08AAE2NbZSCs7tV4/+54RqYMn6RK2ihYyEZkUtLTButlh+1xYxIqVd+PsuOerx/p+I7QJMjjqAwV1X0R9xOMBWWjqtYS+5BoXUa3+sh0J6uyuFPP/WXREAJ2iU2yut4yucV3vP72uPdFgamy2lWOk6r0rs8mOXR8Fl5GvuWRVslic8xCNqI5VdKmpw2cDwfp+ybnikUeGNwEwPl+F9d3JGMPeQF+uh527M/SInnHhXLCSXgkvHch1wvRsBqNOOris5QpSRLOmTiPV2FQZvSLBqMiDcbtKeMFLhifqvDGUqUudaxT4XQa2+G0DkVJEk+almRpSeI8mStpJCVpvHBOPCWOwodPGXPUSJXAGi69T6fewXuO066FgPD7pWM5T+xXfcQwjGvL7vAZC48xXgze+qb7LSRmn3GtPUH+MUFJ+LtAT0TeOLXvtKqeFpGPEKYcnyMkIrsT+ABBbfxQ1VhVvyQiHwf+QVzNeQz4YeB24PuuhTCGYRiGYcwkpo8YhmEYxgFF9JIl1BfxZCKPA7deZvffUdWfFZH3EZSHlwKLwAXg9+L+B3cdr01QYP4asAL8CfB+Vb33OfRpC3jwWRvONkcJ13GeMRnnA5NxPjAZ54M7VXXxenfixWC/6SMicp5Qgnfe76mD8NyYjPOByTgfzLuM8y4fBBm7qnrshTzoNTWC7EdE5Iuq+vrr3Y8XE5NxPjAZ5wOTcT4wGY0XmoNwvU3G+cBknA9Mxtln3uWDF0/G61IdxjAMwzAMwzAMwzAM41pjRhDDMAzDMAzDMAzDMA4EZgSBX7neHbgGmIzzgck4H5iM84HJaLzQHITrbTLOBybjfGAyzj7zLh+8SDIe+JwghmEYhmEYhmEYhmEcDMwTxDAMwzAMwzAMwzCMA4EZQQzDMAzDMAzDMAzDOBAcSCOIiNwiIr8hIhsisikinxCRU9e7X88HEblbRHSPz/qudodE5J+IyAUR6YnIp0Tklder31dCRG4WkV8Skc+JSD/Kc9se7a5KJhFpicjfE5EzIjKIx33rtZBlL65GPhG57TLjqiKysqvtvpIv9uk9IvKbIvJE7NODIvJhEVnc1W4mxzD26VllnINxfJeI/J6IPCMiIxE5LSK/LiJ37Wo3y+P4rDLO+jjuRkR+J/b9g7u2z+w4zipi+si+1UdkznWR2CfTRybtZnkcTR+ZtJvJcbwa+WZ9DPdCrqc+oqoH6gN0gIeArwB/Cfhu4MvAI0D3evfvechzN6DAjwBvnPq8fqqNAJ8FTgPfC/xXwKeBC8DN11uGy8h0Fvj3wO9G+W7b1eaqZQI+BqwDfx14B/AJYAC8Zh/Ld1vc/qFd4/pGINnP8sU+fR74deD7gLcB/1Ps4+cBN+tj+BxknPVx/F7g7wHviTL+AHA/sAncOifjeDUyzvQ47iHvmSjPB6e2z/Q4zuIH00f2tT7CnOsiz0HGmX7/YfqI6SMzMI5XKd9Mj+FlZL5u+sh1vwDX4YL/baAEXjq17XagAH7sevfvechzd7x53nmFNt8d27x9atsysAr84vWWYY/+uqnff5C9/ylflUzAq2O7905tS4EHgXv2sXzVi+4Hn+VY+06+2Idje2z7b2Nf//ysj+FzkHGmx/Eyfb0z9vXH52Ecr1LGuRhHYAV4hqBU7FY65m4c9/sH00cue5/thw9zros8Bxln+v2H6SOmj8zIOF6FfHMzhuwDfeQghsO8G/i8qj5cbVDVx4A/IFz0eeTdwNOq+h+rDaq6Afxb9qHMquqvotnVyvRuIAc+PtWuAH4NeJeINF+QTj8HrlK+q2XfyRf7cH6PzV+IP0/GnzM7hrEPVyPj1bIvZbwMF+PPPP6c6XG8DLtlvFr2u4z/B3C/qv7rPfbN4zjud0wfYf/qI/Oui8Q+mD4SmPVxNH0kMNPjuAfzqovAPtBHDqIR5JsJrqe7uR+4a4/ts8LHRKQUkYsi8q9kZ0zxlWQ+JSIL16aLLyhXK9M3A4+pan+Pdg3gpS9eF18QPiwihYR48Xv2iIebJfneFn8+EH/O4xjulrFipsdRRBIRaYjIy4CPEqz3vxZ3z8U4PouMFTM7jiLy5wgrg3/rMk3mYhxnDNNHJsyqPnKQnpuZff/tgekjMzqO866PzLsuAvtHHzmIRpDDwNoe21eBQ9e4Ly8EG8BHCG6Mfx74eeCdwOdE5HhscyWZYTblvlqZnq3d4Re4Xy8UI8LL728Cbwd+Angl8Ici8oqpdjMhn4icBH4O+JSqfjFunqsx6XXx8AAACc9JREFUvIyM8zKO/4kgy9eBVxHca8/FffMyjleScabHUUQyQv//T1V98DLN5mUcZwnTRybMqj5yEJ6bmX7/7cb0kZkfx3nXR+ZWF4H9pY+kV+7q3KJ7bJNr3osXAFX9Y+CPpzZ9WkQ+A/xn4H8E/heCbHMjc+RqZZpJ2VX1DPBDU5s+KyK/Q7Bu/jTw/XH7vpcvWmz/DSHO/b3Tu5iTMbycjHM0jj8ALAF3EP7p/n8i8udU9XHmZxwvK+McjOP7gTbwd6/QZl7GcdaYm2t5QPWRuX9u5uD9V2P6yFyM47zrI/Osi8A+0kcOoifIGntbhg6xtzVp5lDV+wgWxDfETatcXmaYTbmvVqZna7e6x759iao+Cfw+k3GFfS6fiLSAewgv83ep6ump3XMxhs8i4yXM4jiq6gOq+p9i7OY7gAXgJ+PuuRjHZ5Fxr/YzMY4xFOGngZ8BmiKyMlVKr/o7YU7GccYwfWTCrOojB/K5mZX33zSmj1zKLI7jvOsj86qLwP7TRw6iEeR+QgzRbu4CvnqN+/JiMm0du5LM31DV7WvWqxeOq5XpfuB2Eens0W4MPMxssdvquW/liy5vvwl8K/AXVPXLu5rM/Bheh
YyX/SozMo67UdV1Qn+qWMuZH8fd7CHj5ZiFcbwDaAH/kqA4VB8Iq0xrBHfauRvHGcD0kQmzqo8c5OdmFt5/gOkjz/ZVZmQcdzPv+sic6SKwz/SRg2gEuQd4o4jcUW0QkduAt8R9M4+IvB74JkJcGQS5TorI26baLAHfxezKfLUy3QNkwPdMtUuBvwp8UlVH16a7f3aiBfUtTMYV9ql8IuIItbvfAXy3qn5+j2YzPYZXKeNe35uZcdwLETkBvBx4JG6a6XHciz1k3KvNrIzjlwixw7s/EBSRtxMUhbkbxxnA9BFmXh85kM/NDL3/TB+58vdmZhz3Yt71kTnTRWC/6SO6D2oFX8sP0I0X+MuEMjvvBv4EeBRYuN79ex7yfAz4IPBfExKR/ThwAfgGcDS2ccAfAk8C/w3wLuBegpvQLddbhsvI9Z74+b8I1s0fjn+/7bnKRMiqvEZI1vYO4DeAIfC6fSzfR4BfAP5KfCn8EPAEsA7cOQPyVXJ9EHjjrs/NczKGVyPjrI/jbxHcFr879v9vAl+L/f+mORnHq5FxpsfxMnIr8MGpv2d6HGfxg+kj+14fYc51kauUcabff5g+YvrIDIzjVco302N4Bdmviz5y3QW/Thf7FMFlbBPYAv5f4Lbr3a/nKcsHgD8lZGXP4w3zK8CNu9odBv5ZvIH6wH8AXn29+38FufQyn3ufq0yEBDx/n1Bmakiwlt69n+UD3keo8b5GSG71DPCvdr/k9rF8j19Bxp+dkzF8VhnnYBzfD/wR4R9sH3iQkNX7tl3tZnkcn1XGWR/Hy8i9Q+mY9XGc1Q+mj+xrfeQK7/h7n6s8+/W5eTYZZ/39h+kjPzsn4zjX+sjVyDfrY3gF2a+LPiLxAIZhGIZhGIZhGIZhGHPNQcwJYhiGYRiGYRiGYRjGAcSMIIZhGIZhGIZhGIZhHAjMCGIYhmEYhmEYhmEYxoHAjCCGYRiGYRiGYRiGYRwIzAhiGIZhGIZhGIZhGMaBwIwghmEYhmEYhmEYhmEcCMwIYhjGTCAi94rIvVN/3y0iKiJ3X79eGYZhGIZxkDB9xDBmn/R6d8AwDON5ch/wJuCr17sjhmEYhmEcWEwfMYwZw4wghmE8L0Skqaqj63V+Vd0EPn+9zm8YhmEYxvXH9BHDMJ4rFg5jGAYAIvJqEfktEbkoIgMReVBEPhD33Ssivy8i3yUifywiI+BvxX1LIvKPRORpERnF7/2oiMjUsRdE5JdE5BuxzVkR+ZSIvHyqzd8WkQfiuddE5Isi8pev0N9L3E+n+vlOEblPRPoi8hUR+UuXkfeeeK6BiPyBiHz7C3M1DcMwDMN4Ppg+YvqIYbzYmCeIYRiIyLcC9wIPAz8KnAZeBrxqqtk3Ab8I/DzwKLAqIg74d8DrgP8V+DLwncDfB44BPxW/+wvAu+PfDwFHgLcAK/H83wd8BPg54LNAO5778PMQ5yXAPwQ+DFwAfhz4DRF5uao+HM/3uniePwb+OtAHfgj4lIi8WVX/6Hmc1zAMwzCMPwOmj5g+YhjXAlHV690HwzCuMyLyGeB24E5V7e+x/17grcDrVPVLU9v/IvBvgfeq6j+f2v5PgB8ATqrqBRH5CvBJVf2xy5z/HwFvVtXXXaGP9wKo6t3x77uB/wi8XVXvnWrzFuAuVX0objsOnAF+RlU/FLf9B+Am4NWqOo7bEuArwIOqeslKjWEYhmEYLy6mj5g+YhjXAguHMYwDjoh0CP+oP7aXwjHF49MKR+StgAf+9a7t/xJoEBKFAXwB+O9E5KdE5PXxH/w0XwBeE11U3xn79Hx5qFI4AFT1HHAOOAUgIm3gbcD/A3gRSUUkBQT4VJTJMAzDMIxriOkjpo8YxrXCjCCGYRwivAtOP0u7M3tsOwys7pGQ7Jmp/QA/AnwUeB9BwTgnIr8wpVz838APA98G/C7BtfUTInLbc5CjYnWPbSOgNdWnBPgZIN/1+R+AQ9Gt1jAMwzCMa4fpI6aPGMY1wXKCGIaxRlg9Ofks7faKnVsFDotIo3LjjNwQf14EUNVt4APAB0TkVuA9wP8OjIH3a4jL+yjwURE5BHwHISb34wRF5IVknSDvPyYoO5egqv4FPqdhGIZhGFfG9JFdmD5iGC8OZl00jANOdDn9feD7o2vmc+HThPfI9+za/n0EheKSknGq+oSqfoSQtOxb9ti/pqofB359r/1/VlS1R0hC9mrgPlX94u7PC31OwzAMwzCujOkjpo8YxrXCPEEMwwD4CYIC8TkR+QjBFfUO4DWq+iNX+N5vExSWXxaRY8D9wF8AfhD4sKpeABCRzwH3EBSNbUIM7KuBfxH3/wqwBXyOEC/7TYREZp98YcWs+THgM8Dvisg/JbjWHiVklU9U9SdfpPMahmEYhnF5TB8xfcQwXnTMCGIYBqr6BRF5C6Ek3C8BTeAJ4Fef5XteRL4T+BDwfkKpuccJ/9T/wVTTzwB/BfhJwnvnUeBHVfUX4/4/AN5LUDSWgacJycz+txdAvL36fZ+IvCEe/xfjOc8D9wG//GKc0zAMwzCMK2P6iOkjhnEtsBK5hmEYhmEYhmEYhmEcCCwniGEYhmEYhmEYhmEYBwIzghiGYRiGYRiGYRiGcSAwI4hhGIZhGIZhGIZhGAcCM4IYhmEYhmEYhmEYhnEgMCOIYRiGYRiGYRiGYRgHAjOCGIZhGIZhGIZhGIZxIDAjiGEYhmEYhmEYhmEYBwIzghiGYRiGYRiGYRiGcSD4/wGjFLTYVw0vywAAAABJRU5ErkJggg==\n",
+ "text/plain": [
+ "