diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index db7af520..b2d30600 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -12,7 +12,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [2.7, 3.7, 3.8, 3.9] + python-version: ["2.7", "3.7", "3.8", "3.9", "3.10"] steps: - uses: actions/checkout@v2 @@ -27,8 +27,9 @@ jobs: - name: Install dependencies run: | + sudo apt-get install libhdf5-serial-dev liblzo2-dev -y python -m pip install --upgrade pip - python -m pip install flake8 pytest + if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - name: Build package diff --git a/.github/workflows/regenerate.yml b/.github/workflows/regenerate.yml new file mode 100644 index 00000000..e6faeb87 --- /dev/null +++ b/.github/workflows/regenerate.yml @@ -0,0 +1,53 @@ +name: Regenerate nml.py + +on: + push: + branches: [ master, development ] + pull_request: + branches: [ master, development ] + +jobs: + build: + + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["2.7", "3.7", "3.8", "3.9", "3.10"] + + steps: + - uses: actions/checkout@v2 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: | + sudo apt-get install libhdf5-serial-dev liblzo2-dev -y + python -m pip install --upgrade pip + if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi + if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + + - name: Rebuild nml.py + run: | + cd neuroml/nml && rm -f nml.py && ./regenerate-nml.sh -a + + - name: Build package + run: | + pip install .[full] + + - name: Test with pytest + run: | + pytest + + - name: Run examples + run: | + cd ./neuroml/examples && python run_all.py + + - name: Lint with flake8 + run: | + # stop the build if there are Python syntax errors 
or undefined names + flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics + # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide + flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics diff --git a/.gitignore b/.gitignore index 0284d3ec..84a53868 100644 --- a/.gitignore +++ b/.gitignore @@ -50,3 +50,5 @@ neuroml/test/*.h5 /mongoo .venv +/data +.mypy_cache/ diff --git a/README.md b/README.md index e02ec9c7..6f6fe309 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,5 @@ ## Introduction -[![Travis CI](https://travis-ci.org/NeuralEnsemble/libNeuroML.svg?branch=master)](https://travis-ci.org/NeuralEnsemble/libNeuroML) [![GH Build](https://github.com/NeuralEnsemble/libNeuroML/actions/workflows/ci.yml/badge.svg)](https://github.com/NeuralEnsemble/libNeuroML/actions/workflows/ci.yml) [![Documentation Status](https://readthedocs.org/projects/libneuroml/badge/?version=latest)](https://libneuroml.readthedocs.io/en/latest/?badge=latest) [![PyPI](https://img.shields.io/pypi/v/libNeuroML)](https://pypi.org/project/libNeuroML/) @@ -10,6 +9,7 @@ [![GitHub issues](https://img.shields.io/github/issues/NeuralEnsemble/libNeuroML)](https://github.com/NeuralEnsemble/libNeuroML/issues) [![GitHub Org's stars](https://img.shields.io/github/stars/NeuralEnsemble?style=social)](https://github.com/NeuralEnsemble) [![Twitter Follow](https://img.shields.io/twitter/follow/NeuroML?style=social)](https://twitter.com/NeuroML) +[![Gitter](https://badges.gitter.im/NeuroML/community.svg)](https://gitter.im/NeuroML/community?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) This package provides Python libNeuroML, for working with neuronal models specified in [NeuroML 2](http://neuroml.org/neuromlv2). 
@@ -25,7 +25,7 @@ Documentation is available at http://readthedocs.org/docs/libneuroml/en/latest/ For installation instructions, see http://readthedocs.org/docs/libneuroml/en/latest/install.html -For an overview of all NeuroML related libraries/documentation/publications see https://neuroml.org/getneuroml. +For an overview of all NeuroML related libraries/documentation/publications see https://docs.neuroml.org ## pyNeuroML @@ -36,50 +36,77 @@ pyNeuroML builds on: [libNeuroML](https://github.com/NeuralEnsemble/libNeuroML) ## Development process for libNeuroML -Most of the work happens in the [development branch](https://github.com/NeuralEnsemble/libNeuroML/tree/development). That branch is kept up to date with the development branches for [NeuroML 2](https://github.com/NeuroML/NeuroML2/tree/development) and related libraries. See https://neuroml.org/getneuroml for an overview of the various NeuroML libraries. +Most of the work happens in the [development branch](https://github.com/NeuralEnsemble/libNeuroML/tree/development). +That branch is kept up to date with the development branches for [NeuroML 2](https://github.com/NeuroML/NeuroML2/tree/development) and related libraries. +See https://docs.neuroml.org/ for an overview of the various NeuroML libraries. ## Changelog +### version 0.2.57 (dev) + +- Enable Python 3.10 support +- Regenerate nml.py with generateDS using Python 3 +- Add generic `add` method to all NeuroML ComponentType classes that allows users to easily construct their NeuroML documents. +- Improve unit tests +- DEPRECATION notice: `append_to_element` will be deprecated in future releases, please use the `add` method instead + +### version 0.2.56 + +- Documentation updates for RTD and other minor fixes. + +### version 0.2.55 + +- Patch release with minor changes under the hood. +- Use PyTest for testing. +- Enable CI on GitHub Actions ### version 0.2.54 - - Using Schema for NeuroML v2.1. 
Better compatibility with Python 3 - ### version 0.2.50 - - Updated to use the final stable Schema for NeuroML v2.0 +- Using Schema for NeuroML v2.1. Better compatibility with Python 3 + +### version 0.2.50 + +- Updated to use the final stable Schema for NeuroML v2.0 ### version 0.2.47 - - Updated to use the final stable Schema for NeuroML v2beta5 + +- Updated to use the final stable Schema for NeuroML v2beta5 ### version 0.2.18 - - Updated to use the final stable Schema for NeuroML v2beta4 - - Tested with Python 3 + +- Updated to use the final stable Schema for NeuroML v2beta4 +- Tested with Python 3 ### version 0.2.4 - - Updated to use the Schema for NeuroML v2beta4 + +- Updated to use the Schema for NeuroML v2beta4 ### version 0.2.2 - - Updated to use the Schema for NeuroML v2beta3 - - Ensures numpy & pytables are only required when using non-XML loaders/writers + +- Updated to use the Schema for NeuroML v2beta3 +- Ensures numpy & pytables are only required when using non-XML loaders/writers ### version 0.2.0 - - Updated to use the Schema for NeuroML v2beta2 + +- Updated to use the Schema for NeuroML v2beta2 ### version 0.1.9 - - Minor release: Update to latest schema + +- Minor release: Update to latest schema ### version 0.1.8 - - Several Bug fixes and small enhamcements - - Support for latest NeuroML schema (see change outline) - - JSON serialization - - MongoDB backend - - HDF5 serialization - - Improved installation process - - All usage examples are now run on the Travis-CI continuous integration server to confirm that that they do not error. - - Schema validation utility - - Improved documentation and documentation new look +- Several Bug fixes and small enhamcements +- Support for latest NeuroML schema (see change outline) +- JSON serialization +- MongoDB backend +- HDF5 serialization +- Improved installation process +- All usage examples are now run on the Travis-CI continuous integration server to confirm that that they do not error. 
+- Schema validation utility +- Improved documentation and documentation new look -:copyright: Copyright 2020 by the libNeuroML team, see [AUTHORS](AUTHORS). Modified BSD License, see [LICENSE](LICENSE) for details. +:copyright: Copyright 2021 by the libNeuroML team, see [AUTHORS](AUTHORS). Modified BSD License, see [LICENSE](LICENSE) for details. [![Build Status](https://api.travis-ci.org/NeuralEnsemble/libNeuroML.png)](https://travis-ci.org/NeuralEnsemble/libNeuroML) diff --git a/doc/api.rst b/doc/api.rst deleted file mode 100644 index 54487358..00000000 --- a/doc/api.rst +++ /dev/null @@ -1,13 +0,0 @@ -API documentation -================= - -.. toctree:: - :maxdepth: 2 - - coreclasses - loaders - writers - utils - arraymorph - - diff --git a/doc/conf.py b/doc/conf.py index da0c6f79..c59b84d7 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -17,41 +17,45 @@ # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -#sys.path.insert(0, os.path.abspath('.')) +# sys.path.insert(0, os.path.abspath('.')) -sys.path.insert(0, os.path.abspath('../neuroml')) -sys.path.insert(0, os.path.abspath('..')) +sys.path.insert(0, os.path.abspath("../neuroml")) +sys.path.insert(0, os.path.abspath("..")) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' +# needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 
-extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo', - 'sphinxcontrib.bibtex'] +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.doctest", + "sphinx.ext.todo", + "sphinxcontrib.bibtex", +] -bibtex_bibfiles = ['refs.bib'] +bibtex_bibfiles = ["refs.bib"] # Include TODOs in docs todo_include_todos = True # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix of source filenames. -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. -#source_encoding = 'utf-8-sig' +# source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = u'libNeuroML' -copyright = u'2021, libNeuroML authors and contributors' +project = u"libNeuroML" +copyright = u"2021, libNeuroML authors and contributors" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -59,127 +63,148 @@ # # The short X.Y version. version = "" -for aline in open('../neuroml/__init__.py'): +for aline in open("../neuroml/__init__.py"): # space here is important since __version__ is used in generation of # version_info also - if '__version__ =' in aline: - version = aline.split("'")[1] + if "__version__ =" in aline: + version = aline.split('"')[1] # The full version, including alpha/beta/rc tags. 
release = version +autodoc_class_signature = "mixed" +autoclass_content = "class" + +# gds specific members to exclude from nml.py doc +autodoc_default_options = { + "exclude-members": "build, buildAttributes, buildChildren, exportAttributes, export, exportChildren, factory, hasContent_, member_data_items_, subclass, superclass, warn_count, validate_allowedSpaces, validate_BlockTypes, validate_channelTypes, validate_DoubleGreaterThanZero, validate_gateTypes, validate_MetaId, validate_MetaId_patterns_, validate_Metric, validate_networkTypes, validate_NeuroLexId, validate_NeuroLexId_patterns_, validate_Nml2Quantity, validate_Nml2Quantity_capacitance, validate_Nml2Quantity_capacitance_patterns_, validate_Nml2Quantity_concentration, validate_Nml2Quantity_concentration_patterns_, validate_Nml2Quantity_conductance, validate_Nml2Quantity_conductanceDensity, validate_Nml2Quantity_conductanceDensity_patterns_, validate_Nml2Quantity_conductance_patterns_, validate_Nml2Quantity_conductancePerVoltage, validate_Nml2Quantity_conductancePerVoltage_patterns_, validate_Nml2Quantity_current, validate_Nml2Quantity_currentDensity, validate_Nml2Quantity_currentDensity_patterns_, validate_Nml2Quantity_current_patterns_, validate_Nml2Quantity_length, validate_Nml2Quantity_length_patterns_, validate_Nml2Quantity_none, validate_Nml2Quantity_none_patterns_, validate_Nml2Quantity_patterns_, validate_Nml2Quantity_permeability, validate_Nml2Quantity_permeability_patterns_, validate_Nml2Quantity_pertime, validate_Nml2Quantity_pertime_patterns_, validate_Nml2Quantity_resistance, validate_Nml2Quantity_resistance_patterns_, validate_Nml2Quantity_rhoFactor, validate_Nml2Quantity_rhoFactor_patterns_, validate_Nml2Quantity_specificCapacitance, validate_Nml2Quantity_specificCapacitance_patterns_, validate_Nml2Quantity_temperature, validate_Nml2Quantity_temperature_patterns_, validate_Nml2Quantity_time, validate_Nml2Quantity_time_patterns_, validate_Nml2Quantity_voltage, 
validate_Nml2Quantity_voltage_patterns_, validate_NmlId, validate_NmlId_patterns_, validate_NonNegativeInteger, validate_Notes, validate_PlasticityTypes, validate_populationTypes, validate_PositiveInteger, validate_ZeroOrOne, validate_ZeroToOne" +} + # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. -#language = None +# language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: -#today = '' +# today = '' # Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' +# today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ['_build'] +exclude_patterns = ["_build"] # The reST default role (used for this markup: `text`) to use for all documents. -#default_role = None +# default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). -#add_module_names = True +# add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. -#show_authors = False +# show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] +# modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = 'nature' +html_theme = "pydata_sphinx_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. 
For a list of options available for each theme, see the # documentation. -#html_theme_options = {} +html_theme_options = { + "github_url": "https://github.com/NeuralEnsemble/libNeuroML", + "use_edit_page_button": True, + "show_toc_level": 1, + "external_links": [ + {"name": "NeuroML Documentation", "url": "https://docs.neuroml.org"}, + ], +} + +html_context = { + "github_user": "NeuralEnsemble", + "github_repo": "libNeuroML", + "github_version": "development", +} # Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] +# html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". -#html_title = None +# html_title = None # A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None +# html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. -html_logo = '_static/neuroml_logo.png' +html_logo = "_static/neuroml_logo.png" # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. -#html_favicon = None +# html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +html_static_path = ["_static"] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' +# html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. -#html_use_smartypants = True +# html_use_smartypants = True # Custom sidebar templates, maps document names to template names. 
-#html_sidebars = {} +# html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. -#html_additional_pages = {} +# html_additional_pages = {} # If false, no module index is generated. -#html_domain_indices = True +# html_domain_indices = True # If false, no index is generated. -#html_use_index = True +# html_use_index = True # If true, the index is split into individual pages for each letter. -#html_split_index = False +# html_split_index = False # If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True +# html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True +# html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True +# html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. -#html_use_opensearch = '' +# html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None +# html_file_suffix = None # Output file base name for HTML help builder. -htmlhelp_basename = 'libNeuroMLdoc' +htmlhelp_basename = "libNeuroMLdoc" # -- Options for LaTeX output -------------------------------------------------- @@ -197,29 +222,34 @@ # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ - ('index', 'libNeuroML.tex', 'libNeuroML Documentation', - 'libNeuroML authors and contributors', 'manual'), + ( + "index", + "libNeuroML.tex", + "libNeuroML Documentation", + "libNeuroML authors and contributors", + "manual", + ), ] # The name of an image file (relative to this directory) to place at the top of # the title page. 
-#latex_logo = None +# latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = False +# latex_use_parts = False # If true, show page references after internal links. -#latex_show_pagerefs = False +# latex_show_pagerefs = False # If true, show URL addresses after external links. -#latex_show_urls = False +# latex_show_urls = False # Documents to append as an appendix to all manuals. -#latex_appendices = [] +# latex_appendices = [] # If false, no module index is generated. -#latex_domain_indices = True +# latex_domain_indices = True # -- Options for manual page output -------------------------------------------- @@ -227,12 +257,17 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ - ('index', 'libneuroml', 'libNeuroML Documentation', - ['libNeuroML authors and contributors'], 1) + ( + "index", + "libneuroml", + "libNeuroML Documentation", + ["libNeuroML authors and contributors"], + 1, + ) ] # If true, show URL addresses after external links. -#man_show_urls = False +# man_show_urls = False # -- Options for Texinfo output ------------------------------------------------ @@ -241,16 +276,22 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - ('index', 'libNeuroML', 'libNeuroML Documentation', - 'libNeuroML authors and contributors', 'libNeuroML', 'This package provides libNeuroML for working with neuronal models specified in NeuroML 2.', - 'Miscellaneous'), + ( + "index", + "libNeuroML", + "libNeuroML Documentation", + "libNeuroML authors and contributors", + "libNeuroML", + "This package provides libNeuroML for working with neuronal models specified in NeuroML 2.", + "Miscellaneous", + ), ] # Documents to append as an appendix to all manuals. -#texinfo_appendices = [] +# texinfo_appendices = [] # If false, no module index is generated. 
-#texinfo_domain_indices = True +# texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' +# texinfo_show_urls = 'footnote' diff --git a/doc/coreclasses.rst b/doc/coreclasses.rst deleted file mode 100644 index 6abbdd86..00000000 --- a/doc/coreclasses.rst +++ /dev/null @@ -1,14 +0,0 @@ -:mod:`nml` Module (NeuroML Core classes) ------------------------------------------ - -Note: This module is included in the top level of the `neuroml` package, so you can use these classes by importing neuroml: - -:: - - from neuroml import AdExIaFCell - -.. automodule:: neuroml.nml.nml - :members: - - - diff --git a/doc/2012_06_26_neuroml_with_pymoose.pdf b/doc/devdocs/2012_06_26_neuroml_with_pymoose.pdf similarity index 100% rename from doc/2012_06_26_neuroml_with_pymoose.pdf rename to doc/devdocs/2012_06_26_neuroml_with_pymoose.pdf diff --git a/doc/how_to_contribute.rst b/doc/devdocs/how_to_contribute.rst similarity index 100% rename from doc/how_to_contribute.rst rename to doc/devdocs/how_to_contribute.rst diff --git a/doc/implementation_of_bindings.rst b/doc/devdocs/implementation_of_bindings.rst similarity index 100% rename from doc/implementation_of_bindings.rst rename to doc/devdocs/implementation_of_bindings.rst diff --git a/doc/devdocs/index.rst b/doc/devdocs/index.rst new file mode 100644 index 00000000..7d373357 --- /dev/null +++ b/doc/devdocs/index.rst @@ -0,0 +1,12 @@ +Contributing +************ + +.. 
toctree:: + :maxdepth: 2 + + how_to_contribute + regenerate_docs + implementation_of_bindings + meeting_june_2012 + nodes_segments_sections + diff --git a/doc/meeting_june_2012.rst b/doc/devdocs/meeting_june_2012.rst similarity index 100% rename from doc/meeting_june_2012.rst rename to doc/devdocs/meeting_june_2012.rst diff --git a/doc/nodes_segments_sections.rst b/doc/devdocs/nodes_segments_sections.rst similarity index 100% rename from doc/nodes_segments_sections.rst rename to doc/devdocs/nodes_segments_sections.rst diff --git a/doc/devdocs/regenerate_docs.rst b/doc/devdocs/regenerate_docs.rst new file mode 100644 index 00000000..66e98283 --- /dev/null +++ b/doc/devdocs/regenerate_docs.rst @@ -0,0 +1,8 @@ +Regenerating documentation +========================== + +Please create a virtual environment and use the `requirements.txt` file to install the necessary bits. + +In most cases, running `make html` should be sufficient to regenerate the documentation. +However, if any changes to `nml.py` have been made, the `nml-core-docs.py` file in the `helpers` directory will also need to be run. +This script manually adds each class from `nml.py` to the documentation as a sub-section using the `autoclass` sphinx directive instead of the `automodule` directive which does not allow us to do this. diff --git a/doc/helpers/nml-core-docs.py b/doc/helpers/nml-core-docs.py new file mode 100644 index 00000000..b15124d2 --- /dev/null +++ b/doc/helpers/nml-core-docs.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python3 +""" +Enter one line description here. 
+ +File: + +Copyright 2021 Ankur Sinha +Author: Ankur Sinha +""" + + +import textwrap + + +excluded_classes = ['GDSParseError', 'MixedContainer', 'MemberSpec_', + 'BlockTypes', 'Metric', 'PlasticityTypes', 'ZeroOrOne', 'allowedSpaces', + 'channelTypes', 'gateTypes', 'networkTypes', 'populationTypes'] +classes = [] +with open("../../neuroml/nml/nml.py", 'r') as file: + lines = file.readlines() + for line in lines: + if line.startswith("class"): + # get the class signature + aclass = line[len("class "):] + aclass = aclass.split('(')[0] + if aclass not in excluded_classes: + classes.append(aclass) + + +classes.sort() + +with open("../userdocs/coreclasses_list.txt", 'w') as fwrite: + print(".. Generated using nml-core-docs.py", file=fwrite) + for aclass in classes: + towrite = textwrap.dedent( + """ + {} + {} + + .. autoclass:: neuroml.nml.nml.{} + :members: + :undoc-members: + :show-inheritance: + + """.format( + aclass, "#" * len(aclass), aclass, + ) + ) + print(towrite, file=fwrite) diff --git a/doc/index.rst b/doc/index.rst index 2086e01a..b4899bb8 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -6,44 +6,11 @@ Here you will find information on installing, using, and contributing to libNeur For more information on NeuroML standard, other tools in the NeuroML eco-system, the NeuroML community and how to get in touch with us, please see the documentation at https://docs.neuroml.org. -User documentation -************************ - -.. toctree:: - :maxdepth: 2 - - introduction - install - api - examples - bibliography - - -Developer documentation -************************ - .. toctree:: :maxdepth: 2 - how_to_contribute - implementation_of_bindings - -Events and meetings -************************ - -.. toctree:: - :maxdepth: 1 - - meeting_june_2012 - -Miscellaneous -************************ - -.. 
toctree:: - :maxdepth: 1 - - nodes_segments_sections - + userdocs/index + devdocs/index Indices and tables ******************* diff --git a/doc/requirements.txt b/doc/requirements.txt index ef36addc..2a8df123 100644 --- a/doc/requirements.txt +++ b/doc/requirements.txt @@ -1 +1,2 @@ sphinxcontrib-bibtex +pydata-sphinx-theme diff --git a/doc/userdocs/api.rst b/doc/userdocs/api.rst new file mode 100644 index 00000000..aa2f6688 --- /dev/null +++ b/doc/userdocs/api.rst @@ -0,0 +1,16 @@ +API documentation +================= + +The libNeuroML API includes the core NeuroML classes and various utilities. +You can find information on these in the pages below. + +.. toctree:: + :maxdepth: 2 + + coreclasses + loaders + writers + utils + arraymorph + + diff --git a/doc/arraymorph.rst b/doc/userdocs/arraymorph.rst similarity index 100% rename from doc/arraymorph.rst rename to doc/userdocs/arraymorph.rst diff --git a/doc/bibliography.rst b/doc/userdocs/bibliography.rst similarity index 100% rename from doc/bibliography.rst rename to doc/userdocs/bibliography.rst diff --git a/doc/userdocs/coreclasses.rst b/doc/userdocs/coreclasses.rst new file mode 100644 index 00000000..275208fb --- /dev/null +++ b/doc/userdocs/coreclasses.rst @@ -0,0 +1,45 @@ +:mod:`nml` Module (NeuroML Core classes) +----------------------------------------- + +These NeuroML core classes are Python representations of the Component Types defined in the `NeuroML standard `__ . +These can be used to build NeuroML models in Python, and these models can then be exported to the standard XML NeuroML representation. +These core classes also contain some utility functions to make it easier for users to carry out common tasks. + +Each NeuroML Component Type is represented here as a Python class. +Due to implementation limitations, whereas NeuroML Component Types use `lower camel case naming `_, the Python classes here use `upper camel case naming `__. 
+So, for example, the :code:`adExIaFCell` Component Type in the NeuroML schema becomes the :code:`AdExIaFCell` class here, and :code:`expTwoSynapse` becomes the :code:`ExpTwoSynapse` class. + +The :code:`child` and :code:`children` elements that NeuroML Component Types can have are represented in the Python classes as variables. +The variable names, to distinguish them from class names, use `snake case `__. +So for example, the :code:`cell` NeuroML Component Type has a corresponding :code:`Cell` Python class here. +The :code:`biophysicalProperties` child Component Type in :code:`cell` is represented as the :code:`biophysical_properties` list variable in the :code:`Cell` Python class. +The class signatures list all the child/children elements and text fields that the corresponding Component Type possesses. +To again use the :code:`Cell` class as an example, the construction signature is this: + +:: + + class neuroml.nml.nml.Cell(neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, morphology_attr=None, biophysical_properties_attr=None, morphology=None, biophysical_properties=None, extensiontype_=None, **kwargs_) + +As can be seen here, it includes both the :code:`biophysical_properties` and :code:`morphology` child elements as variables. + +Please see the examples in the `NeuroML documentation `__ to see usage examples of libNeuroML. +Please also note that this module is also included in the top level of the `neuroml` package, so you can use these classes by importing neuroml: + +:: + + from neuroml import AdExIaFCell + +.. + To get the list of all the validate* methods and members to add to the exlude list, use this: + grep -Eo "def[[:space:]]*validate([[:alnum:]]|_)*|validate([[:alnum:]]|_)*patterns_" nml.py | sed -e 's/^.*def validate/validate/' | sort -h | uniq | tr '\n' ',' + + + +List of Component classes +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This documentation is auto-generated from the `NeuroML schema `__. 
+In case of issues, please refer to the schema documentation for clarifications. +If the schema documentation does not resolve the issue, please `contact us `__. + +.. include:: coreclasses_list.txt diff --git a/doc/userdocs/coreclasses_list.txt b/doc/userdocs/coreclasses_list.txt new file mode 100644 index 00000000..79e38673 --- /dev/null +++ b/doc/userdocs/coreclasses_list.txt @@ -0,0 +1,1903 @@ +.. Generated using nml-core-docs.py + +AdExIaFCell +########### + +.. autoclass:: neuroml.nml.nml.AdExIaFCell + :members: + :undoc-members: + :show-inheritance: + + + +AlphaCondSynapse +################ + +.. autoclass:: neuroml.nml.nml.AlphaCondSynapse + :members: + :undoc-members: + :show-inheritance: + + + +AlphaCurrSynapse +################ + +.. autoclass:: neuroml.nml.nml.AlphaCurrSynapse + :members: + :undoc-members: + :show-inheritance: + + + +AlphaCurrentSynapse +################### + +.. autoclass:: neuroml.nml.nml.AlphaCurrentSynapse + :members: + :undoc-members: + :show-inheritance: + + + +AlphaSynapse +############ + +.. autoclass:: neuroml.nml.nml.AlphaSynapse + :members: + :undoc-members: + :show-inheritance: + + + +Annotation +########## + +.. autoclass:: neuroml.nml.nml.Annotation + :members: + :undoc-members: + :show-inheritance: + + + +Base +#### + +.. autoclass:: neuroml.nml.nml.Base + :members: + :undoc-members: + :show-inheritance: + + + +BaseCell +######## + +.. autoclass:: neuroml.nml.nml.BaseCell + :members: + :undoc-members: + :show-inheritance: + + + +BaseCellMembPotCap +################## + +.. autoclass:: neuroml.nml.nml.BaseCellMembPotCap + :members: + :undoc-members: + :show-inheritance: + + + +BaseConductanceBasedSynapse +########################### + +.. autoclass:: neuroml.nml.nml.BaseConductanceBasedSynapse + :members: + :undoc-members: + :show-inheritance: + + + +BaseConductanceBasedSynapseTwo +############################## + +.. 
autoclass:: neuroml.nml.nml.BaseConductanceBasedSynapseTwo + :members: + :undoc-members: + :show-inheritance: + + + +BaseConnection +############## + +.. autoclass:: neuroml.nml.nml.BaseConnection + :members: + :undoc-members: + :show-inheritance: + + + +BaseConnectionNewFormat +####################### + +.. autoclass:: neuroml.nml.nml.BaseConnectionNewFormat + :members: + :undoc-members: + :show-inheritance: + + + +BaseConnectionOldFormat +####################### + +.. autoclass:: neuroml.nml.nml.BaseConnectionOldFormat + :members: + :undoc-members: + :show-inheritance: + + + +BaseCurrentBasedSynapse +####################### + +.. autoclass:: neuroml.nml.nml.BaseCurrentBasedSynapse + :members: + :undoc-members: + :show-inheritance: + + + +BaseNonNegativeIntegerId +######################## + +.. autoclass:: neuroml.nml.nml.BaseNonNegativeIntegerId + :members: + :undoc-members: + :show-inheritance: + + + +BaseProjection +############## + +.. autoclass:: neuroml.nml.nml.BaseProjection + :members: + :undoc-members: + :show-inheritance: + + + +BasePynnSynapse +############### + +.. autoclass:: neuroml.nml.nml.BasePynnSynapse + :members: + :undoc-members: + :show-inheritance: + + + +BaseSynapse +########### + +.. autoclass:: neuroml.nml.nml.BaseSynapse + :members: + :undoc-members: + :show-inheritance: + + + +BaseVoltageDepSynapse +##################### + +.. autoclass:: neuroml.nml.nml.BaseVoltageDepSynapse + :members: + :undoc-members: + :show-inheritance: + + + +BaseWithoutId +############# + +.. autoclass:: neuroml.nml.nml.BaseWithoutId + :members: + :undoc-members: + :show-inheritance: + + + +BiophysicalProperties +##################### + +.. autoclass:: neuroml.nml.nml.BiophysicalProperties + :members: + :undoc-members: + :show-inheritance: + + + +BiophysicalProperties2CaPools +############################# + +.. autoclass:: neuroml.nml.nml.BiophysicalProperties2CaPools + :members: + :undoc-members: + :show-inheritance: + + + +BlockMechanism +############## + +.. 
autoclass:: neuroml.nml.nml.BlockMechanism + :members: + :undoc-members: + :show-inheritance: + + + +BlockingPlasticSynapse +###################### + +.. autoclass:: neuroml.nml.nml.BlockingPlasticSynapse + :members: + :undoc-members: + :show-inheritance: + + + +Case +#### + +.. autoclass:: neuroml.nml.nml.Case + :members: + :undoc-members: + :show-inheritance: + + + +Cell +#### + +.. autoclass:: neuroml.nml.nml.Cell + :members: + :undoc-members: + :show-inheritance: + + + +Cell2CaPools +############ + +.. autoclass:: neuroml.nml.nml.Cell2CaPools + :members: + :undoc-members: + :show-inheritance: + + + +CellSet +####### + +.. autoclass:: neuroml.nml.nml.CellSet + :members: + :undoc-members: + :show-inheritance: + + + +ChannelDensity +############## + +.. autoclass:: neuroml.nml.nml.ChannelDensity + :members: + :undoc-members: + :show-inheritance: + + + +ChannelDensityGHK +################# + +.. autoclass:: neuroml.nml.nml.ChannelDensityGHK + :members: + :undoc-members: + :show-inheritance: + + + +ChannelDensityGHK2 +################## + +.. autoclass:: neuroml.nml.nml.ChannelDensityGHK2 + :members: + :undoc-members: + :show-inheritance: + + + +ChannelDensityNernst +#################### + +.. autoclass:: neuroml.nml.nml.ChannelDensityNernst + :members: + :undoc-members: + :show-inheritance: + + + +ChannelDensityNernstCa2 +####################### + +.. autoclass:: neuroml.nml.nml.ChannelDensityNernstCa2 + :members: + :undoc-members: + :show-inheritance: + + + +ChannelDensityNonUniform +######################## + +.. autoclass:: neuroml.nml.nml.ChannelDensityNonUniform + :members: + :undoc-members: + :show-inheritance: + + + +ChannelDensityNonUniformGHK +########################### + +.. autoclass:: neuroml.nml.nml.ChannelDensityNonUniformGHK + :members: + :undoc-members: + :show-inheritance: + + + +ChannelDensityNonUniformNernst +############################## + +.. 
autoclass:: neuroml.nml.nml.ChannelDensityNonUniformNernst + :members: + :undoc-members: + :show-inheritance: + + + +ChannelDensityVShift +#################### + +.. autoclass:: neuroml.nml.nml.ChannelDensityVShift + :members: + :undoc-members: + :show-inheritance: + + + +ChannelPopulation +################# + +.. autoclass:: neuroml.nml.nml.ChannelPopulation + :members: + :undoc-members: + :show-inheritance: + + + +ClosedState +########### + +.. autoclass:: neuroml.nml.nml.ClosedState + :members: + :undoc-members: + :show-inheritance: + + + +ComponentType +############# + +.. autoclass:: neuroml.nml.nml.ComponentType + :members: + :undoc-members: + :show-inheritance: + + + +CompoundInput +############# + +.. autoclass:: neuroml.nml.nml.CompoundInput + :members: + :undoc-members: + :show-inheritance: + + + +CompoundInputDL +############### + +.. autoclass:: neuroml.nml.nml.CompoundInputDL + :members: + :undoc-members: + :show-inheritance: + + + +ConcentrationModel_D +#################### + +.. autoclass:: neuroml.nml.nml.ConcentrationModel_D + :members: + :undoc-members: + :show-inheritance: + + + +ConditionalDerivedVariable +########################## + +.. autoclass:: neuroml.nml.nml.ConditionalDerivedVariable + :members: + :undoc-members: + :show-inheritance: + + + +Connection +########## + +.. autoclass:: neuroml.nml.nml.Connection + :members: + :undoc-members: + :show-inheritance: + + + +ConnectionWD +############ + +.. autoclass:: neuroml.nml.nml.ConnectionWD + :members: + :undoc-members: + :show-inheritance: + + + +Constant +######## + +.. autoclass:: neuroml.nml.nml.Constant + :members: + :undoc-members: + :show-inheritance: + + + +ContinuousConnection +#################### + +.. autoclass:: neuroml.nml.nml.ContinuousConnection + :members: + :undoc-members: + :show-inheritance: + + + +ContinuousConnectionInstance +############################ + +.. 
autoclass:: neuroml.nml.nml.ContinuousConnectionInstance + :members: + :undoc-members: + :show-inheritance: + + + +ContinuousConnectionInstanceW +############################# + +.. autoclass:: neuroml.nml.nml.ContinuousConnectionInstanceW + :members: + :undoc-members: + :show-inheritance: + + + +ContinuousProjection +#################### + +.. autoclass:: neuroml.nml.nml.ContinuousProjection + :members: + :undoc-members: + :show-inheritance: + + + +DecayingPoolConcentrationModel +############################## + +.. autoclass:: neuroml.nml.nml.DecayingPoolConcentrationModel + :members: + :undoc-members: + :show-inheritance: + + + +DerivedVariable +############### + +.. autoclass:: neuroml.nml.nml.DerivedVariable + :members: + :undoc-members: + :show-inheritance: + + + +DistalDetails +############# + +.. autoclass:: neuroml.nml.nml.DistalDetails + :members: + :undoc-members: + :show-inheritance: + + + +DoubleSynapse +############# + +.. autoclass:: neuroml.nml.nml.DoubleSynapse + :members: + :undoc-members: + :show-inheritance: + + + +Dynamics +######## + +.. autoclass:: neuroml.nml.nml.Dynamics + :members: + :undoc-members: + :show-inheritance: + + + +EIF_cond_alpha_isfa_ista +######################## + +.. autoclass:: neuroml.nml.nml.EIF_cond_alpha_isfa_ista + :members: + :undoc-members: + :show-inheritance: + + + +EIF_cond_exp_isfa_ista +###################### + +.. autoclass:: neuroml.nml.nml.EIF_cond_exp_isfa_ista + :members: + :undoc-members: + :show-inheritance: + + + +ElectricalConnection +#################### + +.. autoclass:: neuroml.nml.nml.ElectricalConnection + :members: + :undoc-members: + :show-inheritance: + + + +ElectricalConnectionInstance +############################ + +.. autoclass:: neuroml.nml.nml.ElectricalConnectionInstance + :members: + :undoc-members: + :show-inheritance: + + + +ElectricalConnectionInstanceW +############################# + +.. 
autoclass:: neuroml.nml.nml.ElectricalConnectionInstanceW + :members: + :undoc-members: + :show-inheritance: + + + +ElectricalProjection +#################### + +.. autoclass:: neuroml.nml.nml.ElectricalProjection + :members: + :undoc-members: + :show-inheritance: + + + +ExpCondSynapse +############## + +.. autoclass:: neuroml.nml.nml.ExpCondSynapse + :members: + :undoc-members: + :show-inheritance: + + + +ExpCurrSynapse +############## + +.. autoclass:: neuroml.nml.nml.ExpCurrSynapse + :members: + :undoc-members: + :show-inheritance: + + + +ExpOneSynapse +############# + +.. autoclass:: neuroml.nml.nml.ExpOneSynapse + :members: + :undoc-members: + :show-inheritance: + + + +ExpThreeSynapse +############### + +.. autoclass:: neuroml.nml.nml.ExpThreeSynapse + :members: + :undoc-members: + :show-inheritance: + + + +ExpTwoSynapse +############# + +.. autoclass:: neuroml.nml.nml.ExpTwoSynapse + :members: + :undoc-members: + :show-inheritance: + + + +ExplicitInput +############# + +.. autoclass:: neuroml.nml.nml.ExplicitInput + :members: + :undoc-members: + :show-inheritance: + + + +Exposure +######## + +.. autoclass:: neuroml.nml.nml.Exposure + :members: + :undoc-members: + :show-inheritance: + + + +ExtracellularProperties +####################### + +.. autoclass:: neuroml.nml.nml.ExtracellularProperties + :members: + :undoc-members: + :show-inheritance: + + + +ExtracellularPropertiesLocal +############################ + +.. autoclass:: neuroml.nml.nml.ExtracellularPropertiesLocal + :members: + :undoc-members: + :show-inheritance: + + + +FitzHughNagumo1969Cell +###################### + +.. autoclass:: neuroml.nml.nml.FitzHughNagumo1969Cell + :members: + :undoc-members: + :show-inheritance: + + + +FitzHughNagumoCell +################## + +.. autoclass:: neuroml.nml.nml.FitzHughNagumoCell + :members: + :undoc-members: + :show-inheritance: + + + +FixedFactorConcentrationModel +############################# + +.. 
autoclass:: neuroml.nml.nml.FixedFactorConcentrationModel + :members: + :undoc-members: + :show-inheritance: + + + +ForwardTransition +################# + +.. autoclass:: neuroml.nml.nml.ForwardTransition + :members: + :undoc-members: + :show-inheritance: + + + +GapJunction +########### + +.. autoclass:: neuroml.nml.nml.GapJunction + :members: + :undoc-members: + :show-inheritance: + + + +GateFractional +############## + +.. autoclass:: neuroml.nml.nml.GateFractional + :members: + :undoc-members: + :show-inheritance: + + + +GateFractionalSubgate +##################### + +.. autoclass:: neuroml.nml.nml.GateFractionalSubgate + :members: + :undoc-members: + :show-inheritance: + + + +GateHHInstantaneous +################### + +.. autoclass:: neuroml.nml.nml.GateHHInstantaneous + :members: + :undoc-members: + :show-inheritance: + + + +GateHHRates +########### + +.. autoclass:: neuroml.nml.nml.GateHHRates + :members: + :undoc-members: + :show-inheritance: + + + +GateHHRatesInf +############## + +.. autoclass:: neuroml.nml.nml.GateHHRatesInf + :members: + :undoc-members: + :show-inheritance: + + + +GateHHRatesTau +############## + +.. autoclass:: neuroml.nml.nml.GateHHRatesTau + :members: + :undoc-members: + :show-inheritance: + + + +GateHHRatesTauInf +################# + +.. autoclass:: neuroml.nml.nml.GateHHRatesTauInf + :members: + :undoc-members: + :show-inheritance: + + + +GateHHTauInf +############ + +.. autoclass:: neuroml.nml.nml.GateHHTauInf + :members: + :undoc-members: + :show-inheritance: + + + +GateHHUndetermined +################## + +.. autoclass:: neuroml.nml.nml.GateHHUndetermined + :members: + :undoc-members: + :show-inheritance: + + + +GateKS +###### + +.. autoclass:: neuroml.nml.nml.GateKS + :members: + :undoc-members: + :show-inheritance: + + + +GradedSynapse +############# + +.. autoclass:: neuroml.nml.nml.GradedSynapse + :members: + :undoc-members: + :show-inheritance: + + + +GridLayout +########## + +.. 
autoclass:: neuroml.nml.nml.GridLayout + :members: + :undoc-members: + :show-inheritance: + + + +HHRate +###### + +.. autoclass:: neuroml.nml.nml.HHRate + :members: + :undoc-members: + :show-inheritance: + + + +HHTime +###### + +.. autoclass:: neuroml.nml.nml.HHTime + :members: + :undoc-members: + :show-inheritance: + + + +HHVariable +########## + +.. autoclass:: neuroml.nml.nml.HHVariable + :members: + :undoc-members: + :show-inheritance: + + + +HH_cond_exp +########### + +.. autoclass:: neuroml.nml.nml.HH_cond_exp + :members: + :undoc-members: + :show-inheritance: + + + +IF_cond_alpha +############# + +.. autoclass:: neuroml.nml.nml.IF_cond_alpha + :members: + :undoc-members: + :show-inheritance: + + + +IF_cond_exp +########### + +.. autoclass:: neuroml.nml.nml.IF_cond_exp + :members: + :undoc-members: + :show-inheritance: + + + +IF_curr_alpha +############# + +.. autoclass:: neuroml.nml.nml.IF_curr_alpha + :members: + :undoc-members: + :show-inheritance: + + + +IF_curr_exp +########### + +.. autoclass:: neuroml.nml.nml.IF_curr_exp + :members: + :undoc-members: + :show-inheritance: + + + +IafCell +####### + +.. autoclass:: neuroml.nml.nml.IafCell + :members: + :undoc-members: + :show-inheritance: + + + +IafRefCell +########## + +.. autoclass:: neuroml.nml.nml.IafRefCell + :members: + :undoc-members: + :show-inheritance: + + + +IafTauCell +########## + +.. autoclass:: neuroml.nml.nml.IafTauCell + :members: + :undoc-members: + :show-inheritance: + + + +IafTauRefCell +############# + +.. autoclass:: neuroml.nml.nml.IafTauRefCell + :members: + :undoc-members: + :show-inheritance: + + + +Include +####### + +.. autoclass:: neuroml.nml.nml.Include + :members: + :undoc-members: + :show-inheritance: + + + +IncludeType +########### + +.. autoclass:: neuroml.nml.nml.IncludeType + :members: + :undoc-members: + :show-inheritance: + + + +InhomogeneousParameter +###################### + +.. 
autoclass:: neuroml.nml.nml.InhomogeneousParameter + :members: + :undoc-members: + :show-inheritance: + + + +InhomogeneousValue +################## + +.. autoclass:: neuroml.nml.nml.InhomogeneousValue + :members: + :undoc-members: + :show-inheritance: + + + +InitMembPotential +################# + +.. autoclass:: neuroml.nml.nml.InitMembPotential + :members: + :undoc-members: + :show-inheritance: + + + +Input +##### + +.. autoclass:: neuroml.nml.nml.Input + :members: + :undoc-members: + :show-inheritance: + + + +InputList +######### + +.. autoclass:: neuroml.nml.nml.InputList + :members: + :undoc-members: + :show-inheritance: + + + +InputW +###### + +.. autoclass:: neuroml.nml.nml.InputW + :members: + :undoc-members: + :show-inheritance: + + + +Instance +######## + +.. autoclass:: neuroml.nml.nml.Instance + :members: + :undoc-members: + :show-inheritance: + + + +InstanceRequirement +################### + +.. autoclass:: neuroml.nml.nml.InstanceRequirement + :members: + :undoc-members: + :show-inheritance: + + + +IntracellularProperties +####################### + +.. autoclass:: neuroml.nml.nml.IntracellularProperties + :members: + :undoc-members: + :show-inheritance: + + + +IntracellularProperties2CaPools +############################### + +.. autoclass:: neuroml.nml.nml.IntracellularProperties2CaPools + :members: + :undoc-members: + :show-inheritance: + + + +IonChannel +########## + +.. autoclass:: neuroml.nml.nml.IonChannel + :members: + :undoc-members: + :show-inheritance: + + + +IonChannelHH +############ + +.. autoclass:: neuroml.nml.nml.IonChannelHH + :members: + :undoc-members: + :show-inheritance: + + + +IonChannelKS +############ + +.. autoclass:: neuroml.nml.nml.IonChannelKS + :members: + :undoc-members: + :show-inheritance: + + + +IonChannelScalable +################## + +.. autoclass:: neuroml.nml.nml.IonChannelScalable + :members: + :undoc-members: + :show-inheritance: + + + +IonChannelVShift +################ + +.. 
autoclass:: neuroml.nml.nml.IonChannelVShift + :members: + :undoc-members: + :show-inheritance: + + + +Izhikevich2007Cell +################## + +.. autoclass:: neuroml.nml.nml.Izhikevich2007Cell + :members: + :undoc-members: + :show-inheritance: + + + +IzhikevichCell +############## + +.. autoclass:: neuroml.nml.nml.IzhikevichCell + :members: + :undoc-members: + :show-inheritance: + + + +LEMS_Property +############# + +.. autoclass:: neuroml.nml.nml.LEMS_Property + :members: + :undoc-members: + :show-inheritance: + + + +Layout +###### + +.. autoclass:: neuroml.nml.nml.Layout + :members: + :undoc-members: + :show-inheritance: + + + +LinearGradedSynapse +################### + +.. autoclass:: neuroml.nml.nml.LinearGradedSynapse + :members: + :undoc-members: + :show-inheritance: + + + +Location +######## + +.. autoclass:: neuroml.nml.nml.Location + :members: + :undoc-members: + :show-inheritance: + + + +Member +###### + +.. autoclass:: neuroml.nml.nml.Member + :members: + :undoc-members: + :show-inheritance: + + + +MembraneProperties +################## + +.. autoclass:: neuroml.nml.nml.MembraneProperties + :members: + :undoc-members: + :show-inheritance: + + + +MembraneProperties2CaPools +########################## + +.. autoclass:: neuroml.nml.nml.MembraneProperties2CaPools + :members: + :undoc-members: + :show-inheritance: + + + +MixedContainer +############## + +.. autoclass:: neuroml.nml.nml.MixedContainer + :members: + :undoc-members: + :show-inheritance: + + + +Morphology +########## + +.. autoclass:: neuroml.nml.nml.Morphology + :members: + :undoc-members: + :show-inheritance: + + + +NamedDimensionalType +#################### + +.. autoclass:: neuroml.nml.nml.NamedDimensionalType + :members: + :undoc-members: + :show-inheritance: + + + +NamedDimensionalVariable +######################## + +.. autoclass:: neuroml.nml.nml.NamedDimensionalVariable + :members: + :undoc-members: + :show-inheritance: + + + +Network +####### + +.. 
autoclass:: neuroml.nml.nml.Network + :members: + :undoc-members: + :show-inheritance: + + + +NeuroMLDocument +############### + +.. autoclass:: neuroml.nml.nml.NeuroMLDocument + :members: + :undoc-members: + :show-inheritance: + + + +OpenState +######### + +.. autoclass:: neuroml.nml.nml.OpenState + :members: + :undoc-members: + :show-inheritance: + + + +Parameter +######### + +.. autoclass:: neuroml.nml.nml.Parameter + :members: + :undoc-members: + :show-inheritance: + + + +Path +#### + +.. autoclass:: neuroml.nml.nml.Path + :members: + :undoc-members: + :show-inheritance: + + + +PinskyRinzelCA3Cell +################### + +.. autoclass:: neuroml.nml.nml.PinskyRinzelCA3Cell + :members: + :undoc-members: + :show-inheritance: + + + +PlasticityMechanism +################### + +.. autoclass:: neuroml.nml.nml.PlasticityMechanism + :members: + :undoc-members: + :show-inheritance: + + + +Point3DWithDiam +############### + +.. autoclass:: neuroml.nml.nml.Point3DWithDiam + :members: + :undoc-members: + :show-inheritance: + + + +PoissonFiringSynapse +#################### + +.. autoclass:: neuroml.nml.nml.PoissonFiringSynapse + :members: + :undoc-members: + :show-inheritance: + + + +Population +########## + +.. autoclass:: neuroml.nml.nml.Population + :members: + :undoc-members: + :show-inheritance: + + + +Projection +########## + +.. autoclass:: neuroml.nml.nml.Projection + :members: + :undoc-members: + :show-inheritance: + + + +Property +######## + +.. autoclass:: neuroml.nml.nml.Property + :members: + :undoc-members: + :show-inheritance: + + + +ProximalDetails +############### + +.. autoclass:: neuroml.nml.nml.ProximalDetails + :members: + :undoc-members: + :show-inheritance: + + + +PulseGenerator +############## + +.. autoclass:: neuroml.nml.nml.PulseGenerator + :members: + :undoc-members: + :show-inheritance: + + + +PulseGeneratorDL +################ + +.. 
autoclass:: neuroml.nml.nml.PulseGeneratorDL + :members: + :undoc-members: + :show-inheritance: + + + +Q10ConductanceScaling +##################### + +.. autoclass:: neuroml.nml.nml.Q10ConductanceScaling + :members: + :undoc-members: + :show-inheritance: + + + +Q10Settings +########### + +.. autoclass:: neuroml.nml.nml.Q10Settings + :members: + :undoc-members: + :show-inheritance: + + + +RampGenerator +############# + +.. autoclass:: neuroml.nml.nml.RampGenerator + :members: + :undoc-members: + :show-inheritance: + + + +RampGeneratorDL +############### + +.. autoclass:: neuroml.nml.nml.RampGeneratorDL + :members: + :undoc-members: + :show-inheritance: + + + +RandomLayout +############ + +.. autoclass:: neuroml.nml.nml.RandomLayout + :members: + :undoc-members: + :show-inheritance: + + + +ReactionScheme +############## + +.. autoclass:: neuroml.nml.nml.ReactionScheme + :members: + :undoc-members: + :show-inheritance: + + + +Region +###### + +.. autoclass:: neuroml.nml.nml.Region + :members: + :undoc-members: + :show-inheritance: + + + +Requirement +########### + +.. autoclass:: neuroml.nml.nml.Requirement + :members: + :undoc-members: + :show-inheritance: + + + +Resistivity +########### + +.. autoclass:: neuroml.nml.nml.Resistivity + :members: + :undoc-members: + :show-inheritance: + + + +ReverseTransition +################# + +.. autoclass:: neuroml.nml.nml.ReverseTransition + :members: + :undoc-members: + :show-inheritance: + + + +Segment +####### + +.. autoclass:: neuroml.nml.nml.Segment + :members: + :undoc-members: + :show-inheritance: + + + +SegmentEndPoint +############### + +.. autoclass:: neuroml.nml.nml.SegmentEndPoint + :members: + :undoc-members: + :show-inheritance: + + + +SegmentGroup +############ + +.. autoclass:: neuroml.nml.nml.SegmentGroup + :members: + :undoc-members: + :show-inheritance: + + + +SegmentParent +############# + +.. 
autoclass:: neuroml.nml.nml.SegmentParent + :members: + :undoc-members: + :show-inheritance: + + + +SilentSynapse +############# + +.. autoclass:: neuroml.nml.nml.SilentSynapse + :members: + :undoc-members: + :show-inheritance: + + + +SineGenerator +############# + +.. autoclass:: neuroml.nml.nml.SineGenerator + :members: + :undoc-members: + :show-inheritance: + + + +SineGeneratorDL +############### + +.. autoclass:: neuroml.nml.nml.SineGeneratorDL + :members: + :undoc-members: + :show-inheritance: + + + +Space +##### + +.. autoclass:: neuroml.nml.nml.Space + :members: + :undoc-members: + :show-inheritance: + + + +SpaceStructure +############## + +.. autoclass:: neuroml.nml.nml.SpaceStructure + :members: + :undoc-members: + :show-inheritance: + + + +Species +####### + +.. autoclass:: neuroml.nml.nml.Species + :members: + :undoc-members: + :show-inheritance: + + + +SpecificCapacitance +################### + +.. autoclass:: neuroml.nml.nml.SpecificCapacitance + :members: + :undoc-members: + :show-inheritance: + + + +Spike +##### + +.. autoclass:: neuroml.nml.nml.Spike + :members: + :undoc-members: + :show-inheritance: + + + +SpikeArray +########## + +.. autoclass:: neuroml.nml.nml.SpikeArray + :members: + :undoc-members: + :show-inheritance: + + + +SpikeGenerator +############## + +.. autoclass:: neuroml.nml.nml.SpikeGenerator + :members: + :undoc-members: + :show-inheritance: + + + +SpikeGeneratorPoisson +##################### + +.. autoclass:: neuroml.nml.nml.SpikeGeneratorPoisson + :members: + :undoc-members: + :show-inheritance: + + + +SpikeGeneratorRandom +#################### + +.. autoclass:: neuroml.nml.nml.SpikeGeneratorRandom + :members: + :undoc-members: + :show-inheritance: + + + +SpikeGeneratorRefPoisson +######################## + +.. autoclass:: neuroml.nml.nml.SpikeGeneratorRefPoisson + :members: + :undoc-members: + :show-inheritance: + + + +SpikeSourcePoisson +################## + +.. 
autoclass:: neuroml.nml.nml.SpikeSourcePoisson + :members: + :undoc-members: + :show-inheritance: + + + +SpikeThresh +########### + +.. autoclass:: neuroml.nml.nml.SpikeThresh + :members: + :undoc-members: + :show-inheritance: + + + +Standalone +########## + +.. autoclass:: neuroml.nml.nml.Standalone + :members: + :undoc-members: + :show-inheritance: + + + +StateVariable +############# + +.. autoclass:: neuroml.nml.nml.StateVariable + :members: + :undoc-members: + :show-inheritance: + + + +SubTree +####### + +.. autoclass:: neuroml.nml.nml.SubTree + :members: + :undoc-members: + :show-inheritance: + + + +SynapticConnection +################## + +.. autoclass:: neuroml.nml.nml.SynapticConnection + :members: + :undoc-members: + :show-inheritance: + + + +TauInfTransition +################ + +.. autoclass:: neuroml.nml.nml.TauInfTransition + :members: + :undoc-members: + :show-inheritance: + + + +TimeDerivative +############## + +.. autoclass:: neuroml.nml.nml.TimeDerivative + :members: + :undoc-members: + :show-inheritance: + + + +TimedSynapticInput +################## + +.. autoclass:: neuroml.nml.nml.TimedSynapticInput + :members: + :undoc-members: + :show-inheritance: + + + +TransientPoissonFiringSynapse +############################# + +.. autoclass:: neuroml.nml.nml.TransientPoissonFiringSynapse + :members: + :undoc-members: + :show-inheritance: + + + +UnstructuredLayout +################## + +.. autoclass:: neuroml.nml.nml.UnstructuredLayout + :members: + :undoc-members: + :show-inheritance: + + + +VariableParameter +################# + +.. autoclass:: neuroml.nml.nml.VariableParameter + :members: + :undoc-members: + :show-inheritance: + + + +VoltageClamp +############ + +.. autoclass:: neuroml.nml.nml.VoltageClamp + :members: + :undoc-members: + :show-inheritance: + + + +VoltageClampTriple +################## + +.. autoclass:: neuroml.nml.nml.VoltageClampTriple + :members: + :undoc-members: + :show-inheritance: + + + +basePyNNCell +############ + +.. 
autoclass:: neuroml.nml.nml.basePyNNCell + :members: + :undoc-members: + :show-inheritance: + + + +basePyNNIaFCell +############### + +.. autoclass:: neuroml.nml.nml.basePyNNIaFCell + :members: + :undoc-members: + :show-inheritance: + + + +basePyNNIaFCondCell +################### + +.. autoclass:: neuroml.nml.nml.basePyNNIaFCondCell + :members: + :undoc-members: + :show-inheritance: + + diff --git a/doc/examples.rst b/doc/userdocs/examples.rst similarity index 58% rename from doc/examples.rst rename to doc/userdocs/examples.rst index f6bf23b5..2b7e1407 100644 --- a/doc/examples.rst +++ b/doc/userdocs/examples.rst @@ -13,38 +13,38 @@ be tested to confirm they work by running the run_all.py script. Creating a NeuroML morphology ----------------------------- -.. literalinclude:: ../neuroml/examples/morphology_generation.py +.. literalinclude:: ../../neuroml/examples/morphology_generation.py Loading and modifying a file ---------------------------- -.. literalinclude:: ../neuroml/examples/loading_modifying_writing.py +.. literalinclude:: ../../neuroml/examples/loading_modifying_writing.py Building a network ------------------ -.. literalinclude:: ../neuroml/examples/build_network.py +.. literalinclude:: ../../neuroml/examples/build_network.py Building a 3D network --------------------- -.. literalinclude:: ../neuroml/examples/build_3D_network.py +.. literalinclude:: ../../neuroml/examples/build_3D_network.py Ion channels ------------ -.. literalinclude:: ../neuroml/examples/ion_channel_generation.py +.. literalinclude:: ../../neuroml/examples/ion_channel_generation.py PyNN models ----------- -.. literalinclude:: ../neuroml/examples/write_pynn.py +.. literalinclude:: ../../neuroml/examples/write_pynn.py Synapses -------- -.. literalinclude:: ../neuroml/examples/write_syns.py +.. 
literalinclude:: ../../neuroml/examples/write_syns.py Working with JSON serialization ------------------------------- @@ -53,18 +53,18 @@ One thing to note is that the JSONWriter, unlike NeuroMLWriter, will serializing using array-based (Arraymorph) representation if this has been used. -.. literalinclude:: ../neuroml/examples/json_serialization.py +.. literalinclude:: ../../neuroml/examples/json_serialization.py Working with arraymorphs ------------------------ -.. literalinclude:: ../neuroml/examples/arraymorph_generation.py +.. literalinclude:: ../../neuroml/examples/arraymorph_generation.py Working with Izhikevich Cells ----------------------------- These examples were kindly contributed by Steve Marsh -.. literalinclude:: ../neuroml/examples/single_izhikevich_reader.py -.. literalinclude:: ../neuroml/examples/single_izhikevich_writer.py +.. literalinclude:: ../../neuroml/examples/single_izhikevich_reader.py +.. literalinclude:: ../../neuroml/examples/single_izhikevich_writer.py diff --git a/doc/userdocs/get-nml-core-docs.sh b/doc/userdocs/get-nml-core-docs.sh new file mode 100644 index 00000000..5f37b6c6 --- /dev/null +++ b/doc/userdocs/get-nml-core-docs.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +# Copyright 2021 Ankur Sinha +# Author: Ankur Sinha +# File : get-nml-core-docs.sh +# +# Since automodule does not allow us to list each class as a sub-section, the +# NeuroML core class page becomes rather heavy and not easy to navigate. +# The only way around this is to not use automodule, and instead, use autoclass +# individually for each class. + +# This script will generate the rst file for use by sphinx. + +grep -E '^class.*' ../neuroml/nml/nml.py | sed -e 's/^class //' -e 's/(.*)://' > classlist.txt + + diff --git a/doc/userdocs/index.rst b/doc/userdocs/index.rst new file mode 100644 index 00000000..f12f4592 --- /dev/null +++ b/doc/userdocs/index.rst @@ -0,0 +1,12 @@ +User guide +********** + +.. 
toctree:: + :maxdepth: 2 + + introduction + install + api + examples + bibliography + diff --git a/doc/install.rst b/doc/userdocs/install.rst similarity index 100% rename from doc/install.rst rename to doc/userdocs/install.rst diff --git a/doc/introduction.rst b/doc/userdocs/introduction.rst similarity index 100% rename from doc/introduction.rst rename to doc/userdocs/introduction.rst diff --git a/doc/loaders.rst b/doc/userdocs/loaders.rst similarity index 100% rename from doc/loaders.rst rename to doc/userdocs/loaders.rst diff --git a/doc/utils.rst b/doc/userdocs/utils.rst similarity index 100% rename from doc/utils.rst rename to doc/userdocs/utils.rst diff --git a/doc/writers.rst b/doc/userdocs/writers.rst similarity index 100% rename from doc/writers.rst rename to doc/userdocs/writers.rst diff --git a/neuroml/__init__.py b/neuroml/__init__.py index 1a7fb55e..e9cfc434 100644 --- a/neuroml/__init__.py +++ b/neuroml/__init__.py @@ -1,7 +1,7 @@ from .nml.nml import * # allows importation of all neuroml classes -__version__ = '0.2.55' -__version_info__ = tuple(int(i) for i in __version__.split('.')) +__version__ = "0.3.1" +__version_info__ = tuple(int(i) for i in __version__.split(".")) -current_neuroml_version = "v2.1" +current_neuroml_version = "v2.2" diff --git a/neuroml/arraymorph.py b/neuroml/arraymorph.py index 10d9c7d8..3e2123ee 100644 --- a/neuroml/arraymorph.py +++ b/neuroml/arraymorph.py @@ -28,16 +28,18 @@ class ArrayMorphology(neuroml.Morphology): """ - def __init__(self, - vertices=[], - connectivity=[], - id=None, - node_types=None, - name=None, - physical_mask=None, - fractions_along=None): - - super(ArrayMorphology,self).__init__() + def __init__( + self, + vertices=[], + connectivity=[], + id=None, + node_types=None, + name=None, + physical_mask=None, + fractions_along=None, + ): + + super(ArrayMorphology, self).__init__() self.connectivity = np.array(connectivity) self.vertices = np.array(vertices) @@ -45,32 +47,30 @@ def __init__(self, self.id = id 
if np.any(physical_mask): - self.physical_mask=np.array(physical_mask) + self.physical_mask = np.array(physical_mask) else: - self.physical_mask=np.zeros(len(connectivity),dtype='bool') + self.physical_mask = np.zeros(len(connectivity), dtype="bool") if np.any(node_types): - self.node_types=np.array(node_types) + self.node_types = np.array(node_types) else: - self.node_types=np.zeros(len(connectivity), - dtype='int32') + self.node_types = np.zeros(len(connectivity), dtype="int32") if np.any(fractions_along): self.fractions_along = np.array(fractions_along) else: - self.fractions_along=np.zeros(len(connectivity), - dtype='int32') + self.fractions_along = np.zeros(len(connectivity), dtype="int32") - #it will need a reference to its parent? + # it will need a reference to its parent? self.segments = SegmentList(self) - assert self.valid_morphology,'invalid_morphology' + assert self.valid_morphology, "invalid_morphology" @property def valid_morphology(self): all_nodes = self.__all_nodes_satisfied all_vertices = self.__all_vertices_present - return (all_nodes and all_vertices) + return all_nodes and all_vertices @property def __all_vertices_present(self): @@ -81,7 +81,7 @@ def __all_vertices_present(self): num_vertices = len(self.vertices) - return(all_vertices_present or num_vertices == 0) + return all_vertices_present or num_vertices == 0 @property def valid_ids(self): @@ -93,14 +93,13 @@ def valid_ids(self): return valid_flag - @property def __all_nodes_satisfied(self): m = self.vertices.shape[0] n = self.connectivity.shape[0] p = self.node_types.shape[0] - all_nodes_satisfied = (m == n == p) + all_nodes_satisfied = m == n == p return all_nodes_satisfied @property @@ -121,11 +120,11 @@ def physical_indices(self): physical_indices = np.where(self.physical_mask == 0)[0] return physical_indices - def children(self,index): + def children(self, index): """Returns an array with indexes of children""" return np.where(self.connectivity == index) - def to_root(self,index): + 
def to_root(self, index): """ Changes the connectivity matrix so that the node at index becomes the root @@ -133,37 +132,37 @@ def to_root(self,index): old_root_index = self.root_index new_root_index = index - #do a tree traversal: + # do a tree traversal: parent_index = self.connectivity[index] - grandparent_index=self.connectivity[parent_index] - while index!=old_root_index: - self.connectivity[parent_index]=index + grandparent_index = self.connectivity[parent_index] + while index != old_root_index: + self.connectivity[parent_index] = index index = parent_index parent_index = grandparent_index grandparent_index = self.connectivity[parent_index] self.connectivity[new_root_index] = -1 - def parent_id(self,index): + def parent_id(self, index): """Return the parent index for the given index""" return self.connectivity[index] - def vertex(self,index): + def vertex(self, index): """Return vertex corresponding to index in morphology""" return self.vertices[index] def __len__(self): return len(self.connectivity) - def pop(self,index): + def pop(self, index): """ TODO:This is failing tests (understandably) - need to fix! Deletes a node from the morphology, its children become children of the deleted node's parent. 
""" - self.vertices = np.delete(self.vertices,index) - self.node_types = np.delete(self.node_types,index) - self.connectivity = np.delete(self.connectivity,index) + self.vertices = np.delete(self.vertices, index) + self.node_types = np.delete(self.node_types, index) + self.connectivity = np.delete(self.connectivity, index) k = 0 for i in self.connectivity: @@ -172,19 +171,19 @@ def pop(self,index): k += 1 pass - def to_neuroml_morphology(self,id=""): + def to_neuroml_morphology(self, id=""): morphology = neuroml.Morphology() morphology.id = id - #need to traverse the tree: - for index in range(self.num_vertices-1): + # need to traverse the tree: + for index in range(self.num_vertices - 1): seg = self.segment_from_vertex_index(index) morphology.segments.append(seg) return morphology - def segment_from_vertex_index(self,index): + def segment_from_vertex_index(self, index): parent_index = self.connectivity[index] node_x = self.vertices[index][0] @@ -197,26 +196,20 @@ def segment_from_vertex_index(self,index): parent_z = self.vertices[parent_index][2] parent_d = self.vertices[parent_index][3] - p = neuroml.Point3DWithDiam(x=node_x, - y=node_y, - z=node_z, - diameter=node_d) + p = neuroml.Point3DWithDiam(x=node_x, y=node_y, z=node_z, diameter=node_d) - d = neuroml.Point3DWithDiam(x=parent_x, - y=parent_y, - z=parent_z, - diameter=parent_d) + d = neuroml.Point3DWithDiam( + x=parent_x, y=parent_y, z=parent_z, diameter=parent_d + ) - - seg = neuroml.Segment(proximal=p, - distal=d, - id=index) + seg = neuroml.Segment(proximal=p, distal=d, id=index) if index > 1: parent = neuroml.SegmentParent(segments=parent_index) seg.parent = parent return seg + class SegmentList(object): """ This class is a proxy, it returns a segment either @@ -224,11 +217,11 @@ class SegmentList(object): it returns the relevant segment. 
""" - def __init__(self,arraymorph): + def __init__(self, arraymorph): self.arraymorph = arraymorph self.instantiated_segments = {} - def __vertex_index_from_segment_index__(self,index): + def __vertex_index_from_segment_index__(self, index): """ The existence of a physical mask means that segment and and vertex indices fall out of sync. This function returns the @@ -250,18 +243,18 @@ def __len__(self): num_vertices = self.arraymorph.num_vertices num_floating = np.sum(self.arraymorph.physical_mask) - num_segments = num_vertices - num_floating -1 + num_segments = num_vertices - num_floating - 1 if num_segments < 0: num_segments = 0 return int(num_segments) - def __iadd__(self,segment_list): + def __iadd__(self, segment_list): for segment in segment_list: self.append(segment) return self - def __getitem__(self,segment_index): + def __getitem__(self, segment_index): if segment_index in self.instantiated_segments: neuroml_segment = self.instantiated_segments[segment_index] @@ -271,10 +264,10 @@ def __getitem__(self,segment_index): self.instantiated_segments[segment_index] = neuroml_segment return neuroml_segment - def __setitem__(self,index,user_set_segment): - self.instantiated_segments[index] = user_set_segment + def __setitem__(self, index, user_set_segment): + self.instantiated_segments[index] = user_set_segment - def append(self,segment): + def append(self, segment): """ Adds a new segment @@ -294,20 +287,26 @@ def append(self,segment): dist_z = segment.distal.z distal_diam = segment.distal.diameter - prox_vertex = [prox_x,prox_y,prox_z,prox_diam] - dist_vertex = [dist_x,dist_y,dist_z,distal_diam] + prox_vertex = [prox_x, prox_y, prox_z, prox_diam] + dist_vertex = [dist_x, dist_y, dist_z, distal_diam] if len(self.arraymorph.vertices) > 0: - self.arraymorph.vertices = np.append(self.arraymorph.vertices,[dist_vertex,prox_vertex],axis = 0) + self.arraymorph.vertices = np.append( + self.arraymorph.vertices, [dist_vertex, prox_vertex], axis=0 + ) else: - 
self.arraymorph.vertices = np.array([dist_vertex,prox_vertex]) + self.arraymorph.vertices = np.array([dist_vertex, prox_vertex]) - self.arraymorph.connectivity = np.append(self.arraymorph.connectivity,[-1,dist_vertex_index]) + self.arraymorph.connectivity = np.append( + self.arraymorph.connectivity, [-1, dist_vertex_index] + ) if len(self.arraymorph.physical_mask) == 0: - self.arraymorph.physical_mask = np.array([0,0]) + self.arraymorph.physical_mask = np.array([0, 0]) else: - self.arraymorph.physical_mask = np.append(self.arraymorph.physical_mask,[1,0]) + self.arraymorph.physical_mask = np.append( + self.arraymorph.physical_mask, [1, 0] + ) segment_index = len(self) - 1 self.instantiated_segments[segment_index] = segment diff --git a/neuroml/arraymorph_load_time_benchmark.py b/neuroml/arraymorph_load_time_benchmark.py index 9df6d8f1..c8f36185 100644 --- a/neuroml/arraymorph_load_time_benchmark.py +++ b/neuroml/arraymorph_load_time_benchmark.py @@ -6,54 +6,56 @@ class Benchmark: def __init__(self, num_segments): self.num_segments = num_segments - + def set_up(self): - num_segments = int(1e4) #Per cell + num_segments = int(1e4) # Per cell num_vertices = num_segments + 1 - x = np.linspace(0,10,num_vertices) + x = np.linspace(0, 10, num_vertices) y = np.zeros(num_vertices) z = np.zeros(num_vertices) - d = np.linspace(1,0.01,num_vertices) + d = np.linspace(1, 0.01, num_vertices) + + vertices = np.array([x, y, z, d]).T - vertices = np.array([x,y,z,d]).T - - connectivity = range(-1,num_segments) + connectivity = range(-1, num_segments) - big_arraymorph = am.ArrayMorphology(vertices = vertices, - connectivity = connectivity) - transposed_x = x+10 - transposed_vertices = np.array([transposed_x,y,z,d]).T + big_arraymorph = am.ArrayMorphology( + vertices=vertices, connectivity=connectivity + ) + transposed_x = x + 10 + transposed_vertices = np.array([transposed_x, y, z, d]).T - transposed_arraymorph = am.ArrayMorphology(vertices = transposed_vertices, - connectivity = 
connectivity) + transposed_arraymorph = am.ArrayMorphology( + vertices=transposed_vertices, connectivity=connectivity + ) bigger_d = d + 0.5 - fatter_vertices = np.array([x,y,z,bigger_d]).T + fatter_vertices = np.array([x, y, z, bigger_d]).T - fatter_arraymorph = am.ArrayMorphology(vertices = fatter_vertices, - connectivity = connectivity) + fatter_arraymorph = am.ArrayMorphology( + vertices=fatter_vertices, connectivity=connectivity + ) - neuroml_cell = neuroml.Cell(id='cell_4') - neuroml_morphology = neuroml.Morphology(id = 'my_morph') + neuroml_cell = neuroml.Cell(id="cell_4") + neuroml_morphology = neuroml.Morphology(id="my_morph") neuroml_cell.morphology = neuroml_morphology self.transposed_arraymorph = transposed_arraymorph self.fatter_arraymorph = fatter_arraymorph self.big_arraymorph = big_arraymorph - self.cell_1 = neuroml.Cell(id='cell_1') - self.cell_2 = neuroml.Cell(id='cell_2') - self.cell_3 = neuroml.Cell(id='cell_3') - + self.cell_1 = neuroml.Cell(id="cell_1") + self.cell_2 = neuroml.Cell(id="cell_2") + self.cell_3 = neuroml.Cell(id="cell_3") + self.cell_1.morphology = transposed_arraymorph self.cell_2.morphology = fatter_arraymorph self.cell_3.morphology = big_arraymorph - - self.test_doc = neuroml.NeuroMLDocument(id='TestDocument') + + self.test_doc = neuroml.NeuroMLDocument(id="TestDocument") self.test_doc.cells.append(self.cell_1) self.test_doc.cells.append(self.cell_2) self.test_doc.cells.append(self.cell_3) - self.test_doc.cells.append(neuroml_cell) - + self.test_doc.cells.append(neuroml_cell) diff --git a/neuroml/benchmarks/arraymorph_benchmarks.py b/neuroml/benchmarks/arraymorph_benchmarks.py index 7c779a4f..3366aa2a 100644 --- a/neuroml/benchmarks/arraymorph_benchmarks.py +++ b/neuroml/benchmarks/arraymorph_benchmarks.py @@ -13,14 +13,13 @@ def timeit(method): - def timed(*args, **kw): ts = time.time() result = method(*args, **kw) te = time.time() - print('%r (%r, %r) %2.2f sec' % (method.__name__, args, kw, te-ts)) - return te-ts + 
print("%r (%r, %r) %2.2f sec" % (method.__name__, args, kw, te - ts)) + return te - ts return timed @@ -29,29 +28,31 @@ class AMWriteBenchmark(object): """ TODO: Get rid of methods which are not used """ - def __init__(self,num_segments=1e6): + + def __init__(self, num_segments=1e6): num_segments = int(num_segments) num_vertices = int(num_segments) + 1 - x = np.linspace(0,10,num_vertices) + x = np.linspace(0, 10, num_vertices) y = np.zeros(num_vertices) z = np.zeros(num_vertices) - d = np.linspace(1,0.01,num_vertices) + d = np.linspace(1, 0.01, num_vertices) - vertices = np.array([x,y,z,d]).T + vertices = np.array([x, y, z, d]).T - connectivity = range(-1,num_segments) + connectivity = range(-1, num_segments) - big_arraymorph = am.ArrayMorphology(vertices=vertices, - connectivity=connectivity) + big_arraymorph = am.ArrayMorphology( + vertices=vertices, connectivity=connectivity + ) self.big_arraymorph = big_arraymorph - self.cell = neuroml.Cell(id='test_cell') + self.cell = neuroml.Cell(id="test_cell") self.cell.morphology = big_arraymorph - self.test_doc = neuroml.NeuroMLDocument(id='TestDocument') + self.test_doc = neuroml.NeuroMLDocument(id="TestDocument") self.test_doc.cells.append(self.cell) @@ -70,61 +71,41 @@ def write_time(self): def run_neuroml(self): self.test_write_big_arraymorph_neuroml() - @timeit - def run_json(self): - self.test_write_big_arraymorph_json() - @timeit def run_hdf5(self): self.test_write_big_arraymorph_hdf5() - def test_write_big_arraymorph_json(self): - writer_method = neuroml.writers.JSONWriter.write - fh,filename = tempfile.mkstemp() - - try: - writer_method(self.test_doc,filename) - except: - self.fail("Exception raised!") - - print('JSON Number of segments:') - print(self.num_segments) - print('JSON size in bytes:') - print(self.file_size(filename)) - - os.close(fh) - def test_write_big_arraymorph_neuroml(self): writer_method = neuroml.writers.NeuroMLWriter.write - fh,filename = tempfile.mkstemp() + fh, filename = 
tempfile.mkstemp() try: - writer_method(self.test_doc,filename) + writer_method(self.test_doc, filename) except: self.fail("Exception raised!") - print('NeuroML (XML) Number of segments:') + print("NeuroML (XML) Number of segments:") print(self.num_segments) - print('NeuroML (XML) size in bytes:') + print("NeuroML (XML) size in bytes:") print(self.file_size(filename)) os.close(fh) - def file_size(self,path): + def file_size(self, path): return os.path.getsize(path) def test_write_big_arraymorph_hdf5(self): writer_method = neuroml.writers.ArrayMorphWriter.write - fh,filename = tempfile.mkstemp() + fh, filename = tempfile.mkstemp() try: - writer_method(self.test_doc,filename) + writer_method(self.test_doc, filename) except: self.fail("Exception raised!") - print('HDF5 Number of segments:') + print("HDF5 Number of segments:") print(self.num_segments) - print('HDF5 size in bytes:') + print("HDF5 size in bytes:") print(self.file_size(filename)) os.close(fh) @@ -137,38 +118,44 @@ def test_write_expected(self): filename = tempfile.mkstemp()[1] writer_method = neuroml.writers.ArrayMorphWriter.write - writer_method(self.big_arraymorph,filename) + writer_method(self.big_arraymorph, filename) loader_method = neuroml.loaders.ArrayMorphLoader.load doc = loader_method(filename) array_morph = doc.morphology[0] - connectivity_equal = np.testing.assert_array_equal(array_morph.connectivity,self.big_arraymorph.connectivity) - physical_masks_equal = np.testing.assert_array_equal(array_morph.physical_mask,self.big_arraymorph.physical_mask) - vertices_equal = np.testing.assert_array_equal(array_morph.vertices,self.big_arraymorph.vertices) + connectivity_equal = np.testing.assert_array_equal( + array_morph.connectivity, self.big_arraymorph.connectivity + ) + physical_masks_equal = np.testing.assert_array_equal( + array_morph.physical_mask, self.big_arraymorph.physical_mask + ) + vertices_equal = np.testing.assert_array_equal( + array_morph.vertices, self.big_arraymorph.vertices + ) - 
self.assertEqual(connectivity_equal,None) # None when equal - self.assertEqual(physical_masks_equal,None) # None when equal - self.assertEqual(vertices_equal,None)# None when equal + self.assertEqual(connectivity_equal, None) # None when equal + self.assertEqual(physical_masks_equal, None) # None when equal + self.assertEqual(vertices_equal, None) # None when equal def test_write_multiple_morphologies(self): filename = tempfile.mkstemp()[1] writer_method = neuroml.writers.ArrayMorphWriter.write try: - writer_method(self.test_doc,filename) + writer_method(self.test_doc, filename) except: self.fail("Exception raised!") def test_write_multiple_morphologies(self): filename = tempfile.mkstemp()[1] writer_method = neuroml.writers.ArrayMorphWriter.write - writer_method(self.test_doc,filename) + writer_method(self.test_doc, filename) loader_method = neuroml.loaders.ArrayMorphLoader.load document = loader_method(filename) - self.assertIsInstance(document,neuroml.NeuroMLDocument) + self.assertIsInstance(document, neuroml.NeuroMLDocument) def benchmark_arraymorph_writer(): @@ -178,16 +165,13 @@ def benchmark_arraymorph_writer(): """ num_tests = 10 - json_results= [] - neuroml_results= [] - hdf5_results= [] + neuroml_results = [] + hdf5_results = [] for i in range(num_tests): - json_runtimes = [] hdf5_runtimes = [] neuroml_runtimes = [] - json_num_segments_list = [] hdf5_num_segments_list = [] neuroml_num_segments_list = [] @@ -195,98 +179,86 @@ def benchmark_arraymorph_writer(): print("test %d" % (i)) neuroml_num_segments_factor = 4e2 - json_num_segments_factor = 4e2 hdf5_num_segments_factor = 4e2 neuroml_num_segments = i * neuroml_num_segments_factor - json_num_segments = i * json_num_segments_factor hdf5_num_segments = i * hdf5_num_segments_factor neuroml_benchmark = AMWriteBenchmark(num_segments=neuroml_num_segments) - json_benchmark = AMWriteBenchmark(num_segments=json_num_segments) hdf5_benchmark = AMWriteBenchmark(num_segments=hdf5_num_segments) write_time_neuroml = 
neuroml_benchmark.run_neuroml() - write_time_json = json_benchmark.run_json() write_time_hdf5 = hdf5_benchmark.run_hdf5() neuroml_runtimes.append(write_time_neuroml) - json_runtimes.append(write_time_json) hdf5_runtimes.append(write_time_hdf5) neuroml_num_segments_list.append(neuroml_num_segments) - json_num_segments_list.append(json_num_segments) hdf5_num_segments_list.append(hdf5_num_segments) neuroml_results.append(neuroml_runtimes) - json_results.append(json_runtimes) hdf5_results.append(hdf5_runtimes) np.savetxt("neuroml_results.csv", neuroml_results, delimiter=",") - np.savetxt("json_results.csv", json_results, delimiter=",") np.savetxt("hdf5_results.csv", hdf5_results, delimiter=",") - neuroml_runtimes_averaged = np.mean(neuroml_results,axis=0) - json_runtimes_averaged = np.mean(json_results,axis=0) - hdf5_runtimes_averaged = np.mean(hdf5_results,axis=0) + neuroml_runtimes_averaged = np.mean(neuroml_results, axis=0) + hdf5_runtimes_averaged = np.mean(hdf5_results, axis=0) - hdf5_errors = np.std(hdf5_results,axis=0) - json_errors = np.std(json_results,axis=0) - neuroml_errors = np.std(neuroml_results,axis=0) + hdf5_errors = np.std(hdf5_results, axis=0) + neuroml_errors = np.std(neuroml_results, axis=0) neuroml_num_segments_list = np.array(neuroml_num_segments_list) - json_num_segments_list = np.array(json_num_segments_list) hdf5_num_segments_list = np.array(hdf5_num_segments_list) - plt_neuroml = plt.errorbar(neuroml_num_segments_list, - neuroml_runtimes_averaged, - yerr=hdf5_errors, - marker='o', - color='k', - ecolor='k', - markerfacecolor='r', - label="series 2", - capsize=5,) + plt_neuroml = plt.errorbar( + neuroml_num_segments_list, + neuroml_runtimes_averaged, + yerr=hdf5_errors, + marker="o", + color="k", + ecolor="k", + markerfacecolor="r", + label="series 2", + capsize=5, + ) plt.title("ArrayMorph write to disk benchmark (NeuroML (XML) serialization)") plt.xlabel("Number of segments in morphology (Units of 1000 segments)") plt.ylabel("Time to write 
to disk (s)") - plt_hdf5 = plt.errorbar(hdf5_num_segments_list, - hdf5_runtimes_averaged, - yerr=hdf5_errors, - marker='o', - color='k', - ecolor='k', - markerfacecolor='g', - label="series 2", - capsize=5,) + plt_hdf5 = plt.errorbar( + hdf5_num_segments_list, + hdf5_runtimes_averaged, + yerr=hdf5_errors, + marker="o", + color="k", + ecolor="k", + markerfacecolor="g", + label="series 2", + capsize=5, + ) plt.title("ArrayMorph write to disk benchmark (HDF5 serialization)") plt.xlabel("Number of segments in morphology (Units of 1000 segments)") plt.ylabel("Time to write to disk (s)") -# plt.show() - - plt_json = plt.errorbar(json_num_segments_list, - json_runtimes_averaged, - yerr=json_errors, - marker='o', - color='k', - ecolor='k', - markerfacecolor='b', - label="series 2", - capsize=5,) + # plt.show() - plt.title("ArrayMorph write to disk benchmarks for JSON, HDF5 and NeuroML serialization formats") + plt.title( + "ArrayMorph write to disk benchmarks for HDF5 and NeuroML serialization formats" + ) plt.xlabel("Number of segments in morphology") plt.ylabel("Time to write to disk (s)") - plt.legend([plt_json, plt_hdf5,plt_neuroml], ["JSON serialization", "HDF5 serialization","NeuroML serialization"]) + plt.legend( + [plt_hdf5, plt_neuroml], + ["HDF5 serialization", "NeuroML serialization"], + ) - plt.yscale('log') - plt.xscale('log') + plt.yscale("log") + plt.xscale("log") plt.show() -#prototype: +# prototype: if __name__ == "__main__": benchmark_arraymorph_writer() diff --git a/neuroml/examples/arraymorph_generation.py b/neuroml/examples/arraymorph_generation.py index 3506be4f..af281b6f 100644 --- a/neuroml/examples/arraymorph_generation.py +++ b/neuroml/examples/arraymorph_generation.py @@ -11,41 +11,43 @@ import neuroml.writers as writers import neuroml.arraymorph as am -p = neuroml.Point3DWithDiam(x=0,y=0,z=0,diameter=50) -d = neuroml.Point3DWithDiam(x=50,y=0,z=0,diameter=50) +p = neuroml.Point3DWithDiam(x=0, y=0, z=0, diameter=50) +d = 
neuroml.Point3DWithDiam(x=50, y=0, z=0, diameter=50) soma = neuroml.Segment(proximal=p, distal=d) -soma.name = 'Soma' +soma.name = "Soma" soma.id = 0 -#now make an axon with 100 compartments: +# now make an axon with 100 compartments: parent = neuroml.SegmentParent(segments=soma.id) parent_segment = soma axon_segments = [] seg_id = 1 for i in range(100): - p = neuroml.Point3DWithDiam(x=parent_segment.distal.x, - y=parent_segment.distal.y, - z=parent_segment.distal.z, - diameter=0.1) - - d = neuroml.Point3DWithDiam(x=parent_segment.distal.x+10, - y=parent_segment.distal.y, - z=parent_segment.distal.z, - diameter=0.1) - - axon_segment = neuroml.Segment(proximal = p, - distal = d, - parent = parent) + p = neuroml.Point3DWithDiam( + x=parent_segment.distal.x, + y=parent_segment.distal.y, + z=parent_segment.distal.z, + diameter=0.1, + ) + + d = neuroml.Point3DWithDiam( + x=parent_segment.distal.x + 10, + y=parent_segment.distal.y, + z=parent_segment.distal.z, + diameter=0.1, + ) + + axon_segment = neuroml.Segment(proximal=p, distal=d, parent=parent) axon_segment.id = seg_id - - axon_segment.name = 'axon_segment_' + str(axon_segment.id) - #now reset everything: + axon_segment.name = "axon_segment_" + str(axon_segment.id) + + # now reset everything: parent = neuroml.SegmentParent(segments=axon_segment.id) parent_segment = axon_segment - seg_id += 1 + seg_id += 1 axon_segments.append(axon_segment) @@ -55,25 +57,25 @@ test_morphology.id = "TestMorphology" cell = neuroml.Cell() -cell.name = 'TestCell' -cell.id = 'TestCell' +cell.name = "TestCell" +cell.id = "TestCell" cell.morphology = test_morphology doc = neuroml.NeuroMLDocument() -#doc.name = "Test neuroML document" +# doc.name = "Test neuroML document" doc.cells.append(cell) doc.id = "TestNeuroMLDocument" -nml_file = 'tmp/arraymorph.nml' +nml_file = "tmp/arraymorph.nml" -writers.NeuroMLWriter.write(doc,nml_file) +writers.NeuroMLWriter.write(doc, nml_file) -print("Written morphology file to: "+nml_file) +print("Written 
morphology file to: " + nml_file) -###### Validate the NeuroML ###### +###### Validate the NeuroML ###### from neuroml.utils import validate_neuroml2 -validate_neuroml2(nml_file) \ No newline at end of file +validate_neuroml2(nml_file) diff --git a/neuroml/examples/build_3D_network.py b/neuroml/examples/build_3D_network.py index a6b16f24..4da0fff9 100644 --- a/neuroml/examples/build_3D_network.py +++ b/neuroml/examples/build_3D_network.py @@ -28,22 +28,23 @@ dend_len = 10 dend_num = 10 + def generateRandomMorphology(): morphology = Morphology() - p = Point3DWithDiam(x=0,y=0,z=0,diameter=soma_diam) - d = Point3DWithDiam(x=soma_len,y=0,z=0,diameter=soma_diam) - soma = Segment(proximal=p, distal=d, name = 'Soma', id = 0) + p = Point3DWithDiam(x=0, y=0, z=0, diameter=soma_diam) + d = Point3DWithDiam(x=soma_len, y=0, z=0, diameter=soma_diam) + soma = Segment(proximal=p, distal=d, name="Soma", id=0) morphology.segments.append(soma) parent_seg = soma - for dend_id in range(0,dend_num): + for dend_id in range(0, dend_num): - p = Point3DWithDiam(x=d.x,y=d.y,z=d.z,diameter=dend_diam) - d = Point3DWithDiam(x=p.x,y=p.y+dend_len,z=p.z,diameter=dend_diam) - dend = Segment(proximal=p, distal=d, name = 'Dend_%i'%dend_id, id = 1+dend_id) + p = Point3DWithDiam(x=d.x, y=d.y, z=d.z, diameter=dend_diam) + d = Point3DWithDiam(x=p.x, y=p.y + dend_len, z=p.z, diameter=dend_diam) + dend = Segment(proximal=p, distal=d, name="Dend_%i" % dend_id, id=1 + dend_id) dend.parent = SegmentParent(segments=parent_seg.id) parent_seg = dend @@ -53,80 +54,89 @@ def generateRandomMorphology(): return morphology + def run(): cell_num = 10 x_size = 500 y_size = 500 z_size = 500 - + nml_doc = NeuroMLDocument(id="Net3DExample") syn0 = ExpOneSynapse(id="syn0", gbase="65nS", erev="0mV", tau_decay="3ms") nml_doc.exp_one_synapses.append(syn0) - + net = Network(id="Net3D") nml_doc.networks.append(net) - proj_count = 0 - #conn_count = 0 + # conn_count = 0 - for cell_id in range(0,cell_num): + for cell_id in 
range(0, cell_num): - cell = Cell(id="Cell_%i"%cell_id) + cell = Cell(id="Cell_%i" % cell_id) cell.morphology = generateRandomMorphology() - + nml_doc.cells.append(cell) - pop = Population(id="Pop_%i"%cell_id, component=cell.id, type="populationList") + pop = Population( + id="Pop_%i" % cell_id, component=cell.id, type="populationList" + ) net.populations.append(pop) pop.properties.append(Property(tag="color", value="1 0 0")) inst = Instance(id="0") pop.instances.append(inst) - inst.location = Location(x=str(x_size*random()), y=str(y_size*random()), z=str(z_size*random())) - + inst.location = Location( + x=str(x_size * random()), y=str(y_size * random()), z=str(z_size * random()) + ) + prob_connection = 0.5 - for post in range(0,cell_num): + for post in range(0, cell_num): if post is not cell_id and random() <= prob_connection: - from_pop = "Pop_%i"%cell_id - to_pop = "Pop_%i"%post + from_pop = "Pop_%i" % cell_id + to_pop = "Pop_%i" % post pre_seg_id = 0 post_seg_id = 1 - - projection = Projection(id="Proj_%i"%proj_count, presynaptic_population=from_pop, postsynaptic_population=to_pop, synapse=syn0.id) + projection = Projection( + id="Proj_%i" % proj_count, + presynaptic_population=from_pop, + postsynaptic_population=to_pop, + synapse=syn0.id, + ) net.projections.append(projection) - connection = Connection(id=proj_count, \ - pre_cell_id="%s[%i]"%(from_pop,0), \ - pre_segment_id=pre_seg_id, \ - pre_fraction_along=random(), - post_cell_id="%s[%i]"%(to_pop,0), \ - post_segment_id=post_seg_id, - post_fraction_along=random()) + connection = Connection( + id=proj_count, + pre_cell_id="%s[%i]" % (from_pop, 0), + pre_segment_id=pre_seg_id, + pre_fraction_along=random(), + post_cell_id="%s[%i]" % (to_pop, 0), + post_segment_id=post_seg_id, + post_fraction_along=random(), + ) projection.connections.append(connection) proj_count += 1 - #net.synaptic_connections.append(SynapticConnection(from_="%s[%i]"%(from_pop,0), to="%s[%i]"%(to_pop,0))) - - - ####### Write to file ###### 
- - nml_file = 'tmp/net3d.nml' + # net.synaptic_connections.append(SynapticConnection(from_="%s[%i]"%(from_pop,0), to="%s[%i]"%(to_pop,0))) + + ####### Write to file ###### + + nml_file = "tmp/net3d.nml" writers.NeuroMLWriter.write(nml_doc, nml_file) - - print("Written network file to: "+nml_file) + print("Written network file to: " + nml_file) - ###### Validate the NeuroML ###### + ###### Validate the NeuroML ###### from neuroml.utils import validate_neuroml2 validate_neuroml2(nml_file) + run() diff --git a/neuroml/examples/build_complete.py b/neuroml/examples/build_complete.py index 95b0b38d..26bde1ea 100644 --- a/neuroml/examples/build_complete.py +++ b/neuroml/examples/build_complete.py @@ -48,45 +48,46 @@ nml_doc.notes = "Lots of notes...." -IafCell0 = IafCell(id="iaf0", - C="1.0 nF", - thresh = "-50mV", - reset="-65mV", - leak_conductance="10 nS", - leak_reversal="-65mV") +IafCell0 = IafCell( + id="iaf0", + C="1.0 nF", + thresh="-50mV", + reset="-65mV", + leak_conductance="10 nS", + leak_reversal="-65mV", +) nml_doc.iaf_cells.append(IafCell0) -IafCell1 = IafCell(id="iaf1", - C="1.0 nF", - thresh = "-50mV", - reset="-65mV", - leak_conductance="20 nS", - leak_reversal="-65mV") +IafCell1 = IafCell( + id="iaf1", + C="1.0 nF", + thresh="-50mV", + reset="-65mV", + leak_conductance="20 nS", + leak_reversal="-65mV", +) nml_doc.iaf_cells.append(IafCell1) -iz0 = IzhikevichCell(id="iz0", v0="-70mV", thresh="30mV", a="0.02", b="0.2", c="-65.0", d="6") +iz0 = IzhikevichCell( + id="iz0", v0="-70mV", thresh="30mV", a="0.02", b="0.2", c="-65.0", d="6" +) nml_doc.izhikevich_cells.append(iz0) -syn0 = ExpOneSynapse(id="syn0", - gbase="14nS", - erev="0mV", - tau_decay="3ms") +syn0 = ExpOneSynapse(id="syn0", gbase="14nS", erev="0mV", tau_decay="3ms") nml_doc.exp_one_synapses.append(syn0) -syn1 = ExpTwoSynapse(id="syn1", - gbase="2nS", - erev="0mV", - tau_rise="1ms", - tau_decay="3ms") +syn1 = ExpTwoSynapse( + id="syn1", gbase="2nS", erev="0mV", tau_rise="1ms", tau_decay="3ms" +) 
nml_doc.exp_two_synapses.append(syn1) -gj = GapJunction(id="gj1",conductance="10pS") +gj = GapJunction(id="gj1", conductance="10pS") nml_doc.gap_junctions.append(gj) @@ -95,13 +96,14 @@ nml_doc.silent_synapses.append(sil_syn) -grad_syn = GradedSynapse(id="gs1",conductance="0.5pS",delta="5mV",Vth="-55mV",k="0.025per_ms",erev="0mV") +grad_syn = GradedSynapse( + id="gs1", conductance="0.5pS", delta="5mV", Vth="-55mV", k="0.025per_ms", erev="0mV" +) nml_doc.graded_synapses.append(grad_syn) -pfs = PoissonFiringSynapse(id='pfs', - average_rate='150Hz', - synapse=syn0.id, - spike_target="./%s"%syn0.id) +pfs = PoissonFiringSynapse( + id="pfs", average_rate="150Hz", synapse=syn0.id, spike_target="./%s" % syn0.id +) nml_doc.poisson_firing_synapses.append(pfs) @@ -111,43 +113,43 @@ nml_doc.networks.append(net) -size0 = int(5*scale) -pop0 = Population(id="IafPop0", - component=IafCell0.id, - size=size0) +size0 = int(5 * scale) +pop0 = Population(id="IafPop0", component=IafCell0.id, size=size0) net.populations.append(pop0) -size1 = int(5*scale) -pop1 = Population(id="IafPop1", - component=IafCell1.id, - size=size1) +size1 = int(5 * scale) +pop1 = Population(id="IafPop1", component=IafCell1.id, size=size1) net.populations.append(pop1) -size2 = int(5*scale) -pop2 = Population(id="IzhPop", - component=iz0.id, - size=size2) +size2 = int(5 * scale) +pop2 = Population(id="IzhPop", component=iz0.id, size=size2) net.populations.append(pop2) -cell_num = int(4*scale) -pop = Population(id="Pop_x", component=IafCell0.id, type="populationList",size=cell_num) +cell_num = int(4 * scale) +pop = Population( + id="Pop_x", component=IafCell0.id, type="populationList", size=cell_num +) net.populations.append(pop) pop.properties.append(Property(tag="color", value="1 0 0")) x_size = 500 y_size = 500 z_size = 500 - + for i in range(cell_num): inst = Instance(id=i) pop.instances.append(inst) - inst.location = Location(x=str(x_size*random.random()), y=str(y_size*random.random()), 
z=str(z_size*random.random())) + inst.location = Location( + x=str(x_size * random.random()), + y=str(y_size * random.random()), + z=str(z_size * random.random()), + ) prob_connection = 0.5 proj_count = 0 @@ -155,114 +157,142 @@ from_pop = pop.id to_pop = pop.id -projection = Projection(id="Proj", presynaptic_population=from_pop, postsynaptic_population=to_pop, synapse=syn0.id) -electricalProjection = ElectricalProjection(id="ElectProj", presynaptic_population=from_pop, postsynaptic_population=to_pop) -electricalProjectionW = ElectricalProjection(id="ElectProjW", presynaptic_population=from_pop, postsynaptic_population=to_pop) +projection = Projection( + id="Proj", + presynaptic_population=from_pop, + postsynaptic_population=to_pop, + synapse=syn0.id, +) +electricalProjection = ElectricalProjection( + id="ElectProj", presynaptic_population=from_pop, postsynaptic_population=to_pop +) +electricalProjectionW = ElectricalProjection( + id="ElectProjW", presynaptic_population=from_pop, postsynaptic_population=to_pop +) net.projections.append(projection) net.electrical_projections.append(electricalProjection) net.electrical_projections.append(electricalProjectionW) -input_list = InputList(id='il', - component=pfs.id, - populations=from_pop) +input_list = InputList(id="il", component=pfs.id, populations=from_pop) net.input_lists.append(input_list) -input_list_w = InputList(id='ilw', - component=pfs.id, - populations=from_pop) +input_list_w = InputList(id="ilw", component=pfs.id, populations=from_pop) net.input_lists.append(input_list_w) -for pre_index in range(0,cell_num): - - for post_index in range(0,cell_num): +for pre_index in range(0, cell_num): + + for post_index in range(0, cell_num): if pre_index != post_index and random.random() <= prob_connection: pre_seg_id = 0 post_seg_id = 0 - connection = ConnectionWD(id=proj_count, \ - pre_cell_id="../%s/%i/%s"%(from_pop,pre_index,IafCell0.id), \ - pre_segment_id=pre_seg_id, \ - pre_fraction_along=random.random(), - 
post_cell_id="../%s/%i/%s"%(to_pop,post_index,IafCell0.id), \ - post_segment_id=post_seg_id, - post_fraction_along=random.random(), - weight=random.random(), - delay='%sms'%(random.random()*10)) + connection = ConnectionWD( + id=proj_count, + pre_cell_id="../%s/%i/%s" % (from_pop, pre_index, IafCell0.id), + pre_segment_id=pre_seg_id, + pre_fraction_along=random.random(), + post_cell_id="../%s/%i/%s" % (to_pop, post_index, IafCell0.id), + post_segment_id=post_seg_id, + post_fraction_along=random.random(), + weight=random.random(), + delay="%sms" % (random.random() * 10), + ) projection.connection_wds.append(connection) - - electricalConnection = ElectricalConnectionInstance(id=proj_count, \ - pre_cell="../%s/%i/%s"%(from_pop,pre_index,IafCell0.id), \ - pre_segment=pre_seg_id, \ - pre_fraction_along=random.random(), - post_cell="../%s/%i/%s"%(to_pop,post_index,IafCell0.id), \ - post_segment=post_seg_id, - post_fraction_along=random.random(), - synapse=gj.id) - - electricalProjection.electrical_connection_instances.append(electricalConnection) - - electricalConnectionW = ElectricalConnectionInstanceW(id=proj_count, \ - pre_cell="../%s/%i/%s"%(from_pop,pre_index,IafCell0.id), \ - pre_segment=pre_seg_id, \ - pre_fraction_along=random.random(), - post_cell="../%s/%i/%s"%(to_pop,post_index,IafCell0.id), \ - post_segment=post_seg_id, - post_fraction_along=random.random(), - synapse=gj.id, - weight=random.random()) - - electricalProjectionW.electrical_connection_instance_ws.append(electricalConnectionW) - + + electricalConnection = ElectricalConnectionInstance( + id=proj_count, + pre_cell="../%s/%i/%s" % (from_pop, pre_index, IafCell0.id), + pre_segment=pre_seg_id, + pre_fraction_along=random.random(), + post_cell="../%s/%i/%s" % (to_pop, post_index, IafCell0.id), + post_segment=post_seg_id, + post_fraction_along=random.random(), + synapse=gj.id, + ) + + electricalProjection.electrical_connection_instances.append( + electricalConnection + ) + + electricalConnectionW = 
ElectricalConnectionInstanceW( + id=proj_count, + pre_cell="../%s/%i/%s" % (from_pop, pre_index, IafCell0.id), + pre_segment=pre_seg_id, + pre_fraction_along=random.random(), + post_cell="../%s/%i/%s" % (to_pop, post_index, IafCell0.id), + post_segment=post_seg_id, + post_fraction_along=random.random(), + synapse=gj.id, + weight=random.random(), + ) + + electricalProjectionW.electrical_connection_instance_ws.append( + electricalConnectionW + ) + proj_count += 1 - - input = Input(id=pre_index, - target="../%s/%i/%s"%(from_pop, pre_index, pop.component), - destination="synapses") - input_list.input.append(input) - - input_w = InputW(id=pre_index, - target="../%s/%i/%s"%(from_pop, pre_index, pop.component), - destination="synapses", - weight=10) - - input_list_w.input_ws.append(input_w) - + + input = Input( + id=pre_index, + target="../%s/%i/%s" % (from_pop, pre_index, pop.component), + destination="synapses", + ) + input_list.input.append(input) + + input_w = InputW( + id=pre_index, + target="../%s/%i/%s" % (from_pop, pre_index, pop.component), + destination="synapses", + weight=10, + ) + + input_list_w.input_ws.append(input_w) + proj_count = 0 from_pop = pop0.id to_pop = pop1.id -electricalProjection = ElectricalProjection(id="ElectProj0", presynaptic_population=from_pop, postsynaptic_population=to_pop) +electricalProjection = ElectricalProjection( + id="ElectProj0", presynaptic_population=from_pop, postsynaptic_population=to_pop +) net.electrical_projections.append(electricalProjection) -for pre_index in range(0,size0): - - for post_index in range(0,size1): +for pre_index in range(0, size0): + + for post_index in range(0, size1): if pre_index != post_index and random.random() <= prob_connection: pre_seg_id = 0 post_seg_id = 0 - - electricalConnection = ElectricalConnection(id=proj_count, \ - pre_cell="%s"%(pre_index), \ - pre_segment=pre_seg_id, \ - pre_fraction_along=random.random(), - post_cell="%s"%(post_index), \ - post_segment=post_seg_id, - 
post_fraction_along=random.random(), - synapse=gj.id) - + + electricalConnection = ElectricalConnection( + id=proj_count, + pre_cell="%s" % (pre_index), + pre_segment=pre_seg_id, + pre_fraction_along=random.random(), + post_cell="%s" % (post_index), + post_segment=post_seg_id, + post_fraction_along=random.random(), + synapse=gj.id, + ) + electricalProjection.electrical_connections.append(electricalConnection) - + proj_count += 1 -projection0 = Projection(id="ProjEmpty", presynaptic_population=from_pop, postsynaptic_population=to_pop, synapse=syn0.id) +projection0 = Projection( + id="ProjEmpty", + presynaptic_population=from_pop, + postsynaptic_population=to_pop, + synapse=syn0.id, +) net.projections.append(projection0) @@ -270,16 +300,23 @@ from_pop = pop0.id to_pop = pop1.id -projection = Projection(id="ProjConnection", presynaptic_population=from_pop, postsynaptic_population=to_pop, synapse=syn1.id) +projection = Projection( + id="ProjConnection", + presynaptic_population=from_pop, + postsynaptic_population=to_pop, + synapse=syn1.id, +) net.projections.append(projection) -connection = Connection(id=0, \ - pre_cell_id="../%s[%i]"%(from_pop,size0-1), \ - pre_segment_id=pre_seg_id, \ - pre_fraction_along=random.random(), - post_cell_id="../%s[%i]"%(to_pop,size0-1), \ - post_segment_id=post_seg_id, - post_fraction_along=random.random()) +connection = Connection( + id=0, + pre_cell_id="../%s[%i]" % (from_pop, size0 - 1), + pre_segment_id=pre_seg_id, + pre_fraction_along=random.random(), + post_cell_id="../%s[%i]" % (to_pop, size0 - 1), + post_segment_id=post_seg_id, + post_fraction_along=random.random(), +) projection.connections.append(connection) @@ -287,66 +324,80 @@ from_pop = pop0.id to_pop = pop1.id -continuous_projection = ContinuousProjection(id="ProjCC", presynaptic_population=from_pop, postsynaptic_population=to_pop) +continuous_projection = ContinuousProjection( + id="ProjCC", presynaptic_population=from_pop, postsynaptic_population=to_pop +) 
net.continuous_projections.append(continuous_projection) -continuous_connection = ContinuousConnection(id=0, \ - pre_cell="0", \ - post_cell="0", \ - pre_component=sil_syn.id, \ - post_component=grad_syn.id) +continuous_connection = ContinuousConnection( + id=0, + pre_cell="0", + post_cell="0", + pre_component=sil_syn.id, + post_component=grad_syn.id, +) continuous_projection.continuous_connections.append(continuous_connection) from_pop = pop.id to_pop = pop1.id -continuous_projection_i = ContinuousProjection(id="ProjCCI", presynaptic_population=from_pop, postsynaptic_population=to_pop) +continuous_projection_i = ContinuousProjection( + id="ProjCCI", presynaptic_population=from_pop, postsynaptic_population=to_pop +) net.continuous_projections.append(continuous_projection_i) -continuous_connection_i = ContinuousConnectionInstance(id=0, \ - pre_cell="../%s/%i/%s"%(from_pop,0,pop.component), \ - pre_segment=pre_seg_id, \ - pre_fraction_along=random.random(), - post_cell="../%s[%i]"%(to_pop,0), \ - post_segment=post_seg_id, - post_fraction_along=random.random(), - pre_component=sil_syn.id, \ - post_component=grad_syn.id) +continuous_connection_i = ContinuousConnectionInstance( + id=0, + pre_cell="../%s/%i/%s" % (from_pop, 0, pop.component), + pre_segment=pre_seg_id, + pre_fraction_along=random.random(), + post_cell="../%s[%i]" % (to_pop, 0), + post_segment=post_seg_id, + post_fraction_along=random.random(), + pre_component=sil_syn.id, + post_component=grad_syn.id, +) continuous_projection_i.continuous_connection_instances.append(continuous_connection_i) -continuous_projection_iw = ContinuousProjection(id="ProjCCIW", presynaptic_population=from_pop, postsynaptic_population=to_pop) +continuous_projection_iw = ContinuousProjection( + id="ProjCCIW", presynaptic_population=from_pop, postsynaptic_population=to_pop +) net.continuous_projections.append(continuous_projection_iw) -continuous_connection_iw = ContinuousConnectionInstanceW(id=0, \ - 
pre_cell="../%s/%i/%s"%(from_pop,0,pop.component), \ - pre_segment=pre_seg_id, \ - pre_fraction_along=random.random(), - post_cell="../%s[%i]"%(to_pop,0), \ - post_segment=post_seg_id, - post_fraction_along=random.random(), - pre_component=sil_syn.id, \ - post_component=grad_syn.id, \ - weight=5) - -continuous_projection_iw.continuous_connection_instance_ws.append(continuous_connection_iw) - - -nml_file = 'test_files/complete.nml' +continuous_connection_iw = ContinuousConnectionInstanceW( + id=0, + pre_cell="../%s/%i/%s" % (from_pop, 0, pop.component), + pre_segment=pre_seg_id, + pre_fraction_along=random.random(), + post_cell="../%s[%i]" % (to_pop, 0), + post_segment=post_seg_id, + post_fraction_along=random.random(), + pre_component=sil_syn.id, + post_component=grad_syn.id, + weight=5, +) + +continuous_projection_iw.continuous_connection_instance_ws.append( + continuous_connection_iw +) + + +nml_file = "test_files/complete.nml" writers.NeuroMLWriter.write(nml_doc, nml_file) -summary0 = nml_doc.summary() -print("Created:\n"+summary0) -print("Written network file to: "+nml_file) +summary0 = nml_doc.summary() +print("Created:\n" + summary0) +print("Written network file to: " + nml_file) -###### Validate the NeuroML ###### +###### Validate the NeuroML ###### from neuroml.utils import validate_neuroml2 validate_neuroml2(nml_file) -nml_h5_file = 'test_files/complete.nml.h5' +nml_h5_file = "test_files/complete.nml.h5" writers.NeuroMLHdf5Writer.write(nml_doc, nml_h5_file) -print("Written H5 network file to: "+nml_h5_file) +print("Written H5 network file to: " + nml_h5_file) diff --git a/neuroml/examples/build_network.py b/neuroml/examples/build_network.py index 18acc070..cf127c52 100644 --- a/neuroml/examples/build_network.py +++ b/neuroml/examples/build_network.py @@ -19,28 +19,29 @@ nml_doc = NeuroMLDocument(id="IafNet") -IafCell0 = IafCell(id="iaf0", - C="1.0 nF", - thresh = "-50mV", - reset="-65mV", - leak_conductance="10 nS", - leak_reversal="-65mV") +IafCell0 = 
IafCell( + id="iaf0", + C="1.0 nF", + thresh="-50mV", + reset="-65mV", + leak_conductance="10 nS", + leak_reversal="-65mV", +) nml_doc.iaf_cells.append(IafCell0) -IafCell1 = IafCell(id="iaf1", - C="1.0 nF", - thresh = "-50mV", - reset="-65mV", - leak_conductance="20 nS", - leak_reversal="-65mV") +IafCell1 = IafCell( + id="iaf1", + C="1.0 nF", + thresh="-50mV", + reset="-65mV", + leak_conductance="20 nS", + leak_reversal="-65mV", +) nml_doc.iaf_cells.append(IafCell1) -syn0 = ExpOneSynapse(id="syn0", - gbase="65nS", - erev="0mV", - tau_decay="3ms") +syn0 = ExpOneSynapse(id="syn0", gbase="65nS", erev="0mV", tau_decay="3ms") nml_doc.exp_one_synapses.append(syn0) @@ -49,51 +50,50 @@ nml_doc.networks.append(net) size0 = 5 -pop0 = Population(id="IafPop0", - component=IafCell0.id, - size=size0) +pop0 = Population(id="IafPop0", component=IafCell0.id, size=size0) net.populations.append(pop0) size1 = 5 -pop1 = Population(id="IafPop1", - component=IafCell0.id, - size=size1) +pop1 = Population(id="IafPop1", component=IafCell0.id, size=size1) net.populations.append(pop1) prob_connection = 0.5 -for pre in range(0,size0): +for pre in range(0, size0): - pg = PulseGenerator(id="pulseGen_%i"%pre, - delay="0ms", - duration="100ms", - amplitude="%f nA"%(0.1*random())) + pg = PulseGenerator( + id="pulseGen_%i" % pre, + delay="0ms", + duration="100ms", + amplitude="%f nA" % (0.1 * random()), + ) nml_doc.pulse_generators.append(pg) - exp_input = ExplicitInput(target="%s[%i]"%(pop0.id,pre), - input=pg.id) + exp_input = ExplicitInput(target="%s[%i]" % (pop0.id, pre), input=pg.id) net.explicit_inputs.append(exp_input) - for post in range(0,size1): + for post in range(0, size1): # fromxx is used since from is Python keyword if random() <= prob_connection: - syn = SynapticConnection(from_="%s[%i]"%(pop0.id,pre), - synapse=syn0.id, - to="%s[%i]"%(pop1.id,post)) + syn = SynapticConnection( + from_="%s[%i]" % (pop0.id, pre), + synapse=syn0.id, + to="%s[%i]" % (pop1.id, post), + ) 
net.synaptic_connections.append(syn) -nml_file = 'tmp/testnet.nml' +nml_file = "tmp/testnet.nml" writers.NeuroMLWriter.write(nml_doc, nml_file) -print("Written network file to: "+nml_file) +print("Written network file to: " + nml_file) -###### Validate the NeuroML ###### +###### Validate the NeuroML ###### from neuroml.utils import validate_neuroml2 diff --git a/neuroml/examples/build_network2.py b/neuroml/examples/build_network2.py index 042f0961..e0969217 100644 --- a/neuroml/examples/build_network2.py +++ b/neuroml/examples/build_network2.py @@ -35,42 +35,41 @@ nml_doc.notes = "Root notes" -IafCell0 = IafCell(id="iaf0", - C="1.0 nF", - thresh = "-50mV", - reset="-65mV", - leak_conductance="10 nS", - leak_reversal="-65mV") +IafCell0 = IafCell( + id="iaf0", + C="1.0 nF", + thresh="-50mV", + reset="-65mV", + leak_conductance="10 nS", + leak_reversal="-65mV", +) nml_doc.iaf_cells.append(IafCell0) -IafCell1 = IafCell(id="iaf1", - C="1.0 nF", - thresh = "-50mV", - reset="-65mV", - leak_conductance="20 nS", - leak_reversal="-65mV") +IafCell1 = IafCell( + id="iaf1", + C="1.0 nF", + thresh="-50mV", + reset="-65mV", + leak_conductance="20 nS", + leak_reversal="-65mV", +) nml_doc.iaf_cells.append(IafCell1) -syn0 = ExpOneSynapse(id="syn0", - gbase="65nS", - erev="0mV", - tau_decay="3ms") +syn0 = ExpOneSynapse(id="syn0", gbase="65nS", erev="0mV", tau_decay="3ms") nml_doc.exp_one_synapses.append(syn0) -gj = GapJunction(id="gj1",conductance="10pS") +gj = GapJunction(id="gj1", conductance="10pS") nml_doc.gap_junctions.append(gj) - -pfs = PoissonFiringSynapse(id='pfs', - average_rate='50Hz', - synapse=syn0.id, - spike_target="./%s"%syn0.id) +pfs = PoissonFiringSynapse( + id="pfs", average_rate="50Hz", synapse=syn0.id, spike_target="./%s" % syn0.id +) nml_doc.poisson_firing_synapses.append(pfs) @@ -80,35 +79,37 @@ nml_doc.networks.append(net) -size0 = int(5*scale) -pop0 = Population(id="IafPop0", - component=IafCell0.id, - size=size0) +size0 = int(5 * scale) +pop0 = 
Population(id="IafPop0", component=IafCell0.id, size=size0) net.populations.append(pop0) -size1 = int(5*scale) -pop1 = Population(id="IafPop1", - component=IafCell1.id, - size=size1) +size1 = int(5 * scale) +pop1 = Population(id="IafPop1", component=IafCell1.id, size=size1) net.populations.append(pop1) -cell_num = int(4*scale) -pop = Population(id="Pop_x", component=IafCell0.id, type="populationList",size=cell_num) +cell_num = int(4 * scale) +pop = Population( + id="Pop_x", component=IafCell0.id, type="populationList", size=cell_num +) net.populations.append(pop) pop.properties.append(Property(tag="color", value="1 0 0")) x_size = 500 y_size = 500 z_size = 500 - + for i in range(cell_num): inst = Instance(id=i) pop.instances.append(inst) - inst.location = Location(x=str(x_size*random.random()), y=str(y_size*random.random()), z=str(z_size*random.random())) + inst.location = Location( + x=str(x_size * random.random()), + y=str(y_size * random.random()), + z=str(z_size * random.random()), + ) prob_connection = 0.5 proj_count = 0 @@ -116,103 +117,119 @@ from_pop = "Pop_x" to_pop = "Pop_x" -projection = Projection(id="Proj", presynaptic_population=from_pop, postsynaptic_population=to_pop, synapse=syn0.id) -electricalProjection = ElectricalProjection(id="ElectProj", presynaptic_population=from_pop, postsynaptic_population=to_pop) +projection = Projection( + id="Proj", + presynaptic_population=from_pop, + postsynaptic_population=to_pop, + synapse=syn0.id, +) +electricalProjection = ElectricalProjection( + id="ElectProj", presynaptic_population=from_pop, postsynaptic_population=to_pop +) net.projections.append(projection) net.electrical_projections.append(electricalProjection) -input_list = InputList(id='il', - component=pfs.id, - populations=from_pop) +input_list = InputList(id="il", component=pfs.id, populations=from_pop) net.input_lists.append(input_list) -for pre_index in range(0,cell_num): - - for post_index in range(0,cell_num): +for pre_index in range(0, cell_num): 
+ + for post_index in range(0, cell_num): if pre_index != post_index and random.random() <= prob_connection: pre_seg_id = 0 post_seg_id = 0 - connection = ConnectionWD(id=proj_count, \ - pre_cell_id="../%s/%i/%s"%(from_pop,pre_index,IafCell0.id), \ - pre_segment_id=pre_seg_id, \ - pre_fraction_along=random.random(), - post_cell_id="../%s/%i/%s"%(to_pop,post_index,IafCell0.id), \ - post_segment_id=post_seg_id, - post_fraction_along=random.random(), - weight=random.random(), - delay='%sms'%(random.random()*10)) + connection = ConnectionWD( + id=proj_count, + pre_cell_id="../%s/%i/%s" % (from_pop, pre_index, IafCell0.id), + pre_segment_id=pre_seg_id, + pre_fraction_along=random.random(), + post_cell_id="../%s/%i/%s" % (to_pop, post_index, IafCell0.id), + post_segment_id=post_seg_id, + post_fraction_along=random.random(), + weight=random.random(), + delay="%sms" % (random.random() * 10), + ) projection.connection_wds.append(connection) - - electricalConnection = ElectricalConnectionInstance(id=proj_count, \ - pre_cell="../%s/%i/%s"%(from_pop,pre_index,IafCell0.id), \ - pre_segment=pre_seg_id, \ - pre_fraction_along=random.random(), - post_cell="../%s/%i/%s"%(to_pop,post_index,IafCell0.id), \ - post_segment=post_seg_id, - post_fraction_along=random.random(), - synapse=gj.id) - - electricalProjection.electrical_connection_instances.append(electricalConnection) - + + electricalConnection = ElectricalConnectionInstance( + id=proj_count, + pre_cell="../%s/%i/%s" % (from_pop, pre_index, IafCell0.id), + pre_segment=pre_seg_id, + pre_fraction_along=random.random(), + post_cell="../%s/%i/%s" % (to_pop, post_index, IafCell0.id), + post_segment=post_seg_id, + post_fraction_along=random.random(), + synapse=gj.id, + ) + + electricalProjection.electrical_connection_instances.append( + electricalConnection + ) + proj_count += 1 - - input = Input(id=pre_index, - target="../%s/%i/%s"%(from_pop, pre_index, pop.component), - destination="synapses") - input_list.input.append(input) - + + 
input = Input( + id=pre_index, + target="../%s/%i/%s" % (from_pop, pre_index, pop.component), + destination="synapses", + ) + input_list.input.append(input) + proj_count = 0 from_pop = pop0.id to_pop = pop1.id -electricalProjection = ElectricalProjection(id="ElectProj0", presynaptic_population=from_pop, postsynaptic_population=to_pop) +electricalProjection = ElectricalProjection( + id="ElectProj0", presynaptic_population=from_pop, postsynaptic_population=to_pop +) net.electrical_projections.append(electricalProjection) -for pre_index in range(0,size0): - - for post_index in range(0,size1): +for pre_index in range(0, size0): + + for post_index in range(0, size1): if pre_index != post_index and random.random() <= prob_connection: pre_seg_id = 0 post_seg_id = 0 - - electricalConnection = ElectricalConnection(id=proj_count, \ - pre_cell="%s"%(pre_index), \ - pre_segment=pre_seg_id, \ - pre_fraction_along=random.random(), - post_cell="%s"%(post_index), \ - post_segment=post_seg_id, - post_fraction_along=random.random(), - synapse=gj.id) - + + electricalConnection = ElectricalConnection( + id=proj_count, + pre_cell="%s" % (pre_index), + pre_segment=pre_seg_id, + pre_fraction_along=random.random(), + post_cell="%s" % (post_index), + post_segment=post_seg_id, + post_fraction_along=random.random(), + synapse=gj.id, + ) + electricalProjection.electrical_connections.append(electricalConnection) - + proj_count += 1 -nml_file = 'test_files/testh5.nml' +nml_file = "test_files/testh5.nml" writers.NeuroMLWriter.write(nml_doc, nml_file) -print("Created:\n"+nml_doc.summary()) -print("Written network file to: "+nml_file) +print("Created:\n" + nml_doc.summary()) +print("Written network file to: " + nml_file) -nml_h5_file = 'tmp/testh5.nml.h5' +nml_h5_file = "tmp/testh5.nml.h5" writers.NeuroMLHdf5Writer.write(nml_doc, nml_h5_file) -print("Written H5 network file to: "+nml_h5_file) +print("Written H5 network file to: " + nml_h5_file) -###### Validate the NeuroML ###### +###### Validate 
the NeuroML ###### from neuroml.utils import validate_neuroml2 validate_neuroml2(nml_file) - diff --git a/neuroml/examples/ion_channel_generation.py b/neuroml/examples/ion_channel_generation.py index 70e809e5..229c067e 100644 --- a/neuroml/examples/ion_channel_generation.py +++ b/neuroml/examples/ion_channel_generation.py @@ -5,33 +5,31 @@ import neuroml import neuroml.writers as writers -chan = neuroml.IonChannelHH(id='na', - conductance='10pS', - species='na', - notes="This is an example voltage-gated Na channel") - -m_gate = neuroml.GateHHRates(id='m',instances='3') -h_gate = neuroml.GateHHRates(id='h',instances='1') - -m_gate.forward_rate = neuroml.HHRate(type="HHExpRate", - rate="0.07per_ms", - midpoint="-65mV", - scale="-20mV") - -m_gate.reverse_rate = neuroml.HHRate(type="HHSigmoidRate", - rate="1per_ms", - midpoint="-35mV", - scale="10mV") - -h_gate.forward_rate = neuroml.HHRate(type="HHExpLinearRate", - rate="0.1per_ms", - midpoint="-55mV", - scale="10mV") - -h_gate.reverse_rate = neuroml.HHRate(type="HHExpRate", - rate="0.125per_ms", - midpoint="-65mV", - scale="-80mV") +chan = neuroml.IonChannelHH( + id="na", + conductance="10pS", + species="na", + notes="This is an example voltage-gated Na channel", +) + +m_gate = neuroml.GateHHRates(id="m", instances="3") +h_gate = neuroml.GateHHRates(id="h", instances="1") + +m_gate.forward_rate = neuroml.HHRate( + type="HHExpRate", rate="0.07per_ms", midpoint="-65mV", scale="-20mV" +) + +m_gate.reverse_rate = neuroml.HHRate( + type="HHSigmoidRate", rate="1per_ms", midpoint="-35mV", scale="10mV" +) + +h_gate.forward_rate = neuroml.HHRate( + type="HHExpLinearRate", rate="0.1per_ms", midpoint="-55mV", scale="10mV" +) + +h_gate.reverse_rate = neuroml.HHRate( + type="HHExpRate", rate="0.125per_ms", midpoint="-65mV", scale="-80mV" +) chan.gate_hh_rates.append(m_gate) chan.gate_hh_rates.append(h_gate) @@ -41,13 +39,13 @@ doc.id = "ChannelMLDemo" -nml_file = './tmp/ionChannelTest.xml' 
-writers.NeuroMLWriter.write(doc,nml_file) +nml_file = "./tmp/ionChannelTest.xml" +writers.NeuroMLWriter.write(doc, nml_file) -print("Written channel file to: "+nml_file) +print("Written channel file to: " + nml_file) -###### Validate the NeuroML ###### +###### Validate the NeuroML ###### from neuroml.utils import validate_neuroml2 diff --git a/neuroml/examples/json_serialization.py b/neuroml/examples/json_serialization.py deleted file mode 100644 index e5b8be1c..00000000 --- a/neuroml/examples/json_serialization.py +++ /dev/null @@ -1,72 +0,0 @@ -""" -In this example an axon is built, a morphology is loaded, the axon is -then connected to the loadeed morphology. The whole thing is serialized -in JSON format, reloaded and validated. -""" - -import neuroml -import neuroml.loaders as loaders -import neuroml.writers as writers - -fn = './test_files/Purk2M9s.nml' -doc = loaders.NeuroMLLoader.load(fn) -print("Loaded morphology file from: "+fn) - -#get the parent segment: -parent_segment = doc.cells[0].morphology.segments[0] - -parent = neuroml.SegmentParent(segments=parent_segment.id) - -#make an axon: -seg_id = 5000 # need a way to get a unique id from a morphology -axon_segments = [] -for i in range(10): - p = neuroml.Point3DWithDiam(x=parent_segment.distal.x, - y=parent_segment.distal.y, - z=parent_segment.distal.z, - diameter=0.1) - - d = neuroml.Point3DWithDiam(x=parent_segment.distal.x+10, - y=parent_segment.distal.y, - z=parent_segment.distal.z, - diameter=0.1) - - axon_segment = neuroml.Segment(proximal = p, - distal = d, - parent = parent) - - axon_segment.id = seg_id - - axon_segment.name = 'axon_segment_' + str(axon_segment.id) - - #now reset everything: - parent = neuroml.SegmentParent(segments=axon_segment.id) - parent_segment = axon_segment - seg_id += 1 - - axon_segments.append(axon_segment) - -doc.cells[0].morphology.segments += axon_segments - -json_file = './tmp/modified_morphology.json' - -writers.JSONWriter.write(doc,json_file) - -print("Saved 
modified morphology in JSON format to: " + json_file) - - -##### load it again, this time write it to a normal neuroml file ### - -neuroml_document_from_json = loaders.JSONLoader.load(json_file) - -print("Re-loaded neuroml document in JSON format to NeuroMLDocument object") - -nml_file = './tmp/modified_morphology_from_json.nml' - -writers.NeuroMLWriter.write(neuroml_document_from_json,nml_file) - -###### Validate the NeuroML ###### - -from neuroml.utils import validate_neuroml2 - -validate_neuroml2(nml_file) diff --git a/neuroml/examples/loading_modifying_writing.py b/neuroml/examples/loading_modifying_writing.py index a8221f36..23ebe7b5 100644 --- a/neuroml/examples/loading_modifying_writing.py +++ b/neuroml/examples/loading_modifying_writing.py @@ -7,54 +7,56 @@ import neuroml.loaders as loaders import neuroml.writers as writers -fn = './test_files/Purk2M9s.nml' +fn = "./test_files/Purk2M9s.nml" doc = loaders.NeuroMLLoader.load(fn) -print("Loaded morphology file from: "+fn) +print("Loaded morphology file from: " + fn) -#get the parent segment: +# get the parent segment: parent_segment = doc.cells[0].morphology.segments[0] parent = neuroml.SegmentParent(segments=parent_segment.id) -#make an axon: -seg_id = 5000 # need a way to get a unique id from a morphology +# make an axon: +seg_id = 5000 # need a way to get a unique id from a morphology axon_segments = [] for i in range(10): - p = neuroml.Point3DWithDiam(x=parent_segment.distal.x, - y=parent_segment.distal.y, - z=parent_segment.distal.z, - diameter=0.1) - - d = neuroml.Point3DWithDiam(x=parent_segment.distal.x+10, - y=parent_segment.distal.y, - z=parent_segment.distal.z, - diameter=0.1) - - axon_segment = neuroml.Segment(proximal = p, - distal = d, - parent = parent) + p = neuroml.Point3DWithDiam( + x=parent_segment.distal.x, + y=parent_segment.distal.y, + z=parent_segment.distal.z, + diameter=0.1, + ) + + d = neuroml.Point3DWithDiam( + x=parent_segment.distal.x + 10, + y=parent_segment.distal.y, + 
z=parent_segment.distal.z, + diameter=0.1, + ) + + axon_segment = neuroml.Segment(proximal=p, distal=d, parent=parent) axon_segment.id = seg_id - - axon_segment.name = 'axon_segment_' + str(axon_segment.id) - #now reset everything: + axon_segment.name = "axon_segment_" + str(axon_segment.id) + + # now reset everything: parent = neuroml.SegmentParent(segments=axon_segment.id) parent_segment = axon_segment - seg_id += 1 + seg_id += 1 axon_segments.append(axon_segment) doc.cells[0].morphology.segments += axon_segments -nml_file = './tmp/modified_morphology.nml' +nml_file = "./tmp/modified_morphology.nml" -writers.NeuroMLWriter.write(doc,nml_file) +writers.NeuroMLWriter.write(doc, nml_file) -print("Saved modified morphology file to: "+nml_file) +print("Saved modified morphology file to: " + nml_file) -###### Validate the NeuroML ###### +###### Validate the NeuroML ###### from neuroml.utils import validate_neuroml2 diff --git a/neuroml/examples/loading_modifying_writing_large.py b/neuroml/examples/loading_modifying_writing_large.py index 113ab312..ee806cd7 100644 --- a/neuroml/examples/loading_modifying_writing_large.py +++ b/neuroml/examples/loading_modifying_writing_large.py @@ -9,12 +9,12 @@ from guppy import hpy -h = hpy() +h = hpy() -fn = './test_files/EC2-609291-4.CNG.nml' -#fn = './test_files/Purk2M9s.nml' +fn = "./test_files/EC2-609291-4.CNG.nml" +# fn = './test_files/Purk2M9s.nml' cells = [] @@ -24,58 +24,55 @@ doc = loaders.NeuroMLLoader.load(fn) cells.append(doc.cells[0]) - print("Loaded morphology file from: "+fn) + print("Loaded morphology file from: " + fn) - #get the parent segment: + # get the parent segment: parent_segment = doc.cells[0].morphology.segments[0] - print("Cell %i has %i segments"%(n,len(doc.cells[0].morphology.segments))) + print("Cell %i has %i segments" % (n, len(doc.cells[0].morphology.segments))) print(h.heap()) - - parent = neuroml.SegmentParent(segments=parent_segment.id) - #make an axon: - seg_id = 5000 # need a way to get a unique 
id from a morphology + # make an axon: + seg_id = 5000 # need a way to get a unique id from a morphology axon_segments = [] for i in range(10): - p = neuroml.Point3DWithDiam(x=parent_segment.distal.x, - y=parent_segment.distal.y, - z=parent_segment.distal.z, - diameter=0.1) - - d = neuroml.Point3DWithDiam(x=parent_segment.distal.x+10, - y=parent_segment.distal.y, - z=parent_segment.distal.z, - diameter=0.1) - - axon_segment = neuroml.Segment(proximal = p, - distal = d, - parent = parent) + p = neuroml.Point3DWithDiam( + x=parent_segment.distal.x, + y=parent_segment.distal.y, + z=parent_segment.distal.z, + diameter=0.1, + ) + + d = neuroml.Point3DWithDiam( + x=parent_segment.distal.x + 10, + y=parent_segment.distal.y, + z=parent_segment.distal.z, + diameter=0.1, + ) + + axon_segment = neuroml.Segment(proximal=p, distal=d, parent=parent) axon_segment.id = seg_id - - axon_segment.name = 'axon_segment_' + str(axon_segment.id) - #now reset everything: + axon_segment.name = "axon_segment_" + str(axon_segment.id) + + # now reset everything: parent = neuroml.SegmentParent(segments=axon_segment.id) parent_segment = axon_segment - seg_id += 1 + seg_id += 1 axon_segments.append(axon_segment) doc.cells[0].morphology.segments += axon_segments - - -fn = './tmp/modified_morphology2.nml' -writers.NeuroMLWriter.write(doc,fn) -print("Saved modified morphology file to: "+fn) +fn = "./tmp/modified_morphology2.nml" +writers.NeuroMLWriter.write(doc, fn) -print("Done") - +print("Saved modified morphology file to: " + fn) +print("Done") diff --git a/neuroml/examples/morphology_generation.py b/neuroml/examples/morphology_generation.py index c4e2dff9..aa2cd19c 100644 --- a/neuroml/examples/morphology_generation.py +++ b/neuroml/examples/morphology_generation.py @@ -6,10 +6,10 @@ import neuroml import neuroml.writers as writers -p = neuroml.Point3DWithDiam(x=0,y=0,z=0,diameter=50) -d = neuroml.Point3DWithDiam(x=50,y=0,z=0,diameter=50) +p = neuroml.Point3DWithDiam(x=0, y=0, z=0, diameter=50) 
+d = neuroml.Point3DWithDiam(x=50, y=0, z=0, diameter=50) soma = neuroml.Segment(proximal=p, distal=d) -soma.name = 'Soma' +soma.name = "Soma" soma.id = 0 # Make an axon with 100 compartments: @@ -20,28 +20,30 @@ seg_id = 1 for i in range(100): - p = neuroml.Point3DWithDiam(x=parent_segment.distal.x, - y=parent_segment.distal.y, - z=parent_segment.distal.z, - diameter=0.1) - - d = neuroml.Point3DWithDiam(x=parent_segment.distal.x+10, - y=parent_segment.distal.y, - z=parent_segment.distal.z, - diameter=0.1) - - axon_segment = neuroml.Segment(proximal = p, - distal = d, - parent = parent) + p = neuroml.Point3DWithDiam( + x=parent_segment.distal.x, + y=parent_segment.distal.y, + z=parent_segment.distal.z, + diameter=0.1, + ) + + d = neuroml.Point3DWithDiam( + x=parent_segment.distal.x + 10, + y=parent_segment.distal.y, + z=parent_segment.distal.z, + diameter=0.1, + ) + + axon_segment = neuroml.Segment(proximal=p, distal=d, parent=parent) axon_segment.id = seg_id - - axon_segment.name = 'axon_segment_' + str(axon_segment.id) - #now reset everything: + axon_segment.name = "axon_segment_" + str(axon_segment.id) + + # now reset everything: parent = neuroml.SegmentParent(segments=axon_segment.id) parent_segment = axon_segment - seg_id += 1 + seg_id += 1 axon_segments.append(axon_segment) @@ -51,22 +53,22 @@ test_morphology.id = "TestMorphology" cell = neuroml.Cell() -cell.name = 'TestCell' -cell.id = 'TestCell' +cell.name = "TestCell" +cell.id = "TestCell" cell.morphology = test_morphology -doc = neuroml.NeuroMLDocument(id = "TestNeuroMLDocument") +doc = neuroml.NeuroMLDocument(id="TestNeuroMLDocument") doc.cells.append(cell) -nml_file = 'tmp/testmorphwrite.nml' - -writers.NeuroMLWriter.write(doc,nml_file) - -print("Written morphology file to: "+nml_file) +nml_file = "tmp/testmorphwrite.nml" + +writers.NeuroMLWriter.write(doc, nml_file) + +print("Written morphology file to: " + nml_file) -###### Validate the NeuroML ###### +###### Validate the NeuroML ###### from 
neuroml.utils import validate_neuroml2 diff --git a/neuroml/examples/run_all.py b/neuroml/examples/run_all.py index 6f32acb7..51036714 100644 --- a/neuroml/examples/run_all.py +++ b/neuroml/examples/run_all.py @@ -1,9 +1,11 @@ print("Running all examples...") + def run_example(ex_file): - print("-------------------------------------\nRunning %s"%ex_file) - exec("import %s"%ex_file[:-3]) - print("-------------------------------------\nCompleted: %s"%ex_file) + print("-------------------------------------\nRunning %s" % ex_file) + exec("import %s" % ex_file[:-3]) + print("-------------------------------------\nCompleted: %s" % ex_file) + run_example("arraymorph_generation.py") run_example("build_3D_network.py") @@ -11,10 +13,9 @@ def run_example(ex_file): run_example("build_network2.py") run_example("build_complete.py") run_example("ion_channel_generation.py") -run_example("json_serialization.py") run_example("loading_modifying_writing.py") # This next one takes a while to run but all of its functionality is covered in loading_modifying_writing.py -#run_example("loading_modifying_writing_large.py") +# run_example("loading_modifying_writing_large.py") run_example("morphology_generation.py") run_example("write_pynn.py") run_example("write_syns.py") diff --git a/neuroml/examples/single_izhikevich_reader.py b/neuroml/examples/single_izhikevich_reader.py index 046c0ce0..0525d3f4 100644 --- a/neuroml/examples/single_izhikevich_reader.py +++ b/neuroml/examples/single_izhikevich_reader.py @@ -1,8 +1,9 @@ -#from neuroml import NeuroMLDocument +# from neuroml import NeuroMLDocument from neuroml import IzhikevichCell from neuroml.loaders import NeuroMLLoader from neuroml.utils import validate_neuroml2 + def load_izhikevich(filename="./test_files/SingleIzhikevich.nml"): nml_filename = filename validate_neuroml2(nml_filename) @@ -11,7 +12,15 @@ def load_izhikevich(filename="./test_files/SingleIzhikevich.nml"): iz_cells = nml_doc.izhikevich_cells for i, iz in enumerate(iz_cells): 
if isinstance(iz, IzhikevichCell): - neuron_string = "%d %s %s %s %s %s (%s)" % (i, iz.v0, iz.a, iz.b, iz.c, iz.d, iz.id) + neuron_string = "%d %s %s %s %s %s (%s)" % ( + i, + iz.v0, + iz.a, + iz.b, + iz.c, + iz.d, + iz.id, + ) print(neuron_string) else: print("Error: Cell %d is not an IzhikevichCell" % i) diff --git a/neuroml/examples/single_izhikevich_writer.py b/neuroml/examples/single_izhikevich_writer.py index 095756cf..e49afc99 100644 --- a/neuroml/examples/single_izhikevich_writer.py +++ b/neuroml/examples/single_izhikevich_writer.py @@ -8,7 +8,9 @@ def write_izhikevich(filename="./tmp/SingleIzhikevich_test.nml"): nml_doc = NeuroMLDocument(id="SingleIzhikevich") nml_filename = filename - iz0 = IzhikevichCell(id="iz0", v0="-70mV", thresh="30mV", a="0.02", b="0.2", c="-65.0", d="6") + iz0 = IzhikevichCell( + id="iz0", v0="-70mV", thresh="30mV", a="0.02", b="0.2", c="-65.0", d="6" + ) nml_doc.izhikevich_cells.append(iz0) @@ -16,7 +18,4 @@ def write_izhikevich(filename="./tmp/SingleIzhikevich_test.nml"): validate_neuroml2(nml_filename) - write_izhikevich() - - diff --git a/neuroml/examples/test_files/complete.nml b/neuroml/examples/test_files/complete.nml index fa250ae2..ce78e6df 100644 --- a/neuroml/examples/test_files/complete.nml +++ b/neuroml/examples/test_files/complete.nml @@ -1,4 +1,4 @@ - + Lots of notes.... 
@@ -42,160 +42,160 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + - + diff --git a/neuroml/examples/test_files/complete.nml.h5 b/neuroml/examples/test_files/complete.nml.h5 index 85ee88f2..c1b02e5f 100644 Binary files a/neuroml/examples/test_files/complete.nml.h5 and b/neuroml/examples/test_files/complete.nml.h5 differ diff --git a/neuroml/examples/test_files/sbml-example.xml b/neuroml/examples/test_files/sbml-example.xml new file mode 100644 index 00000000..dae4e7fd --- /dev/null +++ b/neuroml/examples/test_files/sbml-example.xml @@ -0,0 +1,2404 @@ + + + + + +

This a model from the article:
+ Mathematical model of the morphogenesis checkpoint in budding yeast. +
+ Ciliberto A, Novak B, Tyson JJ J. Cell Biol. + [2003 Dec; Volume: 163 (Issue: 6 )] Page info: 1243-54 14691135 + ,
+ Abstract: +
+ The morphogenesis checkpoint in budding yeast delays progression through the cell cycle in response to stimuli that prevent bud formation. Central to the checkpoint mechanism is Swe1 kinase: normally inactive, its activation halts cell cycle progression in G2. We propose a molecular network for Swe1 control, based on published observations of budding yeast and analogous control signals in fission yeast. The proposed Swe1 network is merged with a model of cyclin-dependent kinase regulation, converted into a set of differential equations and studied by numerical simulation. The simulations accurately reproduce the phenotypes of a dozen checkpoint mutants. Among other predictions, the model attributes a new role to Hsl1, a kinase known to play a role in Swe1 degradation: Hsl1 must also be indirectly responsible for potent inhibition of Swe1 activity. The model supports the idea that the morphogenesis checkpoint, like other checkpoints, raises the cell size threshold for progression from one phase of the cell cycle to the next.

+

+ The model reproduces Fig 3 of the paper.

+

+ This model originates from BioModels Database: A Database of Annotated Published Models (http://www.ebi.ac.uk/biomodels/). It is copyright (c) 2005-2011 The BioModels.net Team.
+ For more information see the terms of use + .
+ To cite BioModels Database, please use: Li C, Donizelli M, Rodriguez N, Dharuri H, Endler L, Chelliah V, Li L, He E, Henry A, Stefan MI, Snoep JL, Hucka M, Le Novère N, Laibe C (2010) BioModels Database: An enhanced, curated and annotated resource for published quantitative kinetic models. BMC Syst Biol., 4:92. +

+ + +
+ + + + + + + + Dharuri + Harish + + hdharuri@cds.caltech.edu + + California Institute of Technology + + + + + Chelliah + Vijayalakshmi + + viji@ebi.ac.uk + + EMBL-EBI + + + + + + 2008-02-07T14:15:40Z + + + 2012-01-31T13:52:48Z + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + kswe_prime + Swe1 + + + + kswe_doubleprime + Swe1M + + + + kswe_tripleprime + PSwe1 + + + + + + + + + + + kmih_prime + Mih_ast + + + + kmih_doubleprime + Mih + + + + + + + + + Swe1 + Swe1M + PSwe1 + PSwe1M + + + + + + + + IEtot + IE + + + + + + + + Cdh1tot + Cdh1 + + + + + + + + Mih1tot + Mih1a + + + + + + + + Mcmtot + Mcm + + + + + + + + SBFtot + SBF + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + kdiss + Trim + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Trim + + + + + kdsic_prime + Cln + + + + kdsic_doubleprime + Clb + + kdsic + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Trim + + + + + kdclb_doubleprime + Cdh1 + + + + kdclb_tripleprime + Cdc20a + + kdclb_prime + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Trim + kswe + + + + + + + + + + + + + + + + + + + + + + + + + + + + + kass + Sic + Clb + + + + + + + + + + + + + + + + + + + + + + + + + + + + PTrim + kmih + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
Clb + + + + + kdclb_doubleprime + Cdh1 + + + + kdclb_tripleprime + Cdc20a + + kdclb_prime + + + + + + + + + + + + + + + + + + + + + + + + + + + + + kswe + Clb + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ksclb + mass + Jm + + + eps + Mcm + + + + + mass + Jm + + + + + + + + + + + + + + + + + + + + + + + + + + + + + kmih + PClb + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + SBF + + + kisbf_prime + + + kisbf_doubleprime + Clb + + + + + + jisbf + SBF + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + SBFin + + + + + kasbf_prime + mass + + + + kasbf_doubleprime + Cln + + + + + + jasbf + SBFin + + + + + + + + + + + + + + + + + + + + + + + + + + + + IE + kiie + + + + jiie + IE + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + kaie + IEin + Clb + + + + jaie + IEin + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Cdc20a + kicdc20 + + + + jicdc20 + Cdc20a + + + + + + + + + + + + + + + + + + + + + + + + + + kdcdc20 + Cdc20a + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + kacdc20 + Cdc20 + IE + + + + jacdc20 + Cdc20 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Cdh1 + + + + + kicdh + Clb + + + + kicdh_prime + Cln + + + + + + jicdh + Cdh1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Cdh1in + + + Kacdh_prime + + + Kacdh_doubleprime + Cdc20a + + + + + + jacdh + Cdh1in + + + + + + + + + + + + + + + + + + + + + + + + + + + + + khsl1 + BUD + Swe1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + khsl1 + BUD + PSwe1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Viwee + Swe1 + Clb + + + + Jiwee + Swe1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Viwee + Swe1M + Clb + + + + Jiwee + Swe1M + + + + + + + + + + + + + + + + + + + + + + + + + + kdswe_prime + Swe1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + khsl1r + Swe1M + + + + + + + + + + + + + + + + + + + + + + + + + + + + khsl1r + PSwe1M + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + PSwe1 + Vawee + + + + Jawee + PSwe1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + PSwe1M + Vawee + + + + Jawee + PSwe1M + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ksswe + SBF + + + + + + + + + + + + + + + + + + + + + + + kssweC + + + + + + + + + + + + + + + + + + + + + + + + kdswe_prime + PSwe1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + kdiss + PTrim + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + PTrim + + + + + kdclb_doubleprime + Cdh1 + + + + kdclb_tripleprime + Cdc20a + + kdclb_prime + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + PTrim + + + + + kdsic_prime + Cln + + + + kdsic_doubleprime + Clb + + kdsic + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + kass + PClb + Sic + + + + + + + + + + + + + + + + + + + + + + + + + + + Mih1a + Vimih + + + + jimih + Mih1a + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Vamih + Mih1 + Clb + + + + Jamih + Mih1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + Mcm + kimcm + + + + jimcm + Mcm + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Mcmin + Clb + kamcm + + + + jamcm + Mcmin + + + + + + + + + + + + + + + + + + + + + + + + + + kdbud + BE + + + + + + + + + + + + + + + + ksbud + Cln + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Sic + + + + + kdsic_prime + Cln + + + + kdsic_doubleprime + Clb + + kdsic + + + + + + + + + + + + + + + + + + + + + + + + kssic + + + + + + + + + + + + + + + + + + + + + + + + kdcln + Cln + + + + + + + + + + + + + + + + + + + + + + + + + + + + kscln + SBF + + + + + + + + + + + + + + + + + + + + + + + + + kdswe_prime + Swe1M + + + + + + + + + + + + + + + + + + + + + + + + + kdcdc20 + Cdc20 + + + + + + + + + + + + + + + + + + + + + + + + + + + + kscdc20_prime + + + + + kscdc20_doubleprime + + + Clb + 4 + + + + + + + jscdc20 + 4 + + + + Clb + 4 + + + + + + + + + + + + + + + + + + + + + + + + + + + + kdswe_doubleprime + PSwe1M + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + PClb + + + + + kdclb_doubleprime + Cdh1 + + + + kdclb_tripleprime + Cdc20a + + kdclb_prime + + + + + + + + + + + + + + + + + + + + + + + + + + mu + mass + + + + + + + + + + + + Clb + 0.2 + + + + + + + 1 + + + + + + + + + + + + Clb + 0.2 + + + + flag + 0 + + + + + + + + 0 + + + + + + + 0.5 + mass + + + + + + + + + + + + + Clb + 0.2 + + + + BE + 0.6 + + + + + + + + 1 + + + + + + + + + + + + Clb + 0.2 + + + + BE + 0.6 + + + + + + + + 0 + + + + + +
+
\ No newline at end of file diff --git a/neuroml/examples/test_files/testh5.nml b/neuroml/examples/test_files/testh5.nml index 816ebc5a..232103e7 100644 --- a/neuroml/examples/test_files/testh5.nml +++ b/neuroml/examples/test_files/testh5.nml @@ -1,4 +1,4 @@ - + Root notes @@ -37,112 +37,112 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/neuroml/examples/write_pynn.py b/neuroml/examples/write_pynn.py index 7acae3ce..00ee9224 100644 --- a/neuroml/examples/write_pynn.py +++ b/neuroml/examples/write_pynn.py @@ -15,21 +15,49 @@ nml_doc = NeuroMLDocument(id="IafNet") -pynn0 = IF_curr_alpha(id="IF_curr_alpha_pop_IF_curr_alpha", cm="1.0", i_offset="0.9", tau_m="20.0", tau_refrac="10.0", tau_syn_E="0.5", tau_syn_I="0.5", v_init="-65", v_reset="-62.0", v_rest="-65.0", v_thresh="-52.0") +pynn0 = IF_curr_alpha( + id="IF_curr_alpha_pop_IF_curr_alpha", + cm="1.0", + i_offset="0.9", + tau_m="20.0", + tau_refrac="10.0", + tau_syn_E="0.5", + tau_syn_I="0.5", + v_init="-65", + v_reset="-62.0", + v_rest="-65.0", + v_thresh="-52.0", +) nml_doc.IF_curr_alpha.append(pynn0) -pynn1 = HH_cond_exp(id="HH_cond_exp_pop_HH_cond_exp", cm="0.2", e_rev_E="0.0", e_rev_I="-80.0", e_rev_K="-90.0", e_rev_Na="50.0", e_rev_leak="-65.0", g_leak="0.01", gbar_K="6.0", gbar_Na="20.0", i_offset="0.2", tau_syn_E="0.2", tau_syn_I="2.0", v_init="-65", v_offset="-63.0") +pynn1 = HH_cond_exp( + id="HH_cond_exp_pop_HH_cond_exp", + cm="0.2", + e_rev_E="0.0", + e_rev_I="-80.0", + e_rev_K="-90.0", + e_rev_Na="50.0", + e_rev_leak="-65.0", + g_leak="0.01", + gbar_K="6.0", + gbar_Na="20.0", + i_offset="0.2", + tau_syn_E="0.2", + tau_syn_I="2.0", + 
v_init="-65", + v_offset="-63.0", +) nml_doc.HH_cond_exp.append(pynn1) pynnSynn0 = ExpCondSynapse(id="ps1", tau_syn="5", e_rev="0") nml_doc.exp_cond_synapses.append(pynnSynn0) -nml_file = 'tmp/pynn_network.xml' +nml_file = "tmp/pynn_network.xml" writers.NeuroMLWriter.write(nml_doc, nml_file) -print("Saved to: "+nml_file) +print("Saved to: " + nml_file) -###### Validate the NeuroML ###### +###### Validate the NeuroML ###### from neuroml.utils import validate_neuroml2 diff --git a/neuroml/examples/write_syns.py b/neuroml/examples/write_syns.py index 8938f17e..af806476 100644 --- a/neuroml/examples/write_syns.py +++ b/neuroml/examples/write_syns.py @@ -15,27 +15,36 @@ expOneSyn0 = ExpOneSynapse(id="ampa", tau_decay="5ms", gbase="1nS", erev="0mV") nml_doc.exp_one_synapses.append(expOneSyn0) -expTwoSyn0 = ExpTwoSynapse(id="gaba", tau_decay="12ms", tau_rise="3ms", gbase="1nS", erev="-70mV") +expTwoSyn0 = ExpTwoSynapse( + id="gaba", tau_decay="12ms", tau_rise="3ms", gbase="1nS", erev="-70mV" +) nml_doc.exp_two_synapses.append(expTwoSyn0) -bpSyn = BlockingPlasticSynapse(id="blockStpSynDep", gbase="1nS", erev="0mV", tau_rise="0.1ms", tau_decay="2ms") +bpSyn = BlockingPlasticSynapse( + id="blockStpSynDep", gbase="1nS", erev="0mV", tau_rise="0.1ms", tau_decay="2ms" +) bpSyn.notes = "This is a note" -bpSyn.plasticity_mechanism = PlasticityMechanism(type="tsodyksMarkramDepMechanism", init_release_prob="0.5", tau_rec="120 ms") -bpSyn.block_mechanism = BlockMechanism(type="voltageConcDepBlockMechanism", species="mg", block_concentration="1.2 mM", scaling_conc="1.920544 mM", scaling_volt="16.129 mV") +bpSyn.plasticity_mechanism = PlasticityMechanism( + type="tsodyksMarkramDepMechanism", init_release_prob="0.5", tau_rec="120 ms" +) +bpSyn.block_mechanism = BlockMechanism( + type="voltageConcDepBlockMechanism", + species="mg", + block_concentration="1.2 mM", + scaling_conc="1.920544 mM", + scaling_volt="16.129 mV", +) nml_doc.blocking_plastic_synapses.append(bpSyn) -nml_file = 
'tmp/synapses.xml' +nml_file = "tmp/synapses.xml" writers.NeuroMLWriter.write(nml_doc, nml_file) -print("Saved to: "+nml_file) +print("Saved to: " + nml_file) -###### Validate the NeuroML ###### +###### Validate the NeuroML ###### from neuroml.utils import validate_neuroml2 validate_neuroml2(nml_file) - - - diff --git a/neuroml/hdf5/DefaultNetworkHandler.py b/neuroml/hdf5/DefaultNetworkHandler.py index aff7d80f..2148cbcb 100644 --- a/neuroml/hdf5/DefaultNetworkHandler.py +++ b/neuroml/hdf5/DefaultNetworkHandler.py @@ -5,7 +5,7 @@ # Parsing classes, e.g. NetworkBuilder should call the appropriate # function here when a cell location, connection, etc. is encountered. # -# Use of this handler class should mean that the network setup is +# Use of this handler class should mean that the network setup is # independent of the source of the network info (XML or HDF5 based NeuroML # files for example) and the instantiator of the network objects (NetManagerNEURON # or PyNN based setup class) @@ -22,9 +22,9 @@ class DefaultNetworkHandler: - + log = logging.getLogger("DefaultNetworkHandler") - + isParallel = 0 # @@ -32,142 +32,249 @@ class DefaultNetworkHandler: # def print_location_information(self, id, population_id, component, x, y, z): position = "(%s, %s, %s)" % (x, y, z) - self.log.debug("Location "+str(id)+" of population: "+population_id+", component: "+component+": "+position) - - + self.log.debug( + "Location " + + str(id) + + " of population: " + + population_id + + ", component: " + + component + + ": " + + position + ) + # # Internal info method, can be reused in overriding classes for debugging - # - def print_connection_information(self, projName, id, prePop, postPop, synapseType, preCellId, postCellId, weight): - self.log.debug("Connection "+str(id)+" of: "+projName+": cell "+str(preCellId)+" in "+prePop \ - +" -> cell "+str(postCellId)+" in "+postPop+", syn: "+ str(synapseType)+", weight: "+str(weight)) - - + # + def print_connection_information( + self, 
projName, id, prePop, postPop, synapseType, preCellId, postCellId, weight + ): + self.log.debug( + "Connection " + + str(id) + + " of: " + + projName + + ": cell " + + str(preCellId) + + " in " + + prePop + + " -> cell " + + str(postCellId) + + " in " + + postPop + + ", syn: " + + str(synapseType) + + ", weight: " + + str(weight) + ) + # # Internal info method, can be reused in overriding classes for debugging - # + # def print_input_information(self, inputName, population_id, component, size=-1): - sizeInfo = " size: "+ str(size)+ " cells" - self.log.debug("Input Source: "+inputName+", on population: "+population_id+sizeInfo+" with component: "+ component) - + sizeInfo = " size: " + str(size) + " cells" + self.log.debug( + "Input Source: " + + inputName + + ", on population: " + + population_id + + sizeInfo + + " with component: " + + component + ) + # Temp for older API version def printLocationInformation(self, id, population_id, component, x, y, z): return self.print_location_information(id, population_id, component, x, y, z) - def printConnectionInformation(self, projName, id, prePop, postPop, synapseType, preCellId, postCellId, weight): - return self.print_connection_information(projName, id, prePop, postPop, synapseType, preCellId, postCellId, weight) + + def printConnectionInformation( + self, projName, id, prePop, postPop, synapseType, preCellId, postCellId, weight + ): + return self.print_connection_information( + projName, id, prePop, postPop, synapseType, preCellId, postCellId, weight + ) + def printInputInformation(self, inputName, population_id, component, size=-1): return self.print_input_information(inputName, population_id, component, size) # # Should be overridden - # + # def handle_document_start(self, id, notes): - - self.log.debug("Document: %s"%id) + + self.log.debug("Document: %s" % id) if notes: - self.log.debug(" Notes: "+notes) + self.log.debug(" Notes: " + notes) # # Should be overridden to create network - # + # def handle_network(self, 
network_id, notes, temperature=None): - - self.log.debug("Network: %s"%network_id) + + self.log.debug("Network: %s" % network_id) if temperature: - self.log.debug(" Temperature: "+temperature) + self.log.debug(" Temperature: " + temperature) if notes: - self.log.debug(" Notes: "+notes) + self.log.debug(" Notes: " + notes) # # Should be overridden to create population array - # - def handle_population(self, population_id, component, size=-1, component_obj=None, properties={}, notes=None): + # + def handle_population( + self, + population_id, + component, + size=-1, + component_obj=None, + properties={}, + notes=None, + ): sizeInfo = " as yet unspecified size" - if size>=0: - sizeInfo = " size: "+ str(size)+ " cells" + if size >= 0: + sizeInfo = " size: " + str(size) + " cells" if component_obj: - compInfo = " (%s)"%component_obj.__class__.__name__ + compInfo = " (%s)" % component_obj.__class__.__name__ else: - compInfo="" - propsInfo='' - if len(properties)>0: - propsInfo+='; %s'%properties - - self.log.debug("Population: "+population_id+", component: "+component+compInfo+sizeInfo+propsInfo) - - + compInfo = "" + propsInfo = "" + if len(properties) > 0: + propsInfo += "; %s" % properties + + self.log.debug( + "Population: " + + population_id + + ", component: " + + component + + compInfo + + sizeInfo + + propsInfo + ) + # # Should be overridden to create specific cell instance - # + # def handle_location(self, id, population_id, component, x, y, z): self.print_location_information(id, population_id, component, x, y, z) - - # # Should be overridden to create population array # - def handle_projection(self, projName, prePop, postPop, synapse, hasWeights=False, hasDelays=False, type="projection", synapse_obj=None, pre_synapse_obj=None): + def handle_projection( + self, + projName, + prePop, + postPop, + synapse, + hasWeights=False, + hasDelays=False, + type="projection", + synapse_obj=None, + pre_synapse_obj=None, + ): - synInfo="" + synInfo = "" if synapse_obj: - 
synInfo += " (syn: %s)"%synapse_obj.__class__.__name__ - - if pre_synapse_obj: - synInfo += " (pre comp: %s)"%pre_synapse_obj.__class__.__name__ + synInfo += " (syn: %s)" % synapse_obj.__class__.__name__ - self.log.debug("Projection: "+projName+" ("+type+") from "+prePop+" to "+postPop+" with syn: %s%s"%(synapse,synInfo)) + if pre_synapse_obj: + synInfo += " (pre comp: %s)" % pre_synapse_obj.__class__.__name__ + self.log.debug( + "Projection: " + + projName + + " (" + + type + + ") from " + + prePop + + " to " + + postPop + + " with syn: %s%s" % (synapse, synInfo) + ) # # Should be overridden to handle network connection - # - def handle_connection(self, projName, id, prePop, postPop, synapseType, \ - preCellId, \ - postCellId, \ - preSegId = 0, \ - preFract = 0.5, \ - postSegId = 0, \ - postFract = 0.5, \ - delay = 0, \ - weight = 1): - - self.print_connection_information(projName, id, prePop, postPop, synapseType, preCellId, postCellId, weight) - if preSegId != 0 or postSegId!=0 or preFract != 0.5 or postFract != 0.5: - self.log.debug("Src cell: %d, seg: %f, fract: %f -> Tgt cell %d, seg: %f, fract: %f; weight %s, delay: %s ms" % (preCellId,preSegId,preFract,postCellId,postSegId,postFract, weight, delay)) - + # + def handle_connection( + self, + projName, + id, + prePop, + postPop, + synapseType, + preCellId, + postCellId, + preSegId=0, + preFract=0.5, + postSegId=0, + postFract=0.5, + delay=0, + weight=1, + ): + + self.print_connection_information( + projName, id, prePop, postPop, synapseType, preCellId, postCellId, weight + ) + if preSegId != 0 or postSegId != 0 or preFract != 0.5 or postFract != 0.5: + self.log.debug( + "Src cell: %d, seg: %f, fract: %f -> Tgt cell %d, seg: %f, fract: %f; weight %s, delay: %s ms" + % ( + preCellId, + preSegId, + preFract, + postCellId, + postSegId, + postFract, + weight, + delay, + ) + ) + # # Should be overridden to handle end of network connection - # - def finalise_projection(self, projName, prePop, postPop, synapse=None, 
type="projection"): - - self.log.debug("Projection: "+projName+" from "+prePop+" to "+postPop+" completed") - - + # + def finalise_projection( + self, projName, prePop, postPop, synapse=None, type="projection" + ): + + self.log.debug( + "Projection: " + + projName + + " from " + + prePop + + " to " + + postPop + + " completed" + ) + # # Should be overridden to create input source array - # - def handle_input_list(self, inputListId, population_id, component, size, input_comp_obj=None): - + # + def handle_input_list( + self, inputListId, population_id, component, size, input_comp_obj=None + ): + self.print_input_information(inputListId, population_id, component, size) - - if size<0: - self.log.error("Error! Need a size attribute in sites element to create spike source!") + + if size < 0: + self.log.error( + "Error! Need a size attribute in sites element to create spike source!" + ) return - - + # # Should be overridden to to connect each input to the target cell - # - def handle_single_input(self, inputListId, id, cellId, segId = 0, fract = 0.5, weight=1): - - self.log.debug("Input: %s[%s], cellId: %i, seg: %i, fract: %f, weight: %f" % (inputListId,id,cellId,segId,fract,weight)) - - + # + def handle_single_input( + self, inputListId, id, cellId, segId=0, fract=0.5, weight=1 + ): + + self.log.debug( + "Input: %s[%s], cellId: %i, seg: %i, fract: %f, weight: %f" + % (inputListId, id, cellId, segId, fract, weight) + ) + # # Should be overridden to to connect each input to the target cell - # + # def finalise_input_source(self, inputName): self.log.debug("Input : %s completed" % inputName) - - diff --git a/neuroml/hdf5/NetworkBuilder.py b/neuroml/hdf5/NetworkBuilder.py index f85e7f73..a2408df4 100644 --- a/neuroml/hdf5/NetworkBuilder.py +++ b/neuroml/hdf5/NetworkBuilder.py @@ -21,343 +21,449 @@ class NetworkBuilder(DefaultNetworkHandler): - + log = logging.getLogger("NetworkBuilder") - + populations = {} projections = {} projection_syns = {} projection_types = {} 
projection_syns_pre = {} input_lists = {} - + weightDelays = {} - - def get_nml_doc(self): return self.nml_doc - + # # Overridden from DefaultNetworkHandler - # + # def handle_document_start(self, id, notes): self.nml_doc = neuroml.NeuroMLDocument(id=id) - if notes and len(notes)>0: + if notes and len(notes) > 0: self.nml_doc.notes = notes - + # # Overridden from DefaultNetworkHandler - # + # def handle_network(self, network_id, notes, temperature=None): - + self.network = neuroml.Network(id=network_id) self.nml_doc.networks.append(self.network) - if notes and len(notes)>0: + if notes and len(notes) > 0: self.network.notes = notes if temperature is not None: self.network.temperature = temperature self.network.type = "networkWithTemperature" - + # # Overridden from DefaultNetworkHandler - # - def handle_population(self, population_id, component, size, component_obj=None, properties={}, notes=None): - + # + def handle_population( + self, + population_id, + component, + size, + component_obj=None, + properties={}, + notes=None, + ): + if component_obj: self.nml_doc.append(component_obj) - + pop = neuroml.Population(id=population_id, component=component, size=size) - - if notes and len(notes)>0: + + if notes and len(notes) > 0: pop.notes = notes - + self.populations[population_id] = pop self.network.populations.append(pop) for p in properties: - pop.properties.append(neuroml.Property(p,properties[p])) - - comp_obj_info = ' (%s)'%type(component_obj) if component_obj else '' - - if (size>=0): - sizeInfo = ", size "+ str(size)+ " cells" - self.log.debug("Creating population: "+population_id+", cell type: "+component+comp_obj_info+sizeInfo) + pop.properties.append(neuroml.Property(p, properties[p])) + + comp_obj_info = " (%s)" % type(component_obj) if component_obj else "" + + if size >= 0: + sizeInfo = ", size " + str(size) + " cells" + self.log.debug( + "Creating population: " + + population_id + + ", cell type: " + + component + + comp_obj_info + + sizeInfo + ) else: - 
self.log.error("Population: "+population_id+", cell type: "+component+comp_obj_info+" specifies no size. May lead to errors!") - - + self.log.error( + "Population: " + + population_id + + ", cell type: " + + component + + comp_obj_info + + " specifies no size. May lead to errors!" + ) + # # Overridden from DefaultNetworkHandler - # + # def handle_location(self, id, population_id, component, x, y, z): ##print('Loc: %s %s (%s,%s,%s)'%(id, population_id, x, y, z)) self.print_location_information(id, population_id, component, x, y, z) - + if x is not None and y is not None and z is not None: inst = neuroml.Instance(id=id) - inst.location = neuroml.Location(x=x,y=y,z=z) + inst.location = neuroml.Location(x=x, y=y, z=z) self.populations[population_id].instances.append(inst) - self.populations[population_id].type = 'populationList' + self.populations[population_id].type = "populationList" else: - self.log.warning('Ignoring location: %s %s (%s,%s,%s)'%(id, population_id, x, y, z)) - + self.log.warning( + "Ignoring location: %s %s (%s,%s,%s)" % (id, population_id, x, y, z) + ) + # # Overridden from DefaultNetworkHandler # - def handle_projection(self, id, prePop, postPop, synapse, hasWeights=False, hasDelays=False, type="projection", synapse_obj=None, pre_synapse_obj=None): - + def handle_projection( + self, + id, + prePop, + postPop, + synapse, + hasWeights=False, + hasDelays=False, + type="projection", + synapse_obj=None, + pre_synapse_obj=None, + ): + if synapse_obj: self.nml_doc.append(synapse_obj) if pre_synapse_obj: self.nml_doc.append(pre_synapse_obj) - + proj = None - - if type=="projection": - proj = neuroml.Projection(id=id, presynaptic_population=prePop, postsynaptic_population=postPop, synapse=synapse) + + if type == "projection": + proj = neuroml.Projection( + id=id, + presynaptic_population=prePop, + postsynaptic_population=postPop, + synapse=synapse, + ) self.network.projections.append(proj) - elif type=="electricalProjection": - proj = 
neuroml.ElectricalProjection(id=id, presynaptic_population=prePop, postsynaptic_population=postPop) + elif type == "electricalProjection": + proj = neuroml.ElectricalProjection( + id=id, presynaptic_population=prePop, postsynaptic_population=postPop + ) self.network.electrical_projections.append(proj) self.projection_syns[id] = synapse - elif type=="continuousProjection": - proj = neuroml.ContinuousProjection(id=id, presynaptic_population=prePop, postsynaptic_population=postPop) + elif type == "continuousProjection": + proj = neuroml.ContinuousProjection( + id=id, presynaptic_population=prePop, postsynaptic_population=postPop + ) self.network.continuous_projections.append(proj) - - if synapse_obj!=None: + + if synapse_obj != None: self.projection_syns[id] = synapse_obj.id else: self.projection_syns[id] = synapse - + if pre_synapse_obj == None: - pre_synapse_obj = neuroml.SilentSynapse(id="silentSyn_%s"%id) + pre_synapse_obj = neuroml.SilentSynapse(id="silentSyn_%s" % id) self.nml_doc.silent_synapses.append(pre_synapse_obj) - + self.projection_syns_pre[id] = pre_synapse_obj.id - + self.projections[id] = proj self.projection_types[id] = type self.weightDelays[id] = hasWeights or hasDelays - self.log.debug("Projection: %s (%s) from %s to %s with syn: %s, weights: %s, delays: %s"%(id, type, prePop, postPop, synapse, hasWeights, hasDelays)) - + self.log.debug( + "Projection: %s (%s) from %s to %s with syn: %s, weights: %s, delays: %s" + % (id, type, prePop, postPop, synapse, hasWeights, hasDelays) + ) + # # Overridden from DefaultNetworkHandler # def finalise_projection(self, id, prePop, postPop, synapse=None, type=None): - ''' - Check whether handle_projection was not called, e.g. due to no connections present - ''' - if type==None: - type=self.projection_types[id] - - self.log.debug("Projection: %s (%s) from %s to %s completed"%(id,type,prePop,postPop)) - - if type=="projection": + """ + Check whether handle_projection was not called, e.g. 
due to no connections present + """ + if type == None: + type = self.projection_types[id] + + self.log.debug( + "Projection: %s (%s) from %s to %s completed" % (id, type, prePop, postPop) + ) + + if type == "projection": present = False for p in self.network.projections: - if p.id==id: present=True + if p.id == id: + present = True if not present: - proj = neuroml.Projection(id=id, presynaptic_population=prePop, postsynaptic_population=postPop, synapse=synapse) + proj = neuroml.Projection( + id=id, + presynaptic_population=prePop, + postsynaptic_population=postPop, + synapse=synapse, + ) self.network.projections.append(proj) - elif type=="electricalProjection": + elif type == "electricalProjection": present = False for p in self.network.electrical_projections: - if p.id==id: present=True + if p.id == id: + present = True if not present: - proj = neuroml.ElectricalProjection(id=id, presynaptic_population=prePop, postsynaptic_population=postPop) + proj = neuroml.ElectricalProjection( + id=id, + presynaptic_population=prePop, + postsynaptic_population=postPop, + ) self.network.electrical_projections.append(proj) - + # # Overridden from DefaultNetworkHandler # # Assumes delay in ms - # - def handle_connection(self, proj_id, conn_id, prePop, postPop, synapseType, \ - preCellId, \ - postCellId, \ - preSegId = 0, \ - preFract = 0.5, \ - postSegId = 0, \ - postFract = 0.5, \ - delay=0, - weight=1): - - #self.printConnectionInformation(proj_id, conn_id, prePop, postPop, synapseType, preCellId, postCellId, weight) - - pre_cell_path = "../%s/%i/%s"%(prePop,preCellId,self.populations[prePop].component) - if self.populations[prePop].type==None: - pre_cell_path = "../%s[%i]"%(prePop,preCellId) - - post_cell_path = "../%s/%i/%s"%(postPop,postCellId,self.populations[postPop].component) - if self.populations[postPop].type==None: - post_cell_path = "../%s[%i]"%(postPop,postCellId) - + # + def handle_connection( + self, + proj_id, + conn_id, + prePop, + postPop, + synapseType, + 
preCellId, + postCellId, + preSegId=0, + preFract=0.5, + postSegId=0, + postFract=0.5, + delay=0, + weight=1, + ): + + # self.printConnectionInformation(proj_id, conn_id, prePop, postPop, synapseType, preCellId, postCellId, weight) + + pre_cell_path = "../%s/%i/%s" % ( + prePop, + preCellId, + self.populations[prePop].component, + ) + if self.populations[prePop].type == None: + pre_cell_path = "../%s[%i]" % (prePop, preCellId) + + post_cell_path = "../%s/%i/%s" % ( + postPop, + postCellId, + self.populations[postPop].component, + ) + if self.populations[postPop].type == None: + post_cell_path = "../%s[%i]" % (postPop, postCellId) + if isinstance(self.projections[proj_id], neuroml.ElectricalProjection): - + instances = False - if len(self.populations[prePop].instances)>0 or len(self.populations[postPop].instances)>0: + if ( + len(self.populations[prePop].instances) > 0 + or len(self.populations[postPop].instances) > 0 + ): instances = True - + if not instances: - conn = neuroml.ElectricalConnection(id=conn_id, \ - pre_cell="%s"%(preCellId), \ - pre_segment=preSegId, \ - pre_fraction_along=preFract, - post_cell="%s"%(postCellId), \ - post_segment=postSegId, - post_fraction_along=postFract, - synapse=self.projection_syns[proj_id]) - + conn = neuroml.ElectricalConnection( + id=conn_id, + pre_cell="%s" % (preCellId), + pre_segment=preSegId, + pre_fraction_along=preFract, + post_cell="%s" % (postCellId), + post_segment=postSegId, + post_fraction_along=postFract, + synapse=self.projection_syns[proj_id], + ) + self.projections[proj_id].electrical_connections.append(conn) - + else: - if weight==1: - conn = neuroml.ElectricalConnectionInstance(id=conn_id, \ - pre_cell=pre_cell_path, \ - pre_segment=preSegId, \ - pre_fraction_along=preFract, - post_cell=post_cell_path, \ - post_segment=postSegId, - post_fraction_along=postFract, - synapse=self.projection_syns[proj_id]) - - self.projections[proj_id].electrical_connection_instances.append(conn) + if weight == 1: + conn = 
neuroml.ElectricalConnectionInstance( + id=conn_id, + pre_cell=pre_cell_path, + pre_segment=preSegId, + pre_fraction_along=preFract, + post_cell=post_cell_path, + post_segment=postSegId, + post_fraction_along=postFract, + synapse=self.projection_syns[proj_id], + ) + + self.projections[proj_id].electrical_connection_instances.append( + conn + ) else: - conn = neuroml.ElectricalConnectionInstanceW(id=conn_id, \ - pre_cell=pre_cell_path, \ - pre_segment=preSegId, \ - pre_fraction_along=preFract, - post_cell=post_cell_path, \ - post_segment=postSegId, - post_fraction_along=postFract, - synapse=self.projection_syns[proj_id], - weight=weight) - - self.projections[proj_id].electrical_connection_instance_ws.append(conn) - - + conn = neuroml.ElectricalConnectionInstanceW( + id=conn_id, + pre_cell=pre_cell_path, + pre_segment=preSegId, + pre_fraction_along=preFract, + post_cell=post_cell_path, + post_segment=postSegId, + post_fraction_along=postFract, + synapse=self.projection_syns[proj_id], + weight=weight, + ) + + self.projections[proj_id].electrical_connection_instance_ws.append( + conn + ) + elif isinstance(self.projections[proj_id], neuroml.ContinuousProjection): - + instances = False - if len(self.populations[prePop].instances)>0 or len(self.populations[postPop].instances)>0: + if ( + len(self.populations[prePop].instances) > 0 + or len(self.populations[postPop].instances) > 0 + ): instances = True - + if not instances: - if weight!=1: - raise Exception("Case not (yet) supported: weight!=1 when not an instance based population...") - - conn = neuroml.ContinuousConnection(id=conn_id, \ - pre_cell="%s"%(preCellId), \ - pre_segment=preSegId, \ - pre_fraction_along=preFract, - post_cell="%s"%(postCellId), \ - post_segment=postSegId, - post_fraction_along=postFract, - pre_component=self.projection_syns_pre[proj_id], - post_component=self.projection_syns[proj_id]) - + if weight != 1: + raise Exception( + "Case not (yet) supported: weight!=1 when not an instance based 
population..." + ) + + conn = neuroml.ContinuousConnection( + id=conn_id, + pre_cell="%s" % (preCellId), + pre_segment=preSegId, + pre_fraction_along=preFract, + post_cell="%s" % (postCellId), + post_segment=postSegId, + post_fraction_along=postFract, + pre_component=self.projection_syns_pre[proj_id], + post_component=self.projection_syns[proj_id], + ) + self.projections[proj_id].continuous_connections.append(conn) - + else: - if weight==1: - conn = neuroml.ContinuousConnectionInstance(id=conn_id, \ - pre_cell=pre_cell_path, \ - pre_segment=preSegId, \ - pre_fraction_along=preFract, - post_cell=post_cell_path, \ - post_segment=postSegId, - post_fraction_along=postFract, - pre_component=self.projection_syns_pre[proj_id], - post_component=self.projection_syns[proj_id]) - - self.projections[proj_id].continuous_connection_instances.append(conn) + if weight == 1: + conn = neuroml.ContinuousConnectionInstance( + id=conn_id, + pre_cell=pre_cell_path, + pre_segment=preSegId, + pre_fraction_along=preFract, + post_cell=post_cell_path, + post_segment=postSegId, + post_fraction_along=postFract, + pre_component=self.projection_syns_pre[proj_id], + post_component=self.projection_syns[proj_id], + ) + + self.projections[proj_id].continuous_connection_instances.append( + conn + ) else: - conn = neuroml.ContinuousConnectionInstanceW(id=conn_id, \ - pre_cell=pre_cell_path, \ - pre_segment=preSegId, \ - pre_fraction_along=preFract, - post_cell=post_cell_path, \ - post_segment=postSegId, - post_fraction_along=postFract, - pre_component=self.projection_syns_pre[proj_id], - post_component=self.projection_syns[proj_id], - weight=weight) - - self.projections[proj_id].continuous_connection_instance_ws.append(conn) - + conn = neuroml.ContinuousConnectionInstanceW( + id=conn_id, + pre_cell=pre_cell_path, + pre_segment=preSegId, + pre_fraction_along=preFract, + post_cell=post_cell_path, + post_segment=postSegId, + post_fraction_along=postFract, + 
pre_component=self.projection_syns_pre[proj_id], + post_component=self.projection_syns[proj_id], + weight=weight, + ) + + self.projections[proj_id].continuous_connection_instance_ws.append( + conn + ) + else: - if not self.weightDelays[proj_id] and delay==0 and weight==1: + if not self.weightDelays[proj_id] and delay == 0 and weight == 1: - connection = neuroml.Connection(id=conn_id, \ - pre_cell_id=pre_cell_path, \ - pre_segment_id=preSegId, \ - pre_fraction_along=preFract, - post_cell_id=post_cell_path, \ - post_segment_id=postSegId, - post_fraction_along=postFract) + connection = neuroml.Connection( + id=conn_id, + pre_cell_id=pre_cell_path, + pre_segment_id=preSegId, + pre_fraction_along=preFract, + post_cell_id=post_cell_path, + post_segment_id=postSegId, + post_fraction_along=postFract, + ) self.projections[proj_id].connections.append(connection) else: - connection = neuroml.ConnectionWD(id=conn_id, \ - pre_cell_id=pre_cell_path, \ - pre_segment_id=preSegId, \ - pre_fraction_along=preFract, - post_cell_id=post_cell_path, \ - post_segment_id=postSegId, - post_fraction_along=postFract, - weight=weight, - delay='%sms'%(delay)) + connection = neuroml.ConnectionWD( + id=conn_id, + pre_cell_id=pre_cell_path, + pre_segment_id=preSegId, + pre_fraction_along=preFract, + post_cell_id=post_cell_path, + post_segment_id=postSegId, + post_fraction_along=postFract, + weight=weight, + delay="%sms" % (delay), + ) self.projections[proj_id].connection_wds.append(connection) - # # Overridden from DefaultNetworkHandler - # - def handle_input_list(self, inputListId, population_id, component, size, input_comp_obj=None): + # + def handle_input_list( + self, inputListId, population_id, component, size, input_comp_obj=None + ): if input_comp_obj: self.nml_doc.append(input_comp_obj) - - input_list = neuroml.InputList(id=inputListId, - component=component, - populations=population_id) - - self.input_lists[inputListId]=input_list - + + input_list = neuroml.InputList( + id=inputListId, 
component=component, populations=population_id + ) + + self.input_lists[inputListId] = input_list + self.network.input_lists.append(input_list) - + # # Overridden from DefaultNetworkHandler - # - def handle_single_input(self, inputListId, id, cellId, segId = 0, fract = 0.5, weight = 1.0): + # + def handle_single_input( + self, inputListId, id, cellId, segId=0, fract=0.5, weight=1.0 + ): input_list = self.input_lists[inputListId] - target_path = "../%s/%i/%s"%(input_list.populations, cellId, self.populations[input_list.populations].component) - - if self.populations[input_list.populations].type==None: - target_path = "../%s[%i]"%(input_list.populations, cellId) - - if weight==1: - input = neuroml.Input(id=id, - target=target_path, - destination="synapses") - if segId!=0: - input.segment_id="%s"%(segId) - if fract!=0.5: - input.fraction_along="%s"%(fract) + target_path = "../%s/%i/%s" % ( + input_list.populations, + cellId, + self.populations[input_list.populations].component, + ) + + if self.populations[input_list.populations].type == None: + target_path = "../%s[%i]" % (input_list.populations, cellId) + + if weight == 1: + input = neuroml.Input(id=id, target=target_path, destination="synapses") + if segId != 0: + input.segment_id = "%s" % (segId) + if fract != 0.5: + input.fraction_along = "%s" % (fract) input_list.input.append(input) else: - input_w = neuroml.InputW(id=id, - target=target_path, - destination="synapses") - if segId!=0: - input_w.segment_id="%s"%(segId) - if fract!=0.5: - input_w.fraction_along="%s"%(fract) - input_w.weight=weight - input_list.input_ws.append(input_w) \ No newline at end of file + input_w = neuroml.InputW(id=id, target=target_path, destination="synapses") + if segId != 0: + input_w.segment_id = "%s" % (segId) + if fract != 0.5: + input_w.fraction_along = "%s" % (fract) + input_w.weight = weight + input_list.input_ws.append(input_w) diff --git a/neuroml/hdf5/NetworkContainer.py b/neuroml/hdf5/NetworkContainer.py index 
82ad4b66..8434384e 100644 --- a/neuroml/hdf5/NetworkContainer.py +++ b/neuroml/hdf5/NetworkContainer.py @@ -1,437 +1,571 @@ -''' +""" Work in progress.. -''' +""" import neuroml import numpy as np class NetworkContainer(neuroml.Network): - + pass - #def __getattr__(self,name): + # def __getattr__(self,name): # print('Requesting in NC: %s'%name) - + class OptimizedList(object): - + array = None cursor = 0 - - def __init__(self, array=None,indices={}): + + def __init__(self, array=None, indices={}): self.array = array self.indices = indices - #print("Created %s with %s"% (self,indices)) - - - def _get_index_or_add(self,name,default_index): + # print("Created %s with %s"% (self,indices)) + + def _get_index_or_add(self, name, default_index): if not name in self.indices: self.indices[name] = default_index return default_index else: return self.indices[name] - - def _get_value(self,i,name,default_value): + + def _get_value(self, i, name, default_value): if not name in self.indices: return default_value else: return self.array[i][self.indices[name]] - - def _add_index_information(self,hdf5_array): + + def _add_index_information(self, hdf5_array): for name in self.indices.keys(): - - n="column_%i"%self.indices[name] - #print("Adding attribute to H5 array: %s = %s"%(n,name)) + + n = "column_%i" % self.indices[name] + # print("Adding attribute to H5 array: %s = %s"%(n,name)) hdf5_array._f_setattr(n, name) - + def __len__(self): - length = self.array.shape[0] if isinstance(self.array,np.ndarray) else 0 - #print('Getting length (%s) of %s'%(length,self.__class__.__name__,)) + length = self.array.shape[0] if isinstance(self.array, np.ndarray) else 0 + # print('Getting length (%s) of %s'%(length,self.__class__.__name__,)) return length - + def __iter__(self): self.cursor = 0 return self - + def __next__(self): if self.cursor == len(self): raise StopIteration else: instance = self.__getitem__(self.cursor) - self.cursor +=1 + self.cursor += 1 return instance def next(self): return 
self.__next__() - - def __iadd__(self,i_list): + + def __iadd__(self, i_list): for i in i_list: self.append(i) return self - + def __delitem__(self, index): - - raise NotImplementedError("__delitem__ is not implemented (yet) in HDF5 based optimized list") - def __setitem__(self,index,instance): - - raise NotImplementedError("__setitem__() is not implemented (yet) in HDF5 based instance list") - + raise NotImplementedError( + "__delitem__ is not implemented (yet) in HDF5 based optimized list" + ) + + def __setitem__(self, index, instance): + + raise NotImplementedError( + "__setitem__() is not implemented (yet) in HDF5 based instance list" + ) + def __str__(self): - - return "%s (optimized) with %s entries"%(self.__class__.__name__,len(self)) - - -class InstanceList(OptimizedList): + return "%s (optimized) with %s entries" % (self.__class__.__name__, len(self)) - def __getitem__(self,index): - if len(self.array[index])==4: - id = self.array[index][self._get_index_or_add('id',0)] - #print(' Getting instance %s in %s (%s)'%(index,self,id)) - assert(id==index) +class InstanceList(OptimizedList): + def __getitem__(self, index): + + if len(self.array[index]) == 4: + id = self.array[index][self._get_index_or_add("id", 0)] + # print(' Getting instance %s in %s (%s)'%(index,self,id)) + assert id == index else: id = index - + instance = neuroml.Instance(id=id) - instance.location = neuroml.Location(self.array[index][self._get_index_or_add('x',1)], - self.array[index][self._get_index_or_add('y',2)], - self.array[index][self._get_index_or_add('z',3)]) - + instance.location = neuroml.Location( + self.array[index][self._get_index_or_add("x", 1)], + self.array[index][self._get_index_or_add("y", 2)], + self.array[index][self._get_index_or_add("z", 3)], + ) + return instance - - - def append(self,instance): - - #print('Adding instance: %s'%instance) + + def append(self, instance): + + # print('Adding instance: %s'%instance) l = instance.location - i = 
np.array([[instance.id,l.x,l.y,l.z]], np.float32) - if len(self)==0: + i = np.array([[instance.id, l.x, l.y, l.z]], np.float32) + if len(self) == 0: self.array = i else: self.array = np.concatenate((self.array, i)) - - def add_instance(self,id,x,y,z): - - i = np.array([[id,x,y,z]], np.float32) - if len(self)==0: + + def add_instance(self, id, x, y, z): + + i = np.array([[id, x, y, z]], np.float32) + if len(self) == 0: self.array = i else: self.array = np.concatenate((self.array, i)) - - - - + + class PopulationContainer(neuroml.Population): + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + component=None, + size=None, + type=None, + extracellular_properties=None, + layout=None, + instances=None, + ): + super(self.__class__, self).__init__( + neuro_lex_id=neuro_lex_id, + id=id, + metaid=metaid, + notes=notes, + properties=properties, + annotation=annotation, + component=component, + size=size, + type=type, + extracellular_properties=extracellular_properties, + layout=layout, + instances=instances, + ) - - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, component=None, size=None, type=None, extracellular_properties=None, layout=None, instances=None): - super(self.__class__, self).__init__(neuro_lex_id=neuro_lex_id, id=id, metaid=metaid, notes=notes, properties=properties, annotation=annotation, component=component, size=size, type=type, extracellular_properties=extracellular_properties, layout=layout, instances=instances) - self.instances = InstanceList() - - #print("PopulationContainer created") - + + # print("PopulationContainer created") + def __str__(self): - - return "Population (optimized): "+str(self.id)+" with "+str( self.get_size() )+" components of type "+(self.component if self.component else "???") - + + return ( + "Population (optimized): " + + str(self.id) + + " with " + + str(self.get_size()) + + " components of type " + 
+ (self.component if self.component else "???") + ) + def exportHdf5(self, h5file, h5Group): - #print("Exporting %s as HDF5..."%self) - - popGroup = h5file.create_group(h5Group, 'population_'+self.id) + # print("Exporting %s as HDF5..."%self) + + popGroup = h5file.create_group(h5Group, "population_" + self.id) popGroup._f_setattr("id", self.id) popGroup._f_setattr("component", self.component) - + for p in self.properties: - popGroup._f_setattr("property:%s"%p.tag, p) - - if len(self.instances)>0: + popGroup._f_setattr("property:%s" % p.tag, p) + + if len(self.instances) > 0: popGroup._f_setattr("size", len(self.instances)) popGroup._f_setattr("type", "populationList") - h5array = h5file.create_carray(popGroup, self.id, obj=self.instances.array, title="Locations of cells in "+ self.id) - + h5array = h5file.create_carray( + popGroup, + self.id, + obj=self.instances.array, + title="Locations of cells in " + self.id, + ) + self.instances._add_index_information(h5array) - + else: popGroup._f_setattr("size", self.size) - - + + class ProjectionContainer(neuroml.Projection): - - def __init__(self, neuro_lex_id=None, id=None, presynaptic_population=None, postsynaptic_population=None, synapse=None, connections=None, connection_wds=None): - - super(self.__class__, self).__init__(neuro_lex_id=neuro_lex_id, id=id, presynaptic_population=presynaptic_population,postsynaptic_population=postsynaptic_population,synapse=synapse,connections=connections,connection_wds=connection_wds) - - assert(connections==None) - assert(connection_wds==None) + def __init__( + self, + neuro_lex_id=None, + id=None, + presynaptic_population=None, + postsynaptic_population=None, + synapse=None, + connections=None, + connection_wds=None, + ): + + super(self.__class__, self).__init__( + neuro_lex_id=neuro_lex_id, + id=id, + presynaptic_population=presynaptic_population, + postsynaptic_population=postsynaptic_population, + synapse=synapse, + connections=connections, + connection_wds=connection_wds, + ) + + 
assert connections == None + assert connection_wds == None self.connections = ConnectionList() self.connection_wds = ConnectionList() self.connections.presynaptic_population = presynaptic_population self.connections.postsynaptic_population = postsynaptic_population - - + def __str__(self): - return "Projection (optimized): "+self.id+" from "+self.presynaptic_population+" to "+self.postsynaptic_population+", synapse: "+self.synapse - - + return ( + "Projection (optimized): " + + self.id + + " from " + + self.presynaptic_population + + " to " + + self.postsynaptic_population + + ", synapse: " + + self.synapse + ) + def exportHdf5(self, h5file, h5Group): - #print("Exporting %s as HDF5"%self) - - projGroup = h5file.create_group(h5Group, 'projection_'+self.id) + # print("Exporting %s as HDF5"%self) + + projGroup = h5file.create_group(h5Group, "projection_" + self.id) projGroup._f_setattr("id", self.id) projGroup._f_setattr("type", "projection") projGroup._f_setattr("presynapticPopulation", self.presynaptic_population) projGroup._f_setattr("postsynapticPopulation", self.postsynaptic_population) projGroup._f_setattr("synapse", self.synapse) - - h5array = h5file.create_carray(projGroup, self.id, obj=self.connections.array, title="Connections of cells in "+ self.id) - + + h5array = h5file.create_carray( + projGroup, + self.id, + obj=self.connections.array, + title="Connections of cells in " + self.id, + ) + self.connections._add_index_information(h5array) - - + + ## ## TODO: update this to act like ElectricalProjection ## class ElectricalProjectionContainer(neuroml.ElectricalProjection): - - def __init__(self, neuro_lex_id=None, id=None, presynaptic_population=None, postsynaptic_population=None, electrical_connections=None, electrical_connection_instances=None): - - super(self.__class__, self).__init__(neuro_lex_id=neuro_lex_id, id=id, 
presynaptic_population=presynaptic_population,postsynaptic_population=postsynaptic_population,electrical_connections=electrical_connections,electrical_connection_instances=electrical_connection_instances) - - assert(electrical_connections==None) - assert(electrical_connection_instances==None) + def __init__( + self, + neuro_lex_id=None, + id=None, + presynaptic_population=None, + postsynaptic_population=None, + electrical_connections=None, + electrical_connection_instances=None, + ): + + super(self.__class__, self).__init__( + neuro_lex_id=neuro_lex_id, + id=id, + presynaptic_population=presynaptic_population, + postsynaptic_population=postsynaptic_population, + electrical_connections=electrical_connections, + electrical_connection_instances=electrical_connection_instances, + ) + + assert electrical_connections == None + assert electrical_connection_instances == None self.connections = ConnectionList() self.connection_wds = ConnectionList() self.connections.presynaptic_population = presynaptic_population self.connections.postsynaptic_population = postsynaptic_population - - + def __str__(self): - return "Electrical projection (optimized): "+self.id+" from "+self.presynaptic_population+" to "+self.postsynaptic_population+", synapse: "+self.synapse - - + return ( + "Electrical projection (optimized): " + + self.id + + " from " + + self.presynaptic_population + + " to " + + self.postsynaptic_population + + ", synapse: " + + self.synapse + ) + def exportHdf5(self, h5file, h5Group): - #print("Exporting %s as HDF5"%self) - - projGroup = h5file.create_group(h5Group, 'projection_'+self.id) + # print("Exporting %s as HDF5"%self) + + projGroup = h5file.create_group(h5Group, "projection_" + self.id) projGroup._f_setattr("id", self.id) projGroup._f_setattr("type", "projection") projGroup._f_setattr("presynapticPopulation", self.presynaptic_population) projGroup._f_setattr("postsynapticPopulation", self.postsynaptic_population) projGroup._f_setattr("synapse", self.synapse) - - 
h5array = h5file.create_carray(projGroup, self.id, obj=self.connections.array, title="Connections of cells in "+ self.id) - + + h5array = h5file.create_carray( + projGroup, + self.id, + obj=self.connections.array, + title="Connections of cells in " + self.id, + ) + self.connections._add_index_information(h5array) - - + + class ConnectionList(OptimizedList): presynaptic_population = None postsynaptic_population = None - - def __getitem__(self,index): - id_index = self._get_index_or_add('id',-1) - if id_index>0: + def __getitem__(self, index): + + id_index = self._get_index_or_add("id", -1) + if id_index > 0: id = int(self.array[index][id_index]) - assert(self.array[index][0]==index) + assert self.array[index][0] == index else: id = index - - pre_cell_id = int(self.array[index][self._get_index_or_add('pre_cell_id',1)]) - post_cell_id = int(self.array[index][self._get_index_or_add('post_cell_id',2)]) - pre_segment_id = int(self._get_value(index,'pre_segment_id',0)) - post_segment_id = int(self._get_value(index,'post_segment_id',0)) - pre_fraction_along = float(self._get_value(index,'pre_fraction_along',0.5)) - post_fraction_along = float(self._get_value(index,'post_fraction_along',0.5)) - - input = neuroml.Connection(id=id, - pre_cell_id="../%s/%i/%s"%(self.presynaptic_population,pre_cell_id,"???"), - post_cell_id="../%s/%i/%s"%(self.postsynaptic_population,post_cell_id,"???"), - pre_segment_id=pre_segment_id, - post_segment_id=post_segment_id, - pre_fraction_along=pre_fraction_along, - post_fraction_along=post_fraction_along) - + + pre_cell_id = int(self.array[index][self._get_index_or_add("pre_cell_id", 1)]) + post_cell_id = int(self.array[index][self._get_index_or_add("post_cell_id", 2)]) + pre_segment_id = int(self._get_value(index, "pre_segment_id", 0)) + post_segment_id = int(self._get_value(index, "post_segment_id", 0)) + pre_fraction_along = float(self._get_value(index, "pre_fraction_along", 0.5)) + post_fraction_along = float(self._get_value(index, 
"post_fraction_along", 0.5)) + + input = neuroml.Connection( + id=id, + pre_cell_id="../%s/%i/%s" + % (self.presynaptic_population, pre_cell_id, "???"), + post_cell_id="../%s/%i/%s" + % (self.postsynaptic_population, post_cell_id, "???"), + pre_segment_id=pre_segment_id, + post_segment_id=post_segment_id, + pre_fraction_along=pre_fraction_along, + post_fraction_along=post_fraction_along, + ) + return input - - - def append(self,conn): - - i = np.array([[conn.id, - conn.get_pre_cell_id(), - conn.get_post_cell_id()]], np.float32) - - if len(self)==0: + + def append(self, conn): + + i = np.array( + [[conn.id, conn.get_pre_cell_id(), conn.get_post_cell_id()]], np.float32 + ) + + if len(self) == 0: self.array = i else: self.array = np.concatenate((self.array, i)) - - + + class InputListContainer(neuroml.InputList): - - def __init__(self, neuro_lex_id=None, id=None, populations=None, component=None, input=None): - super(self.__class__, self).__init__(neuro_lex_id=neuro_lex_id, id=id, populations=populations, component=component, input=input) - - assert(input==None) + def __init__( + self, neuro_lex_id=None, id=None, populations=None, component=None, input=None + ): + super(self.__class__, self).__init__( + neuro_lex_id=neuro_lex_id, + id=id, + populations=populations, + component=component, + input=input, + ) + + assert input == None self.input = InputsList() self.input.target_population = populations - - #print("InputListContainer %s created"%self.id) - + + # print("InputListContainer %s created"%self.id) + def __str__(self): - - return "Input list (optimized): "+self.id+" to "+self.populations+", component "+self.component - + + return ( + "Input list (optimized): " + + self.id + + " to " + + self.populations + + ", component " + + self.component + ) + def exportHdf5(self, h5file, h5Group): - #print("Exporting %s as HDF5"%self) - - ilGroup = h5file.create_group(h5Group, 'inputList_'+self.id) + # print("Exporting %s as HDF5"%self) + + ilGroup = 
h5file.create_group(h5Group, "inputList_" + self.id) ilGroup._f_setattr("id", self.id) ilGroup._f_setattr("component", self.component) ilGroup._f_setattr("population", self.populations) - - - h5array = h5file.create_carray(ilGroup, self.id, obj=self.input.array , title="Locations of inputs in "+ self.id) - + + h5array = h5file.create_carray( + ilGroup, + self.id, + obj=self.input.array, + title="Locations of inputs in " + self.id, + ) + self.input._add_index_information(h5array) - - - - - + + class InputsList(OptimizedList): target_population = None - - def __getitem__(self,index): - - #print(' Getting instance %s'%(index)) - #print self.array - - id = self.array[index][self._get_index_or_add('id',0)] - assert(id==index) - - target_cell_id = int(self.array[index][self._get_index_or_add('target_cell_id',1)]) - segment_id = int(self.array[index][self._get_index_or_add('segment_id',1)]) - fraction_along = float(self.array[index][self._get_index_or_add('fraction_along',1)]) - - input = neuroml.Input(id=index, - target="../%s/%i/%s"%(self.target_population, target_cell_id, "???"), - destination="synapses", - segment_id=segment_id, - fraction_along=fraction_along) - + + def __getitem__(self, index): + + # print(' Getting instance %s'%(index)) + # print self.array + + id = self.array[index][self._get_index_or_add("id", 0)] + assert id == index + + target_cell_id = int( + self.array[index][self._get_index_or_add("target_cell_id", 1)] + ) + segment_id = int(self.array[index][self._get_index_or_add("segment_id", 1)]) + fraction_along = float( + self.array[index][self._get_index_or_add("fraction_along", 1)] + ) + + input = neuroml.Input( + id=index, + target="../%s/%i/%s" % (self.target_population, target_cell_id, "???"), + destination="synapses", + segment_id=segment_id, + fraction_along=fraction_along, + ) + return input - - - def append(self,input): - - #print('Adding input: %s'%input) - - i = np.array([[input.id, - input.get_target_cell_id(), - input.get_segment_id(), - 
input.get_fraction_along()]], np.float32) - - if len(self)==0: + + def append(self, input): + + # print('Adding input: %s'%input) + + i = np.array( + [ + [ + input.id, + input.get_target_cell_id(), + input.get_segment_id(), + input.get_fraction_along(), + ] + ], + np.float32, + ) + + if len(self) == 0: self.array = i else: self.array = np.concatenate((self.array, i)) - -if __name__ == '__main__': - + +if __name__ == "__main__": + from neuroml.loaders import read_neuroml2_file - - - file_name = '../examples/test_files/MediumNet.net.nml' - - nml_doc = read_neuroml2_file(file_name,include_includes=True) - + + file_name = "../examples/test_files/MediumNet.net.nml" + + nml_doc = read_neuroml2_file(file_name, include_includes=True) + print(nml_doc.summary()) - + net0 = nml_doc.networks[0] - + nc = NetworkContainer(id="testnet") - pc0 = PopulationContainer(id="pre", component="iffy",size=4) + pc0 = PopulationContainer(id="pre", component="iffy", size=4) nc.populations.append(pc0) pc = PopulationContainer(id="fake", component="izzy") nc.populations.append(pc) instance = neuroml.Instance(0) - instance.location = neuroml.Location(100,100,33.333) + instance.location = neuroml.Location(100, 100, 33.333) pc.instances.append(instance) instance = neuroml.Instance(1) - instance.location = neuroml.Location(200,200,66.66) + instance.location = neuroml.Location(200, 200, 66.66) pc.instances.append(instance) - - prc = ProjectionContainer(id="proj", presynaptic_population=pc0.id, postsynaptic_population=pc.id,synapse="ampa") - - conn = neuroml.Connection(id=0, - pre_cell_id="../%s/%i/%s"%(pc0.id,0,pc0.component), \ - pre_segment_id=2, \ - pre_fraction_along=0.1, - post_cell_id="../%s/%i/%s"%(pc.id,2,pc.id)) - prc.connections.append(conn) + + prc = ProjectionContainer( + id="proj", + presynaptic_population=pc0.id, + postsynaptic_population=pc.id, + synapse="ampa", + ) + + conn = neuroml.Connection( + id=0, + pre_cell_id="../%s/%i/%s" % (pc0.id, 0, pc0.component), + pre_segment_id=2, + 
pre_fraction_along=0.1, + post_cell_id="../%s/%i/%s" % (pc.id, 2, pc.id), + ) + prc.connections.append(conn) nc.projections.append(prc) - - ilc = InputListContainer(id="iii",component="CCC",populations=pc.id) + + ilc = InputListContainer(id="iii", component="CCC", populations=pc.id) nc.input_lists.append(ilc) - ilc.input.append(neuroml.Input(id=0,target="../pyramidals_48/37/pyr_4_sym", destination="synapses", segment_id="2", fraction_along="0.3")) - - + ilc.input.append( + neuroml.Input( + id=0, + target="../pyramidals_48/37/pyr_4_sym", + destination="synapses", + segment_id="2", + fraction_along="0.3", + ) + ) + nc2 = NetworkContainer(id="testnet2") - - pc2 = PopulationContainer(id="fake2", component="iaf",size=4) + + pc2 = PopulationContainer(id="fake2", component="iaf", size=4) nc2.populations.append(pc2) - + print(pc) - - nets = [nc,nc2,net0] + + nets = [nc, nc2, net0] nets = [nc] - + for n in nets: - print('\n--------------------------------') + print("\n--------------------------------") print(n) for p in n.populations: - print(" %s"%p) - print(" > %s"%(p.instances)) + print(" %s" % p) + print(" > %s" % (p.instances)) for i in p.instances: - print(" > %s"%i) + print(" > %s" % i) for pr in n.projections: - print(" %s"%pr) - print(" > %s"%(pr.connections)) + print(" %s" % pr) + print(" > %s" % (pr.connections)) for c in pr.connections: - print(" > %s"%c) + print(" > %s" % c) for il in n.input_lists: - print(" %s"%il) + print(" %s" % il) for i in il.input: - print(" > %s"%i) - + print(" > %s" % i) + nml_doc_c = neuroml.NeuroMLDocument(id="ndc") nml_doc_c.networks.append(nc) - print('\n++++++++++') + print("\n++++++++++") print(nml_doc_c.summary()) - - print('\n++++++++++') + + print("\n++++++++++") exit() - - file_name = '../examples/tmp/MediumNet.net.nml.h5' - - nml_doc = read_neuroml2_file(file_name,optimized=True,include_includes=True) - + + file_name = "../examples/tmp/MediumNet.net.nml.h5" + + nml_doc = read_neuroml2_file(file_name, optimized=True, 
include_includes=True) + print(nml_doc.summary()) - - file_name = '../examples/tmp/MediumNet2.net.nml.h5' - + + file_name = "../examples/tmp/MediumNet2.net.nml.h5" + from neuroml.writers import NeuroMLHdf5Writer - - NeuroMLHdf5Writer.write(nml_doc,file_name) - - print("Rewritten to %s"%file_name) \ No newline at end of file + + NeuroMLHdf5Writer.write(nml_doc, file_name) + + print("Rewritten to %s" % file_name) diff --git a/neuroml/hdf5/NeuroMLHdf5Parser.py b/neuroml/hdf5/NeuroMLHdf5Parser.py index 17e5ed7a..11fb342e 100644 --- a/neuroml/hdf5/NeuroMLHdf5Parser.py +++ b/neuroml/hdf5/NeuroMLHdf5Parser.py @@ -2,7 +2,7 @@ # # A class to parse HDF5 based NeuroML files. # Calls the appropriate methods in DefaultNetworkHandler when cell locations, -# network connections are found. The DefaultNetworkHandler can either print +# network connections are found. The DefaultNetworkHandler can either print # information, or if it's a class overriding DefaultNetworkHandler can create # the appropriate network in a simulator dependent fashion # @@ -13,13 +13,13 @@ # This work has been funded by the Medical Research Council & Wellcome Trust # # - + import logging import sys import inspect -import tables # pytables for HDF5 support -import numpy as np +import tables # pytables for HDF5 support +import numpy as np import neuroml @@ -38,627 +38,767 @@ import os.path -class NeuroMLHdf5Parser(): - - log = logging.getLogger("NeuroMLHdf5Parser") - - currPopulation = "" - currentComponent = "" - totalInstances = 0 - - currentProjectionId = "" - currentProjectionType = "" - currentProjectionPrePop = "" - currentProjectionPostPop = "" - currentSynapse = "" - currentPreSynapse = "" - - currInputList = "" - - nml_doc_extra_elements = None - - optimized=False - - def __init__ (self, netHandler, optimized=False): - self.netHandler = netHandler - self.optimized = optimized - - if not self.optimized: - # For continued use with old API - if not hasattr(self.netHandler,'handle_network') or 
hasattr(self.netHandler,'handleNetwork'): - self.netHandler.handle_network = self.netHandler.handleNetwork - if not hasattr(self.netHandler,'handle_document_start') or hasattr(self.netHandler,'handleDocumentStart'): - self.netHandler.handle_document_start = self.netHandler.handleDocumentStart - if not hasattr(self.netHandler,'handle_population') or hasattr(self.netHandler,'handlePopulation'): - self.netHandler.handle_population = self.netHandler.handlePopulation - if not hasattr(self.netHandler,'handle_location') or hasattr(self.netHandler,'handleLocation'): - self.netHandler.handle_location = self.netHandler.handleLocation - - if not hasattr(self.netHandler,'handle_projection') or hasattr(self.netHandler,'handleProjection'): - self.netHandler.handle_projection = self.netHandler.handleProjection - if not hasattr(self.netHandler,'finalise_projection') or hasattr(self.netHandler,'finaliseProjection'): - self.netHandler.finalise_projection = self.netHandler.finaliseProjection - - if not hasattr(self.netHandler,'handle_connection') or hasattr(self.netHandler,'handleConnection'): - self.netHandler.handle_connection = self.netHandler.handleConnection - if not hasattr(self.netHandler,'handle_input_list') or hasattr(self.netHandler,'handleInputList'): - self.netHandler.handle_input_list = self.netHandler.handleInputList - if not hasattr(self.netHandler,'handle_single_input') or hasattr(self.netHandler,'handleSingleInput'): - self.netHandler.handle_single_input = self.netHandler.handleSingleInput - if not hasattr(self.netHandler,'finalise_input_source') or hasattr(self.netHandler,'finaliseInputSource'): - self.netHandler.finalise_input_source = self.netHandler.finaliseInputSource - - - - def parse(self, filename): - - h5file=tables.open_file(filename,mode='r') - - self.log.info("Opened HDF5 file: %s; id=%s"%(h5file.filename,h5file.root.neuroml._v_attrs.id)) - - if hasattr(h5file.root.neuroml._v_attrs,"neuroml_top_level"): - nml = 
get_str_attribute_group(h5file.root.neuroml,"neuroml_top_level") - - try: - self.nml_doc_extra_elements = read_neuroml2_string(nml, - include_includes=True, - verbose=False, - base_path=os.path.dirname(os.path.abspath(filename))) - except Exception as e: - print('Unable to read XML string extracted from HDF5 file!') - print(e) - print(nml) - raise e - - self.log.info("Extracted NeuroML2 elements from extra string found in HDF5 file") - - self.parse_group(h5file.root.neuroml) - - h5file.close() - - def get_nml_doc(self): - - if not self.optimized: - nml2_doc = nmlHandler.get_nml_doc() - if self.nml_doc_extra_elements: - add_all_to_document(self.nml_doc_extra_elements,nml2_doc) - return nml2_doc - else: - - nml_doc = neuroml.NeuroMLDocument(id=self.doc_id, notes=self.doc_notes) - if self.nml_doc_extra_elements: - add_all_to_document(self.nml_doc_extra_elements,nml_doc) - nml_doc.networks.append(self.optimizedNetwork) - - return nml_doc - - - - def _is_dataset(self,node): - return node._c_classid == 'ARRAY' or node._c_classid == 'CARRAY' - - def parse_group(self, g): - self.log.debug("Parsing group: "+ str(g)) - - self.start_group(g) - - # Note this ensures groups are parsed before datasets. Important for synapse props - - # Ensure populations parsed first! - for node in g: - - if node._c_classid == 'GROUP' and node._v_name.count('population_')>=1: - self.log.debug("Sub node: "+ str(node)+ ", class: "+ node._c_classid) - self.parse_group(node) - - # Non populations! 
- for node in g: - - if node._c_classid == 'GROUP' and node._v_name.count('population_')==0: - self.log.debug("Sub node (ng): "+ str(node)+ ", class: "+ node._c_classid) - self.parse_group(node) - - for node in g: - self.log.debug("Sub node: "+ str(node)+ ", class: "+ node._c_classid) - - if self._is_dataset(node): - self.parse_dataset(node) - - self.end_group(g) - - def _extract_named_indices(self,d): - - named_indices = {} - - for attrName in d.attrs._v_attrnames: - - if 'column_' in attrName: - val = d.attrs.__getattr__(attrName) - if isinstance(val,str): - name = val - else: - name = val[0] - - index = int(attrName[len('column_'):]) - - named_indices[name] = index - - return named_indices - - - def parse_dataset(self, d): - self.log.debug("Parsing dataset/array: "+ str(d)) - - if self.currPopulation!="": - self.log.debug("Using data for population: "+ self.currPopulation) - self.log.debug("Size is: "+str(d.shape[0])+" rows of: "+ str(d.shape[1])+ " entries") - +class NeuroMLHdf5Parser: + + log = logging.getLogger("NeuroMLHdf5Parser") + + currPopulation = "" + currentComponent = "" + totalInstances = 0 + + currentProjectionId = "" + currentProjectionType = "" + currentProjectionPrePop = "" + currentProjectionPostPop = "" + currentSynapse = "" + currentPreSynapse = "" + + currInputList = "" + + nml_doc_extra_elements = None + + optimized = False + + def __init__(self, netHandler, optimized=False): + self.netHandler = netHandler + self.optimized = optimized + if not self.optimized: - - indexId = -1 - indexX = -1 - indexY = -1 - indexZ = -1 - - for attrName in d.attrs._v_attrnames: - val = d.attrs.__getattr__(attrName) + # For continued use with old API + if not hasattr(self.netHandler, "handle_network") or hasattr( + self.netHandler, "handleNetwork" + ): + self.netHandler.handle_network = self.netHandler.handleNetwork + if not hasattr(self.netHandler, "handle_document_start") or hasattr( + self.netHandler, "handleDocumentStart" + ): + 
self.netHandler.handle_document_start = ( + self.netHandler.handleDocumentStart + ) + if not hasattr(self.netHandler, "handle_population") or hasattr( + self.netHandler, "handlePopulation" + ): + self.netHandler.handle_population = self.netHandler.handlePopulation + if not hasattr(self.netHandler, "handle_location") or hasattr( + self.netHandler, "handleLocation" + ): + self.netHandler.handle_location = self.netHandler.handleLocation + + if not hasattr(self.netHandler, "handle_projection") or hasattr( + self.netHandler, "handleProjection" + ): + self.netHandler.handle_projection = self.netHandler.handleProjection + if not hasattr(self.netHandler, "finalise_projection") or hasattr( + self.netHandler, "finaliseProjection" + ): + self.netHandler.finalise_projection = self.netHandler.finaliseProjection + + if not hasattr(self.netHandler, "handle_connection") or hasattr( + self.netHandler, "handleConnection" + ): + self.netHandler.handle_connection = self.netHandler.handleConnection + if not hasattr(self.netHandler, "handle_input_list") or hasattr( + self.netHandler, "handleInputList" + ): + self.netHandler.handle_input_list = self.netHandler.handleInputList + if not hasattr(self.netHandler, "handle_single_input") or hasattr( + self.netHandler, "handleSingleInput" + ): + self.netHandler.handle_single_input = self.netHandler.handleSingleInput + if not hasattr(self.netHandler, "finalise_input_source") or hasattr( + self.netHandler, "finaliseInputSource" + ): + self.netHandler.finalise_input_source = ( + self.netHandler.finaliseInputSource + ) + + def parse(self, filename): + + h5file = tables.open_file(filename, mode="r") + + self.log.info( + "Opened HDF5 file: %s; id=%s" + % (h5file.filename, h5file.root.neuroml._v_attrs.id) + ) + + if hasattr(h5file.root.neuroml._v_attrs, "neuroml_top_level"): + nml = get_str_attribute_group(h5file.root.neuroml, "neuroml_top_level") + + try: + self.nml_doc_extra_elements = read_neuroml2_string( + nml, + include_includes=True, + 
verbose=False, + base_path=os.path.dirname(os.path.abspath(filename)), + ) + except Exception as e: + print("Unable to read XML string extracted from HDF5 file!") + print(e) + print(nml) + raise e + + self.log.info( + "Extracted NeuroML2 elements from extra string found in HDF5 file" + ) + + self.parse_group(h5file.root.neuroml) + + h5file.close() + + def get_nml_doc(self): - if val == 'id' or val[0] == 'id': - indexId = int(attrName[len('column_'):]) - if val == 'x' or val[0] == 'x': - indexX = int(attrName[len('column_'):]) - if val == 'y' or val[0] == 'y': - indexY = int(attrName[len('column_'):]) - if val == 'z' or val[0] == 'z': - indexZ = int(attrName[len('column_'):]) - - if indexId<0: - if len(d[0])==4: indexId=0 - if indexX<0: - if len(d[0])==3: indexX=0 - if len(d[0])==4: indexX=1 - if indexY<0: - if len(d[0])==3: indexY=1 - if len(d[0])==4: indexY=2 - if indexZ<0: - if len(d[0])==3: indexZ=2 - if len(d[0])==4: indexY=3 - - for i in range(0, d.shape[0]): - - self.netHandler.handle_location( int(d[i,indexId]) if indexId>=0 else i, \ - self.currPopulation, \ - self.currentComponent, \ - float(d[i,indexX]), \ - float(d[i,indexY]), \ - float(d[i,indexZ])) - else: - - #TODO: a better way to convert??? 
- a = np.array(d) - self.currOptPopulation.instances = InstanceList(array=a,indices=self._extract_named_indices(d)) - - - elif self.currentProjectionId!="": - self.log.debug("Using data for proj: "+ self.currentProjectionId) - self.log.debug("Size is: "+str(d.shape[0])+" rows of: "+ str(d.shape[1])+ " entries") - if not self.optimized: - - indexId = -1 - indexPreCellId = -1 - indexPreSegId = -1 - indexPreFractAlong= -1 - indexPostCellId = -1 - indexPostSegId = -1 - indexPostFractAlong= -1 - indexWeight= -1 - indexDelay= -1 - - id = -1 - preCellId = -1 - preSegId = 0 - preFractAlong= 0.5 - postCellId = -1 - postSegId = 0 - postFractAlong= 0.5 - - type="projection" - - extraParamIndices = {} - - - for attrName in d.attrs._v_attrnames: - val = d.attrs.__getattr__(attrName) - if not isinstance(val, str): - val = val.decode() - - #self.log.debug("Val of attribute: "+ attrName + " is "+ str(val)) - - if val == 'id' or val[0] == 'id': - indexId = int(attrName[len('column_'):]) - elif val == 'pre_cell_id' or val[0] == 'pre_cell_id': - indexPreCellId = int(attrName[len('column_'):]) - elif val == 'pre_segment_id' or val[0] == 'pre_segment_id': - indexPreSegId = int(attrName[len('column_'):]) - elif val == 'pre_fraction_along' or val[0] == 'pre_fraction_along': - indexPreFractAlong = int(attrName[len('column_'):]) - elif val == 'post_cell_id' or val[0] == 'post_cell_id': - indexPostCellId = int(attrName[len('column_'):]) - elif val == 'post_segment_id' or val[0] == 'post_segment_id': - indexPostSegId = int(attrName[len('column_'):]) - elif val == 'post_fraction_along' or val[0] == 'post_fraction_along': - indexPostFractAlong = int(attrName[len('column_'):]) - elif val == 'weight' or val[0] == 'weight': - indexWeight = int(attrName[len('column_'):]) - elif val == 'delay' or val[0] == 'delay': - indexDelay = int(attrName[len('column_'):]) + nml2_doc = nmlHandler.get_nml_doc() + if self.nml_doc_extra_elements: + add_all_to_document(self.nml_doc_extra_elements, nml2_doc) + 
return nml2_doc + else: + nml_doc = neuroml.NeuroMLDocument(id=self.doc_id, notes=self.doc_notes) if self.nml_doc_extra_elements: - synapse_obj = self.nml_doc_extra_elements.get_by_id(self.currentSynapse) - - pre_synapse_obj = self.nml_doc_extra_elements.get_by_id(self.currentPreSynapse) if len(self.currentPreSynapse)>0 else None - - else: - synapse_obj = None - pre_synapse_obj = None + add_all_to_document(self.nml_doc_extra_elements, nml_doc) + nml_doc.networks.append(self.optimizedNetwork) + + return nml_doc + + def _is_dataset(self, node): + return node._c_classid == "ARRAY" or node._c_classid == "CARRAY" - self.netHandler.handle_projection(self.currentProjectionId, - self.currentProjectionPrePop, - self.currentProjectionPostPop, - self.currentSynapse, - hasWeights=indexWeight>0, - hasDelays=indexDelay>0, - type=self.currentProjectionType, - synapse_obj = synapse_obj, - pre_synapse_obj = pre_synapse_obj) + def parse_group(self, g): + self.log.debug("Parsing group: " + str(g)) + self.start_group(g) - self.log.debug("Cols: Id: %d precell: %d, postcell: %d, pre fract: %d, post fract: %d" % (indexId, indexPreCellId, indexPostCellId, indexPreFractAlong, indexPostFractAlong)) + # Note this ensures groups are parsed before datasets. Important for synapse props - self.log.debug("Extra cols: "+str(extraParamIndices) ) + # Ensure populations parsed first! + for node in g: + + if node._c_classid == "GROUP" and node._v_name.count("population_") >= 1: + self.log.debug("Sub node: " + str(node) + ", class: " + node._c_classid) + self.parse_group(node) + # Non populations! 
+ for node in g: - for i in range(0, d.shape[0]): - row = d[i,:] + if node._c_classid == "GROUP" and node._v_name.count("population_") == 0: + self.log.debug( + "Sub node (ng): " + str(node) + ", class: " + node._c_classid + ) + self.parse_group(node) + + for node in g: + self.log.debug("Sub node: " + str(node) + ", class: " + node._c_classid) - id = int(row[indexId]) if indexId>0 else i - - preCellId = int(row[indexPreCellId]) + if self._is_dataset(node): + self.parse_dataset(node) - if indexPreSegId >= 0: - preSegId = int(row[indexPreSegId]) - if indexPreFractAlong >= 0: - preFractAlong = row[indexPreFractAlong] + self.end_group(g) - postCellId = int(row[indexPostCellId]) + def _extract_named_indices(self, d): - if indexPostSegId >= 0: - postSegId = int(row[indexPostSegId]) - if indexPostFractAlong >= 0: - postFractAlong = row[indexPostFractAlong] + named_indices = {} + for attrName in d.attrs._v_attrnames: - if indexWeight >= 0: - weight = row[indexWeight] + if "column_" in attrName: + val = d.attrs.__getattr__(attrName) + if isinstance(val, str): + name = val else: - weight=1 + name = val[0] + + index = int(attrName[len("column_") :]) + + named_indices[name] = index + + return named_indices + + def parse_dataset(self, d): + self.log.debug("Parsing dataset/array: " + str(d)) + + if self.currPopulation != "": + self.log.debug("Using data for population: " + self.currPopulation) + self.log.debug( + "Size is: " + + str(d.shape[0]) + + " rows of: " + + str(d.shape[1]) + + " entries" + ) + + if not self.optimized: + + indexId = -1 + indexX = -1 + indexY = -1 + indexZ = -1 + + for attrName in d.attrs._v_attrnames: + val = d.attrs.__getattr__(attrName) + + if val == "id" or val[0] == "id": + indexId = int(attrName[len("column_") :]) + if val == "x" or val[0] == "x": + indexX = int(attrName[len("column_") :]) + if val == "y" or val[0] == "y": + indexY = int(attrName[len("column_") :]) + if val == "z" or val[0] == "z": + indexZ = int(attrName[len("column_") :]) + + if 
indexId < 0: + if len(d[0]) == 4: + indexId = 0 + if indexX < 0: + if len(d[0]) == 3: + indexX = 0 + if len(d[0]) == 4: + indexX = 1 + if indexY < 0: + if len(d[0]) == 3: + indexY = 1 + if len(d[0]) == 4: + indexY = 2 + if indexZ < 0: + if len(d[0]) == 3: + indexZ = 2 + if len(d[0]) == 4: + indexY = 3 + + for i in range(0, d.shape[0]): + + self.netHandler.handle_location( + int(d[i, indexId]) if indexId >= 0 else i, + self.currPopulation, + self.currentComponent, + float(d[i, indexX]), + float(d[i, indexY]), + float(d[i, indexZ]), + ) + else: + + # TODO: a better way to convert??? + a = np.array(d) + self.currOptPopulation.instances = InstanceList( + array=a, indices=self._extract_named_indices(d) + ) + + elif self.currentProjectionId != "": + self.log.debug("Using data for proj: " + self.currentProjectionId) + self.log.debug( + "Size is: " + + str(d.shape[0]) + + " rows of: " + + str(d.shape[1]) + + " entries" + ) + + if not self.optimized: + + indexId = -1 + indexPreCellId = -1 + indexPreSegId = -1 + indexPreFractAlong = -1 + indexPostCellId = -1 + indexPostSegId = -1 + indexPostFractAlong = -1 + indexWeight = -1 + indexDelay = -1 + + id = -1 + preCellId = -1 + preSegId = 0 + preFractAlong = 0.5 + postCellId = -1 + postSegId = 0 + postFractAlong = 0.5 + + type = "projection" + + extraParamIndices = {} + + for attrName in d.attrs._v_attrnames: + val = d.attrs.__getattr__(attrName) + if not isinstance(val, str): + val = val.decode() + + # self.log.debug("Val of attribute: "+ attrName + " is "+ str(val)) + + if val == "id" or val[0] == "id": + indexId = int(attrName[len("column_") :]) + elif val == "pre_cell_id" or val[0] == "pre_cell_id": + indexPreCellId = int(attrName[len("column_") :]) + elif val == "pre_segment_id" or val[0] == "pre_segment_id": + indexPreSegId = int(attrName[len("column_") :]) + elif val == "pre_fraction_along" or val[0] == "pre_fraction_along": + indexPreFractAlong = int(attrName[len("column_") :]) + elif val == "post_cell_id" or val[0] == 
"post_cell_id": + indexPostCellId = int(attrName[len("column_") :]) + elif val == "post_segment_id" or val[0] == "post_segment_id": + indexPostSegId = int(attrName[len("column_") :]) + elif ( + val == "post_fraction_along" or val[0] == "post_fraction_along" + ): + indexPostFractAlong = int(attrName[len("column_") :]) + elif val == "weight" or val[0] == "weight": + indexWeight = int(attrName[len("column_") :]) + elif val == "delay" or val[0] == "delay": + indexDelay = int(attrName[len("column_") :]) + + if self.nml_doc_extra_elements: + synapse_obj = self.nml_doc_extra_elements.get_by_id( + self.currentSynapse + ) + + pre_synapse_obj = ( + self.nml_doc_extra_elements.get_by_id(self.currentPreSynapse) + if len(self.currentPreSynapse) > 0 + else None + ) - if indexDelay >= 0: - delay = row[indexDelay] else: - delay = 0 - - #self.log.debug("Connection %d from %f to %f" % (id, preCellId, postCellId)) - - - self.netHandler.handle_connection(self.currentProjectionId, \ - id, \ - self.currentProjectionPrePop, \ - self.currentProjectionPostPop, \ - self.currentSynapse, \ - preCellId, \ - postCellId, \ - preSegId, \ - preFractAlong, \ - postSegId, \ - postFractAlong, - delay=delay, - weight=weight) - else: - #TODO: a better way to convert??? 
- a = np.array(d) - self.currOptProjection.connections = ConnectionList(array=a,indices=self._extract_named_indices(d)) - - if self.currInputList!="": - self.log.debug("Using data for input list: "+ self.currInputList) - self.log.debug("Size is: "+str(d.shape[0])+" rows of: "+ str(d.shape[1])+ " entries") - - if not self.optimized: + synapse_obj = None + pre_synapse_obj = None + + self.netHandler.handle_projection( + self.currentProjectionId, + self.currentProjectionPrePop, + self.currentProjectionPostPop, + self.currentSynapse, + hasWeights=indexWeight > 0, + hasDelays=indexDelay > 0, + type=self.currentProjectionType, + synapse_obj=synapse_obj, + pre_synapse_obj=pre_synapse_obj, + ) + + self.log.debug( + "Cols: Id: %d precell: %d, postcell: %d, pre fract: %d, post fract: %d" + % ( + indexId, + indexPreCellId, + indexPostCellId, + indexPreFractAlong, + indexPostFractAlong, + ) + ) + + self.log.debug("Extra cols: " + str(extraParamIndices)) + + for i in range(0, d.shape[0]): + row = d[i, :] + + id = int(row[indexId]) if indexId > 0 else i + + preCellId = int(row[indexPreCellId]) + + if indexPreSegId >= 0: + preSegId = int(row[indexPreSegId]) + if indexPreFractAlong >= 0: + preFractAlong = row[indexPreFractAlong] + + postCellId = int(row[indexPostCellId]) + + if indexPostSegId >= 0: + postSegId = int(row[indexPostSegId]) + if indexPostFractAlong >= 0: + postFractAlong = row[indexPostFractAlong] + + if indexWeight >= 0: + weight = row[indexWeight] + else: + weight = 1 + + if indexDelay >= 0: + delay = row[indexDelay] + else: + delay = 0 + + # self.log.debug("Connection %d from %f to %f" % (id, preCellId, postCellId)) + + self.netHandler.handle_connection( + self.currentProjectionId, + id, + self.currentProjectionPrePop, + self.currentProjectionPostPop, + self.currentSynapse, + preCellId, + postCellId, + preSegId, + preFractAlong, + postSegId, + postFractAlong, + delay=delay, + weight=weight, + ) + else: + # TODO: a better way to convert??? 
+ a = np.array(d) + self.currOptProjection.connections = ConnectionList( + array=a, indices=self._extract_named_indices(d) + ) + + if self.currInputList != "": + self.log.debug("Using data for input list: " + self.currInputList) + self.log.debug( + "Size is: " + + str(d.shape[0]) + + " rows of: " + + str(d.shape[1]) + + " entries" + ) + + if not self.optimized: + + indexId = -1 + indexTargetCellId = -1 + indexSegId = -1 + indexFractAlong = -1 + indexWeight = -1 + + segId = 0 + fractAlong = 0.5 + weight = 1 + + for attrName in d.attrs._v_attrnames: + val = d.attrs.__getattr__(attrName) + if not isinstance(val, str): + val = val.decode() + + self.log.debug("Val of attribute: " + attrName + " is " + str(val)) + + if val == "id" or val[0] == "id": + indexId = int(attrName[len("column_") :]) + elif val == "target_cell_id" or val[0] == "target_cell_id": + indexTargetCellId = int(attrName[len("column_") :]) + elif val == "segment_id" or val[0] == "segment_id": + indexSegId = int(attrName[len("column_") :]) + elif val == "fraction_along" or val[0] == "fraction_along": + indexFractAlong = int(attrName[len("column_") :]) + elif val == "weight" or val[0] == "weight": + indexWeight = int(attrName[len("column_") :]) + + for i in range(0, d.shape[0]): + + if indexId >= 0: + id_ = int(d[i, indexId]) + else: + id_ = i + + tid = int(d[i, indexTargetCellId]) + + if indexSegId >= 0: + segId = int(d[i, indexSegId]) + + if indexFractAlong >= 0: + fractAlong = float(d[i, indexFractAlong]) + + if indexWeight >= 0: + weight = float(d[i, indexWeight]) + + self.log.debug("Adding %s, %s, %s" % (tid, segId, fractAlong)) + + self.netHandler.handle_single_input( + self.currInputList, + id_, + tid, + segId=segId, + fract=fractAlong, + weight=weight, + ) + + self.netHandler.finalise_input_source(self.currInputList) - indexId = -1 - indexTargetCellId = -1 - indexSegId = -1 - indexFractAlong= -1 - indexWeight= -1 + else: - segId = 0 - fractAlong= 0.5 - weight=1 + # TODO: a better way to convert??? 
+ a = np.array(d) + self.currOptInputList.input = InputsList( + array=a, indices=self._extract_named_indices(d) + ) + def _get_node_size(self, g, name): - for attrName in d.attrs._v_attrnames: - val = d.attrs.__getattr__(attrName) - if not isinstance(val, str): - val = val.decode() - - self.log.debug("Val of attribute: "+ attrName + " is "+ str(val)) - - if val == 'id' or val[0] == 'id': - indexId = int(attrName[len('column_'):]) - elif val == 'target_cell_id' or val[0] == 'target_cell_id': - indexTargetCellId = int(attrName[len('column_'):]) - elif val == 'segment_id' or val[0] == 'segment_id': - indexSegId = int(attrName[len('column_'):]) - elif val == 'fraction_along' or val[0] == 'fraction_along': - indexFractAlong = int(attrName[len('column_'):]) - elif val == 'weight' or val[0] == 'weight': - indexWeight = int(attrName[len('column_'):]) - - for i in range(0, d.shape[0]): - - if indexId >= 0: - id_ = int(d[i,indexId]) - else: - id_ = i - - tid = int(d[i,indexTargetCellId]) - - if indexSegId >= 0: - segId = int(d[i,indexSegId]) - - if indexFractAlong >= 0: - fractAlong = float(d[i,indexFractAlong]) - - if indexWeight >= 0: - weight = float(d[i,indexWeight]) - - self.log.debug("Adding %s, %s, %s"%(tid,segId,fractAlong)) - - self.netHandler.handle_single_input(self.currInputList, - id_, - tid, - segId = segId, - fract = fractAlong, - weight = weight) - - self.netHandler.finalise_input_source(self.currInputList) - - else: - - #TODO: a better way to convert??? - a = np.array(d) - self.currOptInputList.input = InputsList(array=a,indices=self._extract_named_indices(d)) - - def _get_node_size(self, g, name): - size = -1 # Peek ahead for size... 
for node in g: if self._is_dataset(node) and node.name == name: - size = node.shape[0] - + size = node.shape[0] + if size == -1: size = g._v_attrs.size - + return size - - - def start_group(self, g): - self.log.debug("Going into a group: "+ g._v_name) - - - if g._v_name == 'neuroml': - - if not self.optimized: - self.netHandler.handle_document_start(get_str_attribute_group(g,'id'), - get_str_attribute_group(g,'notes')) - else: - self.doc_id = get_str_attribute_group(g,'id') - self.doc_notes = get_str_attribute_group(g,'notes') - - if g._v_name == 'network': - - if not self.optimized: - self.netHandler.handle_network(get_str_attribute_group(g,'id'), - get_str_attribute_group(g,'notes'), - temperature=get_str_attribute_group(g,'temperature')) - else: - self.optimizedNetwork = NetworkContainer(id=get_str_attribute_group(g,'id'), - notes=get_str_attribute_group(g,'notes'), - temperature=get_str_attribute_group(g,'temperature')) - - if g._v_name.count('population_')>=1: - # TODO: a better check to see if the attribute is a str or numpy.ndarray - self.currPopulation = get_str_attribute_group(g,'id') - self.currentComponent = get_str_attribute_group(g,'component') - - size = self._get_node_size(g,self.currPopulation) - - properties = {} - for fname in g._v_attrs._v_attrnames: - if fname.startswith('property:'): - name = str(fname).split(':')[1] - properties[name] = get_str_attribute_group(g,fname) - - self.log.debug("Found a population: %s, component: %s, size: %s, properties: %s" % \ - (self.currPopulation,self.currentComponent,size,properties)) - - if not self.optimized: - if self.nml_doc_extra_elements: - component_obj = self.nml_doc_extra_elements.get_by_id(self.currentComponent) + + def start_group(self, g): + self.log.debug("Going into a group: " + g._v_name) + + if g._v_name == "neuroml": + + if not self.optimized: + self.netHandler.handle_document_start( + get_str_attribute_group(g, "id"), + get_str_attribute_group(g, "notes"), + ) else: - component_obj = None + 
self.doc_id = get_str_attribute_group(g, "id") + self.doc_notes = get_str_attribute_group(g, "notes") - # Try for Python3 - try: - args = inspect.getfullargspec(self.netHandler.handle_population)[0] - except AttributeError: - # Fall back for Python 2 - args = inspect.getargspec(self.netHandler.handle_population)[0] + if g._v_name == "network": - if 'properties' in args: - self.netHandler.handle_population(self.currPopulation, self.currentComponent, size, component_obj=component_obj, properties=properties) + if not self.optimized: + self.netHandler.handle_network( + get_str_attribute_group(g, "id"), + get_str_attribute_group(g, "notes"), + temperature=get_str_attribute_group(g, "temperature"), + ) else: - self.netHandler.handle_population(self.currPopulation, self.currentComponent, size, component_obj=component_obj) - else: - self.currOptPopulation = PopulationContainer(id=self.currPopulation, component=self.currentComponent, size=size) - - for p in properties: - self.currOptPopulation.properties.append(neuroml.Property(p,properties[p])) - - self.optimizedNetwork.populations.append(self.currOptPopulation) - - if g._v_name.count('projection_')>=1: - - self.currentProjectionId = get_str_attribute_group(g,'id') - pt = get_str_attribute_group(g,'type') - self.currentProjectionType = pt if pt else "projection" - self.currentProjectionPrePop = get_str_attribute_group(g,'presynapticPopulation') - self.currentProjectionPostPop = get_str_attribute_group(g,'postsynapticPopulation') - - if "synapse" in g._v_attrs: - self.currentSynapse = get_str_attribute_group(g,'synapse') - elif "postComponent" in g._v_attrs: - self.currentSynapse = get_str_attribute_group(g,'postComponent') - - if "preComponent" in g._v_attrs: - self.currentPreSynapse = get_str_attribute_group(g,'preComponent') - - - if not self.optimized: - self.log.debug("------ Found a projection: %s, from %s to %s through %s" 
%(self.currentProjectionId,self.currentProjectionPrePop,self.currentProjectionPostPop,self.currentSynapse)) - else: - - if self.currentProjectionType=='electricalProjection': - raise Exception("Cannot yet export electricalProjections to optimized HDF5 format") - self.currOptProjection = ElectricalProjectionContainer(id=self.currentProjectionId, - presynaptic_population=self.currentProjectionPrePop, - postsynaptic_population=self.currentProjectionPostPop) - - self.optimizedNetwork.electrical_projections.append(self.currOptProjection) - - elif self.currentProjectionType=='continuousProjection': - - raise Exception("Cannot yet export continuousProjections to optimized HDF5 format") - - self.optimizedNetwork.continuous_projections.append(self.currOptProjection) - + self.optimizedNetwork = NetworkContainer( + id=get_str_attribute_group(g, "id"), + notes=get_str_attribute_group(g, "notes"), + temperature=get_str_attribute_group(g, "temperature"), + ) + + if g._v_name.count("population_") >= 1: + # TODO: a better check to see if the attribute is a str or numpy.ndarray + self.currPopulation = get_str_attribute_group(g, "id") + self.currentComponent = get_str_attribute_group(g, "component") + + size = self._get_node_size(g, self.currPopulation) + + properties = {} + for fname in g._v_attrs._v_attrnames: + if fname.startswith("property:"): + name = str(fname).split(":")[1] + properties[name] = get_str_attribute_group(g, fname) + + self.log.debug( + "Found a population: %s, component: %s, size: %s, properties: %s" + % (self.currPopulation, self.currentComponent, size, properties) + ) + + if not self.optimized: + if self.nml_doc_extra_elements: + component_obj = self.nml_doc_extra_elements.get_by_id( + self.currentComponent + ) + else: + component_obj = None + + # Try for Python3 + try: + args = inspect.getfullargspec(self.netHandler.handle_population)[0] + except AttributeError: + # Fall back for Python 2 + args = inspect.getargspec(self.netHandler.handle_population)[0] + + 
if "properties" in args: + self.netHandler.handle_population( + self.currPopulation, + self.currentComponent, + size, + component_obj=component_obj, + properties=properties, + ) + else: + self.netHandler.handle_population( + self.currPopulation, + self.currentComponent, + size, + component_obj=component_obj, + ) else: - self.currOptProjection = ProjectionContainer(id=self.currentProjectionId, - presynaptic_population=self.currentProjectionPrePop, - postsynaptic_population=self.currentProjectionPostPop, - synapse=self.currentSynapse) - - self.optimizedNetwork.projections.append(self.currOptProjection) - - - if g._v_name.count('inputList_')>=1 or g._v_name.count('input_list_')>=1: # inputList_ preferred - # TODO: a better check to see if the attribute is a str or numpy.ndarray - self.currInputList = get_str_attribute_group(g,'id') - component = get_str_attribute_group(g,'component') - population = get_str_attribute_group(g,'population') - - size = self._get_node_size(g,self.currInputList) - - - if not self.optimized: - if self.nml_doc_extra_elements: - input_comp_obj = self.nml_doc_extra_elements.get_by_id(component) + self.currOptPopulation = PopulationContainer( + id=self.currPopulation, component=self.currentComponent, size=size + ) + + for p in properties: + self.currOptPopulation.properties.append( + neuroml.Property(p, properties[p]) + ) + + self.optimizedNetwork.populations.append(self.currOptPopulation) + + if g._v_name.count("projection_") >= 1: + + self.currentProjectionId = get_str_attribute_group(g, "id") + pt = get_str_attribute_group(g, "type") + self.currentProjectionType = pt if pt else "projection" + self.currentProjectionPrePop = get_str_attribute_group( + g, "presynapticPopulation" + ) + self.currentProjectionPostPop = get_str_attribute_group( + g, "postsynapticPopulation" + ) + + if "synapse" in g._v_attrs: + self.currentSynapse = get_str_attribute_group(g, "synapse") + elif "postComponent" in g._v_attrs: + self.currentSynapse = 
get_str_attribute_group(g, "postComponent") + + if "preComponent" in g._v_attrs: + self.currentPreSynapse = get_str_attribute_group(g, "preComponent") + + if not self.optimized: + self.log.debug( + "------ Found a projection: %s, from %s to %s through %s" + % ( + self.currentProjectionId, + self.currentProjectionPrePop, + self.currentProjectionPostPop, + self.currentSynapse, + ) + ) else: - input_comp_obj = None - self.log.debug("Found an inputList: "+ self.currInputList+", component: "+component+", population: "+population+", size: "+ str(size)) + if self.currentProjectionType == "electricalProjection": + raise Exception( + "Cannot yet export electricalProjections to optimized HDF5 format" + ) + self.currOptProjection = ElectricalProjectionContainer( + id=self.currentProjectionId, + presynaptic_population=self.currentProjectionPrePop, + postsynaptic_population=self.currentProjectionPostPop, + ) - self.netHandler.handle_input_list(self.currInputList, population, component, size, input_comp_obj=input_comp_obj) - else: - self.currOptInputList = InputListContainer(id=self.currInputList, component=component, populations=population) - self.optimizedNetwork.input_lists.append(self.currOptInputList) - - - def end_group(self, g): - self.log.debug("Coming out of a group: "+ str(g)) - - if g._v_name.count('population_')>=1: - self.log.debug("End of population: "+ self.currPopulation+", cell type: "+self.currentComponent) - self.currPopulation ="" - self.currentComponent = "" - - if g._v_name.count('projection_')>=1: - - if not self.optimized: - self.netHandler.finalise_projection(self.currentProjectionId, - self.currentProjectionPrePop, - self.currentProjectionPostPop, - synapse=self.currentSynapse, - type=self.currentProjectionType) - - self.currentProjectionId = "" - self.currentProjectionType = "" - self.currentProjectionPrePop = "" - self.currentProjectionPostPop = "" - self.currentSynapse = "" - self.currentPreSynapse = "" - - if g._v_name.count('inputList_')>=1 or 
g._v_name.count('input_list_')>=1: - self.currInputList ="" - - - - -if __name__ == '__main__': - - - file_name = '../examples/test_files/complete.nml.h5' - #file_name = '../../../neuroConstruct/osb/showcase/NetPyNEShowcase/NeuroML2/scaling/Balanced.net.nml.h5' - - logging.basicConfig(level=logging.DEBUG, format="%(name)-19s %(levelname)-5s - %(message)s") + self.optimizedNetwork.electrical_projections.append( + self.currOptProjection + ) + + elif self.currentProjectionType == "continuousProjection": + + raise Exception( + "Cannot yet export continuousProjections to optimized HDF5 format" + ) + + self.optimizedNetwork.continuous_projections.append( + self.currOptProjection + ) + + else: + self.currOptProjection = ProjectionContainer( + id=self.currentProjectionId, + presynaptic_population=self.currentProjectionPrePop, + postsynaptic_population=self.currentProjectionPostPop, + synapse=self.currentSynapse, + ) + + self.optimizedNetwork.projections.append(self.currOptProjection) + + if ( + g._v_name.count("inputList_") >= 1 or g._v_name.count("input_list_") >= 1 + ): # inputList_ preferred + # TODO: a better check to see if the attribute is a str or numpy.ndarray + self.currInputList = get_str_attribute_group(g, "id") + component = get_str_attribute_group(g, "component") + population = get_str_attribute_group(g, "population") + + size = self._get_node_size(g, self.currInputList) + + if not self.optimized: + if self.nml_doc_extra_elements: + input_comp_obj = self.nml_doc_extra_elements.get_by_id(component) + else: + input_comp_obj = None + + self.log.debug( + "Found an inputList: " + + self.currInputList + + ", component: " + + component + + ", population: " + + population + + ", size: " + + str(size) + ) + + self.netHandler.handle_input_list( + self.currInputList, + population, + component, + size, + input_comp_obj=input_comp_obj, + ) + else: + self.currOptInputList = InputListContainer( + id=self.currInputList, component=component, populations=population + ) + 
self.optimizedNetwork.input_lists.append(self.currOptInputList) + + def end_group(self, g): + self.log.debug("Coming out of a group: " + str(g)) + + if g._v_name.count("population_") >= 1: + self.log.debug( + "End of population: " + + self.currPopulation + + ", cell type: " + + self.currentComponent + ) + self.currPopulation = "" + self.currentComponent = "" + + if g._v_name.count("projection_") >= 1: + + if not self.optimized: + self.netHandler.finalise_projection( + self.currentProjectionId, + self.currentProjectionPrePop, + self.currentProjectionPostPop, + synapse=self.currentSynapse, + type=self.currentProjectionType, + ) + + self.currentProjectionId = "" + self.currentProjectionType = "" + self.currentProjectionPrePop = "" + self.currentProjectionPostPop = "" + self.currentSynapse = "" + self.currentPreSynapse = "" + + if g._v_name.count("inputList_") >= 1 or g._v_name.count("input_list_") >= 1: + self.currInputList = "" + + +if __name__ == "__main__": + + file_name = "../examples/test_files/complete.nml.h5" + # file_name = '../../../neuroConstruct/osb/showcase/NetPyNEShowcase/NeuroML2/scaling/Balanced.net.nml.h5' + + logging.basicConfig( + level=logging.DEBUG, format="%(name)-19s %(levelname)-5s - %(message)s" + ) from neuroml.hdf5.DefaultNetworkHandler import DefaultNetworkHandler - nmlHandler = DefaultNetworkHandler() + nmlHandler = DefaultNetworkHandler() - currParser = NeuroMLHdf5Parser(nmlHandler) # The HDF5 handler knows of the structure of NetworkML and calls appropriate functions in NetworkHandler + currParser = NeuroMLHdf5Parser( + nmlHandler + ) # The HDF5 handler knows of the structure of NetworkML and calls appropriate functions in NetworkHandler currParser.parse(file_name) - - print('-------------------------------\n\n') - + + print("-------------------------------\n\n") + from neuroml.hdf5.NetworkBuilder import NetworkBuilder - nmlHandler = NetworkBuilder() + nmlHandler = NetworkBuilder() + + currParser = NeuroMLHdf5Parser(nmlHandler) - 
currParser = NeuroMLHdf5Parser(nmlHandler) - currParser.parse(file_name) - + nml_doc = currParser.get_nml_doc() summary0 = nml_doc.summary() print(summary0) exit() - print('-------------------------------\n\n') - - currParser = NeuroMLHdf5Parser(None,optimized=True) - + print("-------------------------------\n\n") + + currParser = NeuroMLHdf5Parser(None, optimized=True) + currParser.parse(file_name) - + nml_doc = currParser.get_nml_doc() print(nml_doc.summary()) - - - - - - - diff --git a/neuroml/hdf5/NeuroMLXMLParser.py b/neuroml/hdf5/NeuroMLXMLParser.py index 43160e1e..1958b118 100644 --- a/neuroml/hdf5/NeuroMLXMLParser.py +++ b/neuroml/hdf5/NeuroMLXMLParser.py @@ -2,7 +2,7 @@ # # A class to handle XML based NeuroML files. # Calls the appropriate methods in DefaultNetworkHandler when cell locations, -# network connections are found. The NetworkHandler can either print +# network connections are found. The NetworkHandler can either print # information, or if it's a class overriding NetworkHandler can create # the appropriate network in a simulator dependent fashion # @@ -10,428 +10,525 @@ # Author: Padraig Gleeson # # - + import logging import inspect -class NeuroMLXMLParser(): - - log = logging.getLogger("NeuroMLXMLParser") - - currPopulation = "" - currentComponent = "" - totalInstances = 0 - - currentProjectionId = "" - currentProjectionPrePop = "" - currentProjectionPostPop = "" - currentSynapse = "" - - - def __init__ (self, netHandler): - - self.netHandler = netHandler - - # For continued use with old API - if not hasattr(self.netHandler,'handle_network') or hasattr(self.netHandler,'handleNetwork'): - self.netHandler.handle_network = self.netHandler.handleNetwork - if not hasattr(self.netHandler,'handle_document_start') or hasattr(self.netHandler,'handleDocumentStart'): - self.netHandler.handle_document_start = self.netHandler.handleDocumentStart - if not hasattr(self.netHandler,'handle_population') or hasattr(self.netHandler,'handlePopulation'): - 
self.netHandler.handle_population = self.netHandler.handlePopulation - if not hasattr(self.netHandler,'handle_location') or hasattr(self.netHandler,'handleLocation'): - self.netHandler.handle_location = self.netHandler.handleLocation - - if not hasattr(self.netHandler,'handle_projection') or hasattr(self.netHandler,'handleProjection'): - self.netHandler.handle_projection = self.netHandler.handleProjection - if not hasattr(self.netHandler,'finalise_projection') or hasattr(self.netHandler,'finaliseProjection'): - self.netHandler.finalise_projection = self.netHandler.finaliseProjection - - if not hasattr(self.netHandler,'handle_connection') or hasattr(self.netHandler,'handleConnection'): - self.netHandler.handle_connection = self.netHandler.handleConnection - if not hasattr(self.netHandler,'handle_input_list') or hasattr(self.netHandler,'handleInputList'): - self.netHandler.handle_input_list = self.netHandler.handleInputList - if not hasattr(self.netHandler,'handle_single_input') or hasattr(self.netHandler,'handleSingleInput'): - self.netHandler.handle_single_input = self.netHandler.handleSingleInput - if not hasattr(self.netHandler,'finalise_input_source') or hasattr(self.netHandler,'finaliseInputSource'): - self.netHandler.finalise_input_source = self.netHandler.finaliseInputSource - - - - - def _parse_delay(self, delay_string): - if delay_string.endswith('ms'): - return float(delay_string[:-2].strip()) - elif delay_string.endswith('s'): - return float(delay_string[:-1].strip())*1000.0 - else: - print("Can't parse string for delay: %s"%delay_string) - exit(1) - - def parse(self, filename): - - print("Parsing: %s"%filename) - - import neuroml.loaders as loaders - - self.nml_doc = loaders.read_neuroml2_file(filename, - include_includes=True, - already_included=[]) - - print("Loaded: %s as NeuroMLDocument"%filename) - - self.netHandler.handle_document_start(self.nml_doc.id,self.nml_doc.notes) - - for network in self.nml_doc.networks: - - 
self.netHandler.handle_network(network.id,network.notes, temperature=network.temperature) - - for population in network.populations: - - component_obj = self.nml_doc.get_by_id(population.component) - properties = {} - for p in population.properties: - properties[p.tag]=p.value - - if len(population.instances)>0 and population.type=='populationList': - - # Try for Python3 - try: - args = inspect.getfullargspec(self.netHandler.handle_population)[0] - except AttributeError: - # Fall back for Python 2 - args = inspect.getargspec(self.netHandler.handle_population)[0] - - if 'properties' in args: - self.netHandler.handle_population(population.id, - population.component, - len(population.instances), - component_obj=component_obj, - properties=properties) - else: - self.netHandler.handle_population(population.id, - population.component, - len(population.instances), - component_obj=component_obj) - - for inst in population.instances: - - loc = inst.location - self.netHandler.handle_location(inst.id, \ - population.id, \ - population.component, \ - loc.x, \ - loc.y, \ - loc.z) - else: - # Try for Python3 - try: - args = inspect.getfullargspec(self.netHandler.handle_population)[0] - except AttributeError: - # Fall back for Python 2 - args = inspect.getargspec(self.netHandler.handle_population)[0] - - if 'properties' in args: - self.netHandler.handle_population(population.id, - population.component, - population.size, - component_obj=component_obj, - properties=properties) + +class NeuroMLXMLParser: + + log = logging.getLogger("NeuroMLXMLParser") + + currPopulation = "" + currentComponent = "" + totalInstances = 0 + + currentProjectionId = "" + currentProjectionPrePop = "" + currentProjectionPostPop = "" + currentSynapse = "" + + def __init__(self, netHandler): + + self.netHandler = netHandler + + # For continued use with old API + if not hasattr(self.netHandler, "handle_network") or hasattr( + self.netHandler, "handleNetwork" + ): + self.netHandler.handle_network = 
self.netHandler.handleNetwork + if not hasattr(self.netHandler, "handle_document_start") or hasattr( + self.netHandler, "handleDocumentStart" + ): + self.netHandler.handle_document_start = self.netHandler.handleDocumentStart + if not hasattr(self.netHandler, "handle_population") or hasattr( + self.netHandler, "handlePopulation" + ): + self.netHandler.handle_population = self.netHandler.handlePopulation + if not hasattr(self.netHandler, "handle_location") or hasattr( + self.netHandler, "handleLocation" + ): + self.netHandler.handle_location = self.netHandler.handleLocation + + if not hasattr(self.netHandler, "handle_projection") or hasattr( + self.netHandler, "handleProjection" + ): + self.netHandler.handle_projection = self.netHandler.handleProjection + if not hasattr(self.netHandler, "finalise_projection") or hasattr( + self.netHandler, "finaliseProjection" + ): + self.netHandler.finalise_projection = self.netHandler.finaliseProjection + + if not hasattr(self.netHandler, "handle_connection") or hasattr( + self.netHandler, "handleConnection" + ): + self.netHandler.handle_connection = self.netHandler.handleConnection + if not hasattr(self.netHandler, "handle_input_list") or hasattr( + self.netHandler, "handleInputList" + ): + self.netHandler.handle_input_list = self.netHandler.handleInputList + if not hasattr(self.netHandler, "handle_single_input") or hasattr( + self.netHandler, "handleSingleInput" + ): + self.netHandler.handle_single_input = self.netHandler.handleSingleInput + if not hasattr(self.netHandler, "finalise_input_source") or hasattr( + self.netHandler, "finaliseInputSource" + ): + self.netHandler.finalise_input_source = self.netHandler.finaliseInputSource + + def _parse_delay(self, delay_string): + if delay_string.endswith("ms"): + return float(delay_string[:-2].strip()) + elif delay_string.endswith("s"): + return float(delay_string[:-1].strip()) * 1000.0 + else: + print("Can't parse string for delay: %s" % delay_string) + exit(1) + + def parse(self, 
filename): + + print("Parsing: %s" % filename) + + import neuroml.loaders as loaders + + self.nml_doc = loaders.read_neuroml2_file( + filename, include_includes=True, already_included=[] + ) + + print("Loaded: %s as NeuroMLDocument" % filename) + + self.netHandler.handle_document_start(self.nml_doc.id, self.nml_doc.notes) + + for network in self.nml_doc.networks: + + self.netHandler.handle_network( + network.id, network.notes, temperature=network.temperature + ) + + for population in network.populations: + + component_obj = self.nml_doc.get_by_id(population.component) + properties = {} + for p in population.properties: + properties[p.tag] = p.value + + if ( + len(population.instances) > 0 + and population.type == "populationList" + ): + + # Try for Python3 + try: + args = inspect.getfullargspec( + self.netHandler.handle_population + )[0] + except AttributeError: + # Fall back for Python 2 + args = inspect.getargspec(self.netHandler.handle_population)[0] + + if "properties" in args: + self.netHandler.handle_population( + population.id, + population.component, + len(population.instances), + component_obj=component_obj, + properties=properties, + ) + else: + self.netHandler.handle_population( + population.id, + population.component, + len(population.instances), + component_obj=component_obj, + ) + + for inst in population.instances: + + loc = inst.location + self.netHandler.handle_location( + inst.id, + population.id, + population.component, + loc.x, + loc.y, + loc.z, + ) else: - self.netHandler.handle_population(population.id, - population.component, - population.size, - component_obj=component_obj) - - for i in range(population.size): - self.netHandler.handle_location(i, \ - population.id, \ - population.component, \ - None, \ - None, \ - None) - - for projection in network.projections: - - synapse_obj = self.nml_doc.get_by_id(projection.synapse) - - self.netHandler.handle_projection(projection.id, - projection.presynaptic_population, - 
projection.postsynaptic_population, - projection.synapse, - synapse_obj=synapse_obj) - - for connection in projection.connections: - - self.netHandler.handle_connection(projection.id, \ - connection.id, \ - projection.presynaptic_population, - projection.postsynaptic_population, - projection.synapse, \ - preCellId=connection.get_pre_cell_id(), \ - postCellId=connection.get_post_cell_id(), \ - preSegId=int(connection.pre_segment_id), \ - postSegId=int(connection.post_segment_id), \ - preFract=float(connection.pre_fraction_along), \ - postFract=float(connection.post_fraction_along), \ - delay=0, - weight=1) - - for connection in projection.connection_wds: - - self.netHandler.handle_connection(projection.id, \ - connection.id, \ - projection.presynaptic_population, - projection.postsynaptic_population, - projection.synapse, \ - preCellId=connection.get_pre_cell_id(), \ - postCellId=connection.get_post_cell_id(), \ - preSegId=int(connection.pre_segment_id), \ - postSegId=int(connection.post_segment_id), \ - preFract=float(connection.pre_fraction_along), \ - postFract=float(connection.post_fraction_along), \ - delay=self._parse_delay(connection.delay), - weight=float(connection.weight)) - - - for ep in network.electrical_projections: - - synapse = None - - for connection in ep.electrical_connections: - if synapse != None and synapse != connection.synapse: - raise Exception("There are different synapses for connections inside: %s!"%ep) - synapse = connection.synapse - for connection in ep.electrical_connection_instances: - if synapse != None and synapse != connection.synapse: - raise Exception("There are different synapses for connections inside: %s!"%ep) - synapse = connection.synapse - for connection in ep.electrical_connection_instance_ws: - if synapse != None and synapse != connection.synapse: - raise Exception("There are different synapses for connections inside: %s!"%ep) - synapse = connection.synapse - - synapse_obj = self.nml_doc.get_by_id(synapse) - - 
self.netHandler.handle_projection(ep.id, - ep.presynaptic_population, - ep.postsynaptic_population, - synapse, - type="electricalProjection", - synapse_obj=synapse_obj) - - for connection in ep.electrical_connections: - - self.netHandler.handle_connection(ep.id, \ - connection.id, \ - ep.presynaptic_population, - ep.postsynaptic_population, - connection.synapse, \ - preCellId=connection.get_pre_cell_id(), \ - postCellId=connection.get_post_cell_id(), \ - preSegId=int(connection.pre_segment), \ - postSegId=int(connection.post_segment), \ - preFract=float(connection.pre_fraction_along), \ - postFract=float(connection.post_fraction_along)) - - for connection in ep.electrical_connection_instances: - - self.netHandler.handle_connection(ep.id, \ - connection.id, \ - ep.presynaptic_population, - ep.postsynaptic_population, - connection.synapse, \ - preCellId=connection.get_pre_cell_id(), \ - postCellId=connection.get_post_cell_id(), \ - preSegId=int(connection.pre_segment), \ - postSegId=int(connection.post_segment), \ - preFract=float(connection.pre_fraction_along), \ - postFract=float(connection.post_fraction_along)) - - for connection in ep.electrical_connection_instance_ws: - - self.netHandler.handle_connection(ep.id, \ - connection.id, \ - ep.presynaptic_population, - ep.postsynaptic_population, - connection.synapse, \ - preCellId=connection.get_pre_cell_id(), \ - postCellId=connection.get_post_cell_id(), \ - preSegId=int(connection.pre_segment), \ - postSegId=int(connection.post_segment), \ - preFract=float(connection.pre_fraction_along), \ - postFract=float(connection.post_fraction_along), - weight=connection.get_weight()) - - for cp in network.continuous_projections: - - pre_comp = None - post_comp = None - - - for connection in cp.continuous_connections: - if pre_comp != None and pre_comp != connection.pre_component: - raise Exception("There are different pre components for connections inside: %s!"%cp) - pre_comp = connection.pre_component - if post_comp != None 
and post_comp != connection.post_component: - raise Exception("There are different post components for connections inside: %s!"%cp) - post_comp = connection.post_component - - for connection in cp.continuous_connection_instances: - if pre_comp != None and pre_comp != connection.pre_component: - raise Exception("There are different pre components for connections inside: %s!"%cp) - pre_comp = connection.pre_component - if post_comp != None and post_comp != connection.post_component: - raise Exception("There are different post components for connections inside: %s!"%cp) - post_comp = connection.post_component - - for connection in cp.continuous_connection_instance_ws: - if pre_comp != None and pre_comp != connection.pre_component: - raise Exception("There are different pre components for connections inside: %s!"%cp) - pre_comp = connection.pre_component - if post_comp != None and post_comp != connection.post_component: - raise Exception("There are different post components for connections inside: %s!"%cp) - post_comp = connection.post_component - - pre_obj = self.nml_doc.get_by_id(pre_comp) - post_obj = self.nml_doc.get_by_id(post_comp) - - synapse = pre_comp - - self.netHandler.handle_projection(cp.id, - cp.presynaptic_population, - cp.postsynaptic_population, - synapse, - type="continuousProjection", - pre_synapse_obj=pre_obj, - synapse_obj=post_obj) - - for connection in cp.continuous_connections: - - self.netHandler.handle_connection(cp.id, \ - connection.id, \ - cp.presynaptic_population, - cp.postsynaptic_population, - synapseType=None, \ - preCellId=connection.get_pre_cell_id(), \ - postCellId=connection.get_post_cell_id(), \ - preSegId=int(connection.pre_segment), \ - postSegId=int(connection.post_segment), \ - preFract=float(connection.pre_fraction_along), \ - postFract=float(connection.post_fraction_along)) - - for connection in cp.continuous_connection_instances: - - self.netHandler.handle_connection(cp.id, \ - connection.id, \ - cp.presynaptic_population, 
- cp.postsynaptic_population, - synapseType=None, \ - preCellId=connection.get_pre_cell_id(), \ - postCellId=connection.get_post_cell_id(), \ - preSegId=int(connection.pre_segment), \ - postSegId=int(connection.post_segment), \ - preFract=float(connection.pre_fraction_along), \ - postFract=float(connection.post_fraction_along)) - - for connection in cp.continuous_connection_instance_ws: - - self.netHandler.handle_connection(cp.id, \ - connection.id, \ - cp.presynaptic_population, - cp.postsynaptic_population, - synapseType=None, \ - preCellId=connection.get_pre_cell_id(), \ - postCellId=connection.get_post_cell_id(), \ - preSegId=int(connection.pre_segment), \ - postSegId=int(connection.post_segment), \ - preFract=float(connection.pre_fraction_along), \ - postFract=float(connection.post_fraction_along), - weight=connection.get_weight()) - - - for input_list in network.input_lists: - - input_comp_obj = self.nml_doc.get_by_id(input_list.component) - - self.netHandler.handle_input_list(input_list.id, input_list.populations, input_list.component, len(input_list.input),input_comp_obj=input_comp_obj) - - for input in input_list.input: - - self.netHandler.handle_single_input(input_list.id, - input.id, - cellId = input.get_target_cell_id(), - segId = input.get_segment_id(), - fract = input.get_fraction_along()) - for input_w in input_list.input_ws: - - self.netHandler.handle_single_input(input_list.id, - input_w.id, - cellId = input_w.get_target_cell_id(), - segId = input_w.get_segment_id(), - fract = input_w.get_fraction_along(), - weight = input_w.get_weight()) - - self.netHandler.finalise_input_source(input_list.id) - - for explicitInput in network.explicit_inputs: - - list_name = 'INPUT_%s_%s'%(explicitInput.input,explicitInput.target.replace('[','_').replace(']','_')) - if list_name.endswith('_'): list_name=list_name[:-1] - - pop = explicitInput.target.split('[')[0] - - input_comp_obj = self.nml_doc.get_by_id(explicitInput.input) - - 
self.netHandler.handle_input_list(list_name, pop, explicitInput.input, 1,input_comp_obj=input_comp_obj) - - self.netHandler.handle_single_input(list_name, - 0, - cellId = explicitInput.get_target_cell_id(), - segId = 0, - fract = 0.5) - - self.netHandler.finalise_input_source(list_name) - - - -if __name__ == '__main__': - - - file_name = '../examples/tmp/testh5.nml' - - logging.basicConfig(level=logging.INFO, format="%(name)-19s %(levelname)-5s - %(message)s") + # Try for Python3 + try: + args = inspect.getfullargspec( + self.netHandler.handle_population + )[0] + except AttributeError: + # Fall back for Python 2 + args = inspect.getargspec(self.netHandler.handle_population)[0] + + if "properties" in args: + self.netHandler.handle_population( + population.id, + population.component, + population.size, + component_obj=component_obj, + properties=properties, + ) + else: + self.netHandler.handle_population( + population.id, + population.component, + population.size, + component_obj=component_obj, + ) + + for i in range(population.size): + self.netHandler.handle_location( + i, population.id, population.component, None, None, None + ) + + for projection in network.projections: + + synapse_obj = self.nml_doc.get_by_id(projection.synapse) + + self.netHandler.handle_projection( + projection.id, + projection.presynaptic_population, + projection.postsynaptic_population, + projection.synapse, + synapse_obj=synapse_obj, + ) + + for connection in projection.connections: + + self.netHandler.handle_connection( + projection.id, + connection.id, + projection.presynaptic_population, + projection.postsynaptic_population, + projection.synapse, + preCellId=connection.get_pre_cell_id(), + postCellId=connection.get_post_cell_id(), + preSegId=int(connection.pre_segment_id), + postSegId=int(connection.post_segment_id), + preFract=float(connection.pre_fraction_along), + postFract=float(connection.post_fraction_along), + delay=0, + weight=1, + ) + + for connection in 
projection.connection_wds: + + self.netHandler.handle_connection( + projection.id, + connection.id, + projection.presynaptic_population, + projection.postsynaptic_population, + projection.synapse, + preCellId=connection.get_pre_cell_id(), + postCellId=connection.get_post_cell_id(), + preSegId=int(connection.pre_segment_id), + postSegId=int(connection.post_segment_id), + preFract=float(connection.pre_fraction_along), + postFract=float(connection.post_fraction_along), + delay=self._parse_delay(connection.delay), + weight=float(connection.weight), + ) + + for ep in network.electrical_projections: + + synapse = None + + for connection in ep.electrical_connections: + if synapse != None and synapse != connection.synapse: + raise Exception( + "There are different synapses for connections inside: %s!" + % ep + ) + synapse = connection.synapse + for connection in ep.electrical_connection_instances: + if synapse != None and synapse != connection.synapse: + raise Exception( + "There are different synapses for connections inside: %s!" + % ep + ) + synapse = connection.synapse + for connection in ep.electrical_connection_instance_ws: + if synapse != None and synapse != connection.synapse: + raise Exception( + "There are different synapses for connections inside: %s!" 
+ % ep + ) + synapse = connection.synapse + + synapse_obj = self.nml_doc.get_by_id(synapse) + + self.netHandler.handle_projection( + ep.id, + ep.presynaptic_population, + ep.postsynaptic_population, + synapse, + type="electricalProjection", + synapse_obj=synapse_obj, + ) + + for connection in ep.electrical_connections: + + self.netHandler.handle_connection( + ep.id, + connection.id, + ep.presynaptic_population, + ep.postsynaptic_population, + connection.synapse, + preCellId=connection.get_pre_cell_id(), + postCellId=connection.get_post_cell_id(), + preSegId=int(connection.pre_segment), + postSegId=int(connection.post_segment), + preFract=float(connection.pre_fraction_along), + postFract=float(connection.post_fraction_along), + ) + + for connection in ep.electrical_connection_instances: + + self.netHandler.handle_connection( + ep.id, + connection.id, + ep.presynaptic_population, + ep.postsynaptic_population, + connection.synapse, + preCellId=connection.get_pre_cell_id(), + postCellId=connection.get_post_cell_id(), + preSegId=int(connection.pre_segment), + postSegId=int(connection.post_segment), + preFract=float(connection.pre_fraction_along), + postFract=float(connection.post_fraction_along), + ) + + for connection in ep.electrical_connection_instance_ws: + + self.netHandler.handle_connection( + ep.id, + connection.id, + ep.presynaptic_population, + ep.postsynaptic_population, + connection.synapse, + preCellId=connection.get_pre_cell_id(), + postCellId=connection.get_post_cell_id(), + preSegId=int(connection.pre_segment), + postSegId=int(connection.post_segment), + preFract=float(connection.pre_fraction_along), + postFract=float(connection.post_fraction_along), + weight=connection.get_weight(), + ) + + for cp in network.continuous_projections: + + pre_comp = None + post_comp = None + + for connection in cp.continuous_connections: + if pre_comp != None and pre_comp != connection.pre_component: + raise Exception( + "There are different pre components for connections 
inside: %s!" + % cp + ) + pre_comp = connection.pre_component + if post_comp != None and post_comp != connection.post_component: + raise Exception( + "There are different post components for connections inside: %s!" + % cp + ) + post_comp = connection.post_component + + for connection in cp.continuous_connection_instances: + if pre_comp != None and pre_comp != connection.pre_component: + raise Exception( + "There are different pre components for connections inside: %s!" + % cp + ) + pre_comp = connection.pre_component + if post_comp != None and post_comp != connection.post_component: + raise Exception( + "There are different post components for connections inside: %s!" + % cp + ) + post_comp = connection.post_component + + for connection in cp.continuous_connection_instance_ws: + if pre_comp != None and pre_comp != connection.pre_component: + raise Exception( + "There are different pre components for connections inside: %s!" + % cp + ) + pre_comp = connection.pre_component + if post_comp != None and post_comp != connection.post_component: + raise Exception( + "There are different post components for connections inside: %s!" 
+ % cp + ) + post_comp = connection.post_component + + pre_obj = self.nml_doc.get_by_id(pre_comp) + post_obj = self.nml_doc.get_by_id(post_comp) + + synapse = pre_comp + + self.netHandler.handle_projection( + cp.id, + cp.presynaptic_population, + cp.postsynaptic_population, + synapse, + type="continuousProjection", + pre_synapse_obj=pre_obj, + synapse_obj=post_obj, + ) + + for connection in cp.continuous_connections: + + self.netHandler.handle_connection( + cp.id, + connection.id, + cp.presynaptic_population, + cp.postsynaptic_population, + synapseType=None, + preCellId=connection.get_pre_cell_id(), + postCellId=connection.get_post_cell_id(), + preSegId=int(connection.pre_segment), + postSegId=int(connection.post_segment), + preFract=float(connection.pre_fraction_along), + postFract=float(connection.post_fraction_along), + ) + + for connection in cp.continuous_connection_instances: + + self.netHandler.handle_connection( + cp.id, + connection.id, + cp.presynaptic_population, + cp.postsynaptic_population, + synapseType=None, + preCellId=connection.get_pre_cell_id(), + postCellId=connection.get_post_cell_id(), + preSegId=int(connection.pre_segment), + postSegId=int(connection.post_segment), + preFract=float(connection.pre_fraction_along), + postFract=float(connection.post_fraction_along), + ) + + for connection in cp.continuous_connection_instance_ws: + + self.netHandler.handle_connection( + cp.id, + connection.id, + cp.presynaptic_population, + cp.postsynaptic_population, + synapseType=None, + preCellId=connection.get_pre_cell_id(), + postCellId=connection.get_post_cell_id(), + preSegId=int(connection.pre_segment), + postSegId=int(connection.post_segment), + preFract=float(connection.pre_fraction_along), + postFract=float(connection.post_fraction_along), + weight=connection.get_weight(), + ) + + for input_list in network.input_lists: + + input_comp_obj = self.nml_doc.get_by_id(input_list.component) + + self.netHandler.handle_input_list( + input_list.id, + 
input_list.populations, + input_list.component, + len(input_list.input), + input_comp_obj=input_comp_obj, + ) + + for input in input_list.input: + + self.netHandler.handle_single_input( + input_list.id, + input.id, + cellId=input.get_target_cell_id(), + segId=input.get_segment_id(), + fract=input.get_fraction_along(), + ) + for input_w in input_list.input_ws: + + self.netHandler.handle_single_input( + input_list.id, + input_w.id, + cellId=input_w.get_target_cell_id(), + segId=input_w.get_segment_id(), + fract=input_w.get_fraction_along(), + weight=input_w.get_weight(), + ) + + self.netHandler.finalise_input_source(input_list.id) + + for explicitInput in network.explicit_inputs: + + list_name = "INPUT_%s_%s" % ( + explicitInput.input, + explicitInput.target.replace("[", "_").replace("]", "_"), + ) + if list_name.endswith("_"): + list_name = list_name[:-1] + + pop = explicitInput.target.split("[")[0] + + input_comp_obj = self.nml_doc.get_by_id(explicitInput.input) + + self.netHandler.handle_input_list( + list_name, + pop, + explicitInput.input, + 1, + input_comp_obj=input_comp_obj, + ) + + self.netHandler.handle_single_input( + list_name, + 0, + cellId=explicitInput.get_target_cell_id(), + segId=0, + fract=0.5, + ) + + self.netHandler.finalise_input_source(list_name) + + +if __name__ == "__main__": + + file_name = "../examples/tmp/testh5.nml" + + logging.basicConfig( + level=logging.INFO, format="%(name)-19s %(levelname)-5s - %(message)s" + ) from neuroml.hdf5.DefaultNetworkHandler import DefaultNetworkHandler - nmlHandler = DefaultNetworkHandler() + nmlHandler = DefaultNetworkHandler() - currParser = NeuroMLXMLParser(nmlHandler) # The HDF5 handler knows of the structure of NetworkML and calls appropriate functions in NetworkHandler + currParser = NeuroMLXMLParser( + nmlHandler + ) # The HDF5 handler knows of the structure of NetworkML and calls appropriate functions in NetworkHandler currParser.parse(file_name) - - print('-------------------------------\n\n') - + + 
print("-------------------------------\n\n") + from neuroml.hdf5.NetworkBuilder import NetworkBuilder - nmlHandler = NetworkBuilder() + nmlHandler = NetworkBuilder() + + currParser = NeuroMLXMLParser(nmlHandler) - currParser = NeuroMLXMLParser(nmlHandler) - currParser.parse(file_name) - + nml_doc = nmlHandler.get_nml_doc() - + print(nml_doc.summary()) - nml_file = '../examples/tmp/testh5_2_.nml' + nml_file = "../examples/tmp/testh5_2_.nml" import neuroml.writers as writers + writers.NeuroMLWriter.write(nml_doc, nml_file) - print("Written network file to: "+nml_file) - - - - - - - + print("Written network file to: " + nml_file) diff --git a/neuroml/hdf5/__init__.py b/neuroml/hdf5/__init__.py index ca3ab688..74d95cb1 100644 --- a/neuroml/hdf5/__init__.py +++ b/neuroml/hdf5/__init__.py @@ -1,26 +1,23 @@ - import neuroml import numpy import sys def get_str_attribute_group(group, name): - if not hasattr(group._v_attrs,name): + if not hasattr(group._v_attrs, name): return None - for attrName in group._v_attrs._v_attrnames: - if attrName == name: - val = group._v_attrs[name] - - if isinstance(val,numpy.ndarray): - val = val[0] - elif isinstance(val, numpy.bytes_): - val = val.decode('UTF-8') - else: - val = str(val) - - #print("- Found [%s] in [%s]: %s = [%s] %s"%(attrName, group, name,val,type(val))) - return val - return None + for attrName in group._v_attrs._v_attrnames: + if attrName == name: + val = group._v_attrs[name] + if isinstance(val, numpy.ndarray): + val = val[0] + elif isinstance(val, numpy.bytes_): + val = val.decode("UTF-8") + else: + val = str(val) + # print("- Found [%s] in [%s]: %s = [%s] %s"%(attrName, group, name,val,type(val))) + return val + return None diff --git a/neuroml/loaders.py b/neuroml/loaders.py index a37dc749..a0f69326 100644 --- a/neuroml/loaders.py +++ b/neuroml/loaders.py @@ -1,4 +1,3 @@ - from neuroml.nml.nml import parse as nmlparse from neuroml.nml.nml import parseString as nmlparsestring @@ -12,73 +11,84 @@ 
supressGeneratedsWarnings = True + def print_(text, verbose=True): if verbose: prefix = "libNeuroML >>> " - #if not isinstance(text, str): text = text.decode('ascii') + # if not isinstance(text, str): text = text.decode('ascii') if verbose: - print("%s%s"%(prefix, text.replace("\n", "\n"+prefix))) + print("%s%s" % (prefix, text.replace("\n", "\n" + prefix))) class NeuroMLLoader(object): - @classmethod - def load(cls,src): + def load(cls, src): doc = cls.__nml2_doc(src) - return doc + if isinstance(doc, neuroml.nml.nml.NeuroMLDocument): + return doc + else: + raise TypeError( + "{} does not appear to be a NeuroML Document. NeuroML documents must be contained in a tag.".format( + src + ) + ) - @classmethod - def __nml2_doc(cls,file_name): + @classmethod + def __nml2_doc(cls, file_name): import sys try: - - if supressGeneratedsWarnings: warnings.simplefilter("ignore") + + if supressGeneratedsWarnings: + warnings.simplefilter("ignore") nml2_doc = nmlparse(file_name) - if supressGeneratedsWarnings: warnings.resetwarnings() + if supressGeneratedsWarnings: + warnings.resetwarnings() except Exception as e: - raise Exception("Not a valid NeuroML 2 doc (%s): %s" % (file_name,e), e) - + raise Exception("Not a valid NeuroML 2 doc (%s): %s" % (file_name, e), e) + return nml2_doc class NeuroMLHdf5Loader(object): - @classmethod - def load(cls,src,optimized=False): - doc = cls.__nml2_doc(src,optimized) + def load(cls, src, optimized=False): + doc = cls.__nml2_doc(src, optimized) return doc - @classmethod - def __nml2_doc(cls,file_name,optimized=False): + @classmethod + def __nml2_doc(cls, file_name, optimized=False): import sys - + import logging - logging.basicConfig(level=logging.INFO, format="%(name)-19s %(levelname)-5s - %(message)s") - + + logging.basicConfig( + level=logging.INFO, format="%(name)-19s %(levelname)-5s - %(message)s" + ) + from neuroml.hdf5.NeuroMLHdf5Parser import NeuroMLHdf5Parser - + if optimized: - - currParser = NeuroMLHdf5Parser(None,optimized=True) - + + 
currParser = NeuroMLHdf5Parser(None, optimized=True) + currParser.parse(file_name) - + return currParser.get_nml_doc() else: - + from neuroml.hdf5.NetworkBuilder import NetworkBuilder - nmlHandler = NetworkBuilder() + nmlHandler = NetworkBuilder() - currParser = NeuroMLHdf5Parser(nmlHandler) + currParser = NeuroMLHdf5Parser(nmlHandler) currParser.parse(file_name) nml2_doc = nmlHandler.get_nml_doc() if currParser.nml_doc_extra_elements: - add_all_to_document(currParser.nml_doc_extra_elements,nml2_doc) + add_all_to_document(currParser.nml_doc_extra_elements, nml2_doc) return nml2_doc @@ -87,123 +97,74 @@ class SWCLoader(object): """ WARNING: Class defunct """ - + @classmethod - def load_swc_single(cls, src, name=None): - + def load_swc_single(cls, src, name=None): + import numpy as np from neuroml import arraymorph - - dtype= {'names': ('id', 'type', 'x','y','z','r','pid'), - 'formats': ('int32', 'int32', 'f4','f4','f4','f4','int32') } - - d = np.loadtxt(src,dtype=dtype ) - - if len( np.nonzero( d['pid']==-1)) != 1: - assert False, "Unexpected number of id's of -1 in file" - - num_nodes=len(d['pid']) - - root_index=np.where(d['pid']==-1)[0][0] - - # We might not nessesarily have continuous indices in the - # SWC file, so lets convert them: - index_to_id = d['id'] - id_to_index_dict = dict( [(id,index) for index,id in enumerate(index_to_id) ] ) - - if len(id_to_index_dict) != len(index_to_id): - s = "Internal Error Loading SWC: Index and ID map are different lengths." - s += " [ID:%d, Index:%d]"%( len(index_to_id), len(id_to_index_dict) ) - # TODO: this is undefined!! 
- raise MorphologyImportError(s) # noqa: F821 - - # Vertices and section types are easy: - vertices = d[ ['x','y','z','r'] ] - vertices = np.vstack( [d['x'], d['y'],d['z'],d['r'] ]).T - section_types = [ swctype for ID,swctype in d[['id','type']]] - - #for connection indices we want the root to have index -1: - connection_indices=np.zeros(num_nodes,dtype='int32') - for i in range(num_nodes): - pID=d['pid'][i] - if pID !=-1: - parent_index=id_to_index_dict[pID] - connection_indices[i]=parent_index - else: - connection_indices[i]=-1 - - #This needs to become an "Optimized Morphology" of some kind - return arraymorph.ArrayMorphology(vertices=vertices, - connectivity=connection_indices, - node_types=section_types, - name=name ) - - -class JSONLoader(object): - @classmethod - def load(cls,file): + dtype = { + "names": ("id", "type", "x", "y", "z", "r", "pid"), + "formats": ("int32", "int32", "f4", "f4", "f4", "f4", "int32"), + } - from jsonpickle import decode as json_decode - if isinstance(file,str): - fileh = open(file,'r') - else: - fileh = file + d = np.loadtxt(src, dtype=dtype) - json_string = fileh.read() - unpickled = json_decode(json_string) - fileh.close() - return unpickled - - @classmethod - def load_from_mongodb(cls, - db, - id, - host=None, - port=None): - - from pymongo import MongoClient - try: - import simplejson as json - except ImportError: - import json - - from jsonpickle import decode as json_decode + if len(np.nonzero(d["pid"] == -1)) != 1: + assert False, "Unexpected number of id's of -1 in file" - if host == None: - host = 'localhost' - if port == None: - port = 27017 + num_nodes = len(d["pid"]) - client = MongoClient(host,port) + root_index = np.where(d["pid"] == -1)[0][0] - db = client[db] + # We might not nessesarily have continuous indices in the + # SWC file, so lets convert them: + index_to_id = d["id"] + id_to_index_dict = dict([(id, index) for index, id in enumerate(index_to_id)]) - collection = db[id] + if len(id_to_index_dict) != 
len(index_to_id): + s = "Internal Error Loading SWC: Index and ID map are different lengths." + s += " [ID:%d, Index:%d]" % (len(index_to_id), len(id_to_index_dict)) + # TODO: this is undefined!! + raise MorphologyImportError(s) # noqa: F821 - doc = collection.find_one() + # Vertices and section types are easy: + vertices = d[["x", "y", "z", "r"]] + vertices = np.vstack([d["x"], d["y"], d["z"], d["r"]]).T + section_types = [swctype for ID, swctype in d[["id", "type"]]] - del doc['_id'] + # for connection indices we want the root to have index -1: + connection_indices = np.zeros(num_nodes, dtype="int32") + for i in range(num_nodes): + pID = d["pid"][i] + if pID != -1: + parent_index = id_to_index_dict[pID] + connection_indices[i] = parent_index + else: + connection_indices[i] = -1 - doc = json.dumps(doc) + # This needs to become an "Optimized Morphology" of some kind + return arraymorph.ArrayMorphology( + vertices=vertices, + connectivity=connection_indices, + node_types=section_types, + name=name, + ) - document = json_decode(doc) - return document - class ArrayMorphLoader(object): - @classmethod def __extract_morphology(cls, node): - - from neuroml import arraymorph - - loaded_morphology = arraymorph.ArrayMorphology() - loaded_morphology.physical_mask = node.physical_mask[:] - loaded_morphology.vertices = node.vertices[:] - loaded_morphology.connectivity = node.connectivity[:] - return loaded_morphology + from neuroml import arraymorph + + loaded_morphology = arraymorph.ArrayMorphology() + loaded_morphology.physical_mask = node.physical_mask[:] + loaded_morphology.vertices = node.vertices[:] + loaded_morphology.connectivity = node.connectivity[:] + + return loaded_morphology @classmethod def load(cls, filepath): @@ -212,12 +173,13 @@ def load(cls, filepath): TODO: Complete refactoring. 
""" import tables - with tables.open_file(filepath,mode='r') as file: + + with tables.open_file(filepath, mode="r") as file: document = neuroml.NeuroMLDocument() for node in file.root: - if hasattr(node,'vertices'): + if hasattr(node, "vertices"): loaded_morphology = cls.__extract_morphology(node) document.morphology.append(loaded_morphology) else: @@ -228,88 +190,139 @@ def load(cls, filepath): return document -def read_neuroml2_file(nml2_file_name, include_includes=False, verbose=False, - already_included=[], print_method=print_, optimized=False): - +def read_neuroml2_file( + nml2_file_name, + include_includes=False, + verbose=False, + already_included=[], + print_method=print_, + optimized=False, +): + print_method("Loading NeuroML2 file: %s" % nml2_file_name, verbose) - + if not os.path.isfile(nml2_file_name): print_method("Unable to find file: %s!" % nml2_file_name, True) sys.exit() - - return _read_neuroml2(nml2_file_name, include_includes=include_includes, verbose=verbose, - already_included=already_included, print_method=print_method, optimized=optimized) - - - -def read_neuroml2_string(nml2_string, include_includes=False, verbose=False, - already_included=[], print_method=print_, optimized=False, base_path=None): - + + return _read_neuroml2( + nml2_file_name, + include_includes=include_includes, + verbose=verbose, + already_included=already_included, + print_method=print_method, + optimized=optimized, + ) + + +def read_neuroml2_string( + nml2_string, + include_includes=False, + verbose=False, + already_included=[], + print_method=print_, + optimized=False, + base_path=None, +): + print_method("Loading NeuroML2 string, base_path: %s" % base_path, verbose) - - return _read_neuroml2(nml2_string, include_includes=include_includes, verbose=verbose, - already_included=already_included, print_method=print_method, - optimized=optimized, base_path=base_path) - - - -def _read_neuroml2(nml2_file_name_or_string, include_includes=False, verbose=False, - 
already_included=[], print_method=print_, optimized=False, base_path=None): - - #print("................ Loading: %s"%nml2_file_name_or_string[:7]) - - base_path_to_use = os.path.dirname(os.path.realpath(nml2_file_name_or_string)) if base_path == None else base_path - - - if supressGeneratedsWarnings: warnings.simplefilter("ignore") - - if not isinstance(nml2_file_name_or_string, str) or nml2_file_name_or_string.startswith('<'): + + return _read_neuroml2( + nml2_string, + include_includes=include_includes, + verbose=verbose, + already_included=already_included, + print_method=print_method, + optimized=optimized, + base_path=base_path, + ) + + +def _read_neuroml2( + nml2_file_name_or_string, + include_includes=False, + verbose=False, + already_included=[], + print_method=print_, + optimized=False, + base_path=None, +): + + # print("................ Loading: %s"%nml2_file_name_or_string[:7]) + + base_path_to_use = ( + os.path.dirname(os.path.realpath(nml2_file_name_or_string)) + if base_path == None + else base_path + ) + + if supressGeneratedsWarnings: + warnings.simplefilter("ignore") + + if not isinstance( + nml2_file_name_or_string, str + ) or nml2_file_name_or_string.startswith("<"): nml2_doc = nmlparsestring(nml2_file_name_or_string) - base_path_to_use = './' if base_path == None else base_path - elif nml2_file_name_or_string.endswith('.h5') or nml2_file_name_or_string.endswith('.hdf5'): - nml2_doc = NeuroMLHdf5Loader.load(nml2_file_name_or_string,optimized=optimized) + base_path_to_use = "./" if base_path == None else base_path + elif nml2_file_name_or_string.endswith(".h5") or nml2_file_name_or_string.endswith( + ".hdf5" + ): + nml2_doc = NeuroMLHdf5Loader.load(nml2_file_name_or_string, optimized=optimized) else: nml2_doc = NeuroMLLoader.load(nml2_file_name_or_string) - - if supressGeneratedsWarnings: warnings.resetwarnings() - + + if supressGeneratedsWarnings: + warnings.resetwarnings() + if include_includes: - print_method('Including included files 
(included already: %s)' \ - % already_included, verbose) - + print_method( + "Including included files (included already: %s)" % already_included, + verbose, + ) + for include in nml2_doc.includes: incl_loc = os.path.abspath(os.path.join(base_path_to_use, include.href)) if incl_loc not in already_included: - print_method("Loading included NeuroML2 file: %s (base: %s, resolved: %s)" % (include.href, base_path_to_use, incl_loc), - verbose) - - if incl_loc.endswith('.nml') or incl_loc.endswith('.xml'): - nml2_sub_doc = read_neuroml2_file(incl_loc, True, - verbose=verbose, already_included=already_included) + print_method( + "Loading included NeuroML2 file: %s (base: %s, resolved: %s)" + % (include.href, base_path_to_use, incl_loc), + verbose, + ) + + if incl_loc.endswith(".nml") or incl_loc.endswith(".xml"): + nml2_sub_doc = read_neuroml2_file( + incl_loc, + True, + verbose=verbose, + already_included=already_included, + ) already_included.append(incl_loc) - add_all_to_document(nml2_sub_doc,nml2_doc) - - elif incl_loc.endswith('.nml.h5'): + add_all_to_document(nml2_sub_doc, nml2_doc) + + elif incl_loc.endswith(".nml.h5"): nml2_sub_doc = NeuroMLHdf5Loader.load(incl_loc) already_included.append(incl_loc) - add_all_to_document(nml2_sub_doc,nml2_doc) - + add_all_to_document(nml2_sub_doc, nml2_doc) + else: - raise Exception("Unrecognised extension on file: %s"%incl_loc) - + raise Exception("Unrecognised extension on file: %s" % incl_loc) + nml2_doc.includes = [] - + else: - if len(nml2_doc.includes)>0: - print_method('NOT including included files, even though %s are included!'%len(nml2_doc.includes), verbose) - - + if len(nml2_doc.includes) > 0: + print_method( + "NOT including included files, even though %s are included!" 
+ % len(nml2_doc.includes), + verbose, + ) + nml2_doc.includes = [] - + return nml2_doc -if __name__ == '__main__': - +if __name__ == "__main__": + nml_doc = read_neuroml2_file(sys.argv[1]) print(nml_doc.summary()) diff --git a/neuroml/nml/NeuroML_v2.2.xsd b/neuroml/nml/NeuroML_v2.2.xsd new file mode 100644 index 00000000..a5a5461e --- /dev/null +++ b/neuroml/nml/NeuroML_v2.2.xsd @@ -0,0 +1,3666 @@ + + + + + + + + An id attribute for elements which need to be identified uniquely (normally just within their parent element). + + + + + + + + A value for a physical quantity in NeuroML 2, e.g. 20, -60.0mV or 5nA + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + An id string for pointing to an entry in an annotation element related to a MIRIAM resource. Based on metaid of SBML + + + + + + + + An id string for pointing to an entry in the NeuroLex ontology. Use of this attribute is a shorthand for a full + RDF based reference to the MIRIAM Resource urn:miriam:neurolex, with an bqbiol:is qualifier + + + + + + + + + An attribute useful as id of segments, connections, etc: integer >=0 only! + + + + + + + + + + + Integer >=1 only! + + + + + + + + + + + Double >0 only + + + + + + + + Value which is either 0 or 1 + + + + + + + + + + + + + Textual human readable notes related to the element in question. It's useful to put these into + the NeuroML files instead of XML comments, as the notes can be extracted and repeated in the files to which the NeuroML is mapped. + + + + + + + A property ( a **tag** and **value** pair ), which can be on any **baseStandalone** either as a direct child, or within an **Annotation** . Generally something which helps the visual display or facilitates simulation of a Component, but is not a core physiological property. 
Common examples include: **numberInternalDivisions,** equivalent of nseg in NEURON; **radius,** for a radius to use in graphical displays for abstract cells ( i. e. without defined morphologies ); **color,** the color to use for a **Population** or **populationList** of cells; **recommended_dt_ms,** the recommended timestep to use for simulating a **Network** , **recommended_duration_ms** the recommended duration to use when running a **Network** + + + + + + + + A structured annotation containing metadata, specifically RDF or **property** elements + + + + + + + + + + Contains an extension to NeuroML by creating custom LEMS ComponentType. + + + + + + + + + + + + + + + + + LEMS ComponentType for Constant. + + + + + + + + + LEMS Exposure (ComponentType property) + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + LEMS ComponentType for Dynamics + + + + + + + + + + + LEMS ComponentType for DerivedVariable + + + + + + + + + + + + + + + + + + + LEMS ComponentType for ConditionalDerivedVariable + + + + + + + + + + + + + + + + + + Float value restricted to between 1 and 0 + + + + + + + + + + + + The root NeuroML element. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Various types of cells which are defined in NeuroML 2. This list will be expanded... + + + + + + + + + + + + + + + + + + + + Various types of cells which are defined in NeuroML 2 based on PyNN standard cell models. + + + + + + + + + + + + + + Various types of synapse which are defined in NeuroML 2. This list will be expanded... + + + + + + + + + + + + + + + + + + Various types of synapse which are defined in NeuroML 2 based on PyNN standard cell/synapse models. + + + + + + + + + + + Various types of inputs which are defined in NeuroML2. This list will be expanded... + + + + + + + + + + + + + + + + + + + + + + + + + Various types of input which are defined in NeuroML 2 based on PyNN standard cell/synapse models. 
+ + + + + + + + Various types of concentration model which are defined in NeuroML 2. This list will be expanded... + + + + + + + + + + + + + + + + + + + + + A kinetic scheme based ion channel with multiple **gateKS** s, each of which consists of multiple **KSState** s and **KSTransition** s giving the rates of transition between them +\n +:param conductance: +:type conductance: conductance + + + + + + + + + + + + + + + + Note **ionChannel** and **ionChannelHH** are currently functionally identical. This is needed since many existing examples use ionChannel, some use ionChannelHH. NeuroML v2beta4 should remove one of these, probably ionChannelHH. +\n +:param conductance: +:type conductance: conductance + + + + + + + + + + + + + + + + + + + + + + + + Note **ionChannel** and **ionChannelHH** are currently functionally identical. This is needed since many existing examples use ionChannel, some use ionChannelHH. NeuroML v2beta4 should remove one of these, probably ionChannelHH. +\n +:param conductance: +:type conductance: conductance + + + + + + + + + + Same as **ionChannel** , but with a **vShift** parameter to change voltage activation of gates. The exact usage of **vShift** in expressions for rates is determined by the individual gates. 
+\n +:param vShift: +:type vShift: voltage +:param conductance: +:type conductance: conductance + + + + + + + + + + + + + + + + + + + A value for the conductance scaling which varies as a standard function of the difference between the current temperature, **temperature,** and the temperature at which the conductance was originally determined, **experimentalTemp** +\n +:param q10Factor: +:type q10Factor: none +:param experimentalTemp: +:type experimentalTemp: temperature + + + + + + + + + + + + + + + + + + + + + A **KSState** with **relativeConductance** of 0 +\n +:param relativeConductance: +:type relativeConductance: none + + + + + + + + + + + + A **KSState** with **relativeConductance** of 1 +\n +:param relativeConductance: +:type relativeConductance: none + + + + + + + + + + + + A forward only **KSTransition** for a **gateKS** which specifies a **rate** ( type **baseHHRate** ) which follows one of the standard Hodgkin Huxley forms ( e. g. **HHExpRate** , **HHSigmoidRate** , **HHExpLinearRate** + + + + + + + + + + + + + + + + A reverse only **KSTransition** for a **gateKS** which specifies a **rate** ( type **baseHHRate** ) which follows one of the standard Hodgkin Huxley forms ( e. g. **HHExpRate** , **HHSigmoidRate** , **HHExpLinearRate** + + + + + + + + + + + + + + + + + + + + + + KS Transition specified in terms of time constant **tau** and steady state **inf** + + + + + + + + + + + + + + + + A gate which consists of multiple **KSState** s and **KSTransition** s giving the rates of transition between them +\n +:param instances: +:type instances: none + + + + + + + + + + + + + + + + + + + + + + Note all sub elements for gateHHrates, gateHHratesTau, gateFractional etc. allowed here. 
Which are valid should be constrained by what type is set + + + + + + + + + + + + + + + + + + + + + Gate which follows the general Hodgkin Huxley formalism +\n +:param instances: +:type instances: none + + + + + + + + + + + + + + + + + + + Gate which follows the general Hodgkin Huxley formalism +\n +:param instances: +:type instances: none + + + + + + + + + + + + + + + + + + + Gate which follows the general Hodgkin Huxley formalism +\n +:param instances: +:type instances: none + + + + + + + + + + + + + + + + + + + + + Gate which follows the general Hodgkin Huxley formalism +\n +:param instances: +:type instances: none + + + + + + + + + + + + + + + + + + + + Gate which follows the general Hodgkin Huxley formalism +\n +:param instances: +:type instances: none + + + + + + + + + + + + + + + + + + + + Gate which follows the general Hodgkin Huxley formalism but is instantaneous, so tau = 0 and gate follows exactly inf value +\n +:param instances: +:type instances: none + + + + + + + + + + + + + + + + + Gate composed of subgates contributing with fractional conductance +\n +:param instances: +:type instances: none + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Model of an intracellular buffering mechanism for **ion** ( currently hard Coded to be calcium, due to requirement for **iCa** ) which has a baseline level **restingConc** and tends to this value with time course **decayConstant.** The ion is assumed to occupy a shell inside the membrane of thickness **shellThickness.** +\n +:param restingConc: +:type restingConc: concentration +:param decayConstant: +:type decayConstant: time +:param shellThickness: +:type shellThickness: length + + + + + + + + + + + + + + + + + + Model of buffering of concentration of an ion ( currently hard coded to be calcium, due to requirement for **iCa** ) which has a baseline level **restingConc** and tends to this value with time course **decayConstant.** A fixed 
factor **rho** is used to scale the incoming current *independently of the size of the compartment* to produce a concentration change. +\n +:param restingConc: +:type restingConc: concentration +:param decayConstant: +:type decayConstant: time +:param rho: +:type rho: rho_factor + + + + + + + + + + + + + + + + + + + + + Base type for all synapses, i. e. ComponentTypes which produce a current ( dimension current ) and change Dynamics in response to an incoming event. cno_0000009 + + + + + + + + + + + Base type for synapses with a dependence on membrane potential + + + + + + + + + + + Synapse model which produces a synaptic current. + + + + + + + + + + + Synapse model which exposes a conductance **g** in addition to producing a current. Not necessarily ohmic!! cno_0000027 +\n +:param gbase: Baseline conductance, generally the maximum conductance following a single spike +:type gbase: conductance +:param erev: Reversal potential of the synapse +:type erev: voltage + + + + + + + + + + + + + Synapse model suited for a sum of two expTwoSynapses which exposes a conductance **g** in addition to producing a current. Not necessarily ohmic!! cno_0000027 +\n +:param gbase1: Baseline conductance 1 +:type gbase1: conductance +:param gbase2: Baseline conductance 2 +:type gbase2: conductance +:param erev: Reversal potential of the synapse +:type erev: voltage + + + + + + + + + + + + + + Gap junction/single electrical connection +\n +:param conductance: +:type conductance: conductance + + + + + + + + + + + + Dummy synapse which emits no current. Used as presynaptic endpoint for analog synaptic connection. + + + + + + + + + + + Behaves just like a one way gap junction. +\n +:param conductance: +:type conductance: conductance + + + + + + + + + + + + Graded/analog synapse. Based on synapse in Methods of http://www. 
nature.com/neuro/journal/v7/n12/abs/nn1352.html +\n +:param conductance: +:type conductance: conductance +:param delta: Slope of the activation curve +:type delta: voltage +:param k: Rate constant for transmitter-receptor dissociation rate +:type k: per_time +:param Vth: The half-activation voltage of the synapse +:type Vth: voltage +:param erev: The reversal potential of the synapse +:type erev: voltage + + + + + + + + + + + + + + + + Alpha current synapse: rise time and decay time are both **tau.** +\n +:param tau: Time course for rise and decay +:type tau: time +:param ibase: Baseline current increase after receiving a spike +:type ibase: current + + + + + + + + + + + + + Ohmic synapse model where rise time and decay time are both **tau.** Max conductance reached during this time ( assuming zero conductance before ) is **gbase** * **weight.** +\n +:param tau: Time course of rise/decay +:type tau: time +:param gbase: Baseline conductance, generally the maximum conductance following a single spike +:type gbase: conductance +:param erev: Reversal potential of the synapse +:type erev: voltage + + + + + + + + + + + + Ohmic synapse model whose conductance rises instantaneously by ( **gbase** * **weight** ) on receiving an event, and which decays exponentially to zero with time course **tauDecay** +\n +:param tauDecay: Time course of decay +:type tauDecay: time +:param gbase: Baseline conductance, generally the maximum conductance following a single spike +:type gbase: conductance +:param erev: Reversal potential of the synapse +:type erev: voltage + + + + + + + + + + + + Ohmic synapse model whose conductance waveform on receiving an event has a rise time of **tauRise** and a decay time of **tauDecay.** Max conductance reached during this time ( assuming zero conductance before ) is **gbase** * **weight.** +\n +:param tauRise: +:type tauRise: time +:param tauDecay: +:type tauDecay: time +:param gbase: Baseline conductance, generally the maximum conductance following a 
single spike +:type gbase: conductance +:param erev: Reversal potential of the synapse +:type erev: voltage + + + + + + + + + + + + + Ohmic synapse similar to expTwoSynapse but consisting of two components that can differ in decay times and max conductances but share the same rise time. +\n +:param tauRise: +:type tauRise: time +:param tauDecay1: +:type tauDecay1: time +:param tauDecay2: +:type tauDecay2: time +:param gbase1: Baseline conductance 1 +:type gbase1: conductance +:param gbase2: Baseline conductance 2 +:type gbase2: conductance +:param erev: Reversal potential of the synapse +:type erev: voltage + + + + + + + + + + + + + + Synapse consisting of two independent synaptic mechanisms ( e. g. AMPA-R and NMDA-R ), which can be easily colocated in connections + + + + + + + + + + + + + + Biexponential synapse that allows for optional block and plasticity mechanisms, which can be expressed as child elements. +\n +:param tauRise: +:type tauRise: time +:param tauDecay: +:type tauDecay: time +:param gbase: Baseline conductance, generally the maximum conductance following a single spike +:type gbase: conductance +:param erev: Reversal potential of the synapse +:type erev: voltage + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Base type of any cell ( e. g. 
point neuron like **izhikevich2007Cell** , or a morphologically detailed **Cell** with **segment** s ) which can be used in a **population** + + + + + + + + + + Integrate and fire cell which returns to its leak reversal potential of **leakReversal** with a time constant **tau** +\n +:param leakReversal: +:type leakReversal: voltage +:param tau: +:type tau: time +:param thresh: The membrane potential at which to emit a spiking event and reset voltage +:type thresh: voltage +:param reset: The value the membrane potential is reset to on spiking +:type reset: voltage + + + + + + + + + + + + + + + Integrate and fire cell which returns to its leak reversal potential of **leakReversal** with a time course **tau.** It has a refractory period of **refract** after spiking +\n +:param refract: +:type refract: time +:param leakReversal: +:type leakReversal: voltage +:param tau: +:type tau: time +:param thresh: The membrane potential at which to emit a spiking event and reset voltage +:type thresh: voltage +:param reset: The value the membrane potential is reset to on spiking +:type reset: voltage + + + + + + + + + + + + Integrate and fire cell with capacitance **C,** **leakConductance** and **leakReversal** +\n +:param leakConductance: +:type leakConductance: conductance +:param leakReversal: +:type leakReversal: voltage +:param thresh: +:type thresh: voltage +:param reset: +:type reset: voltage +:param C: Total capacitance of the cell membrane +:type C: capacitance + + + + + + + + + + + + + + + + Integrate and fire cell with capacitance **C,** **leakConductance,** **leakReversal** and refractory period **refract** +\n +:param refract: +:type refract: time +:param leakConductance: +:type leakConductance: conductance +:param leakReversal: +:type leakReversal: voltage +:param thresh: +:type thresh: voltage +:param reset: +:type reset: voltage +:param C: Total capacitance of the cell membrane +:type C: capacitance + + + + + + + + + + + + Cell based on the 2003 model of 
Izhikevich, see http://izhikevich.org/publications/spikes.htm +\n +:param v0: Initial membrane potential +:type v0: voltage +:param a: Time scale of the recovery variable U +:type a: none +:param b: Sensitivity of U to the subthreshold fluctuations of the membrane potential V +:type b: none +:param c: After-spike reset value of V +:type c: none +:param d: After-spike increase to U +:type d: none +:param thresh: Spike threshold +:type thresh: voltage + + + + + + + + + + + + + + + + + Any cell with a membrane potential **v** with voltage units and a membrane capacitance **C.** Also defines exposed value **iSyn** for current due to external synapses and **iMemb** for total transmembrane current ( usually channel currents plus **iSyn** ) +\n +:param C: Total capacitance of the cell membrane +:type C: capacitance + + + + + + + + + + + + + + + + + + Cell based on the modified Izhikevich model in Izhikevich 2007, Dynamical systems in neuroscience, MIT Press +\n +:param v0: +:type v0: voltage +:param k: +:type k: conductance_per_voltage +:param vr: +:type vr: voltage +:param vt: +:type vt: voltage +:param vpeak: +:type vpeak: voltage +:param a: +:type a: per_time +:param b: +:type b: conductance +:param c: +:type c: voltage +:param d: +:type d: current +:param C: Total capacitance of the cell membrane +:type C: capacitance + + + + + + + + + + + + + + + + + + + + Model based on Brette R and Gerstner W ( 2005 ) Adaptive Exponential Integrate-and-Fire Model as an Effective Description of Neuronal Activity. 
J Neurophysiol 94:3637-3642 +\n +:param gL: +:type gL: conductance +:param EL: +:type EL: voltage +:param VT: +:type VT: voltage +:param thresh: +:type thresh: voltage +:param reset: +:type reset: voltage +:param delT: +:type delT: voltage +:param tauw: +:type tauw: time +:param refract: +:type refract: time +:param a: +:type a: conductance +:param b: +:type b: current +:param C: Total capacitance of the cell membrane +:type C: capacitance + + + + + + + + + + + + + + + + + + + + + Simple dimensionless model of spiking cell from FitzHugh and Nagumo. Superseded by **fitzHughNagumo1969Cell** ( See https://github.com/NeuroML/NeuroML2/issues/42 ) +\n +:param I: +:type I: none + + + + + + + + + + + + The Fitzhugh Nagumo model is a two-dimensional simplification of the Hodgkin-Huxley model of spike generation in squid giant axons. This system was suggested by FitzHugh ( FitzHugh R. [1961]: Impulses and physiological states in theoretical models of nerve membrane. Biophysical J. 1:445-466 ), who called it " Bonhoeffer-van der Pol model ", and the equivalent circuit by Nagumo et al. ( Nagumo J. , Arimoto S. , and Yoshizawa S. [1962] An active pulse transmission line simulating nerve axon. Proc IRE. 50:2061-2070. 1962 ). This version corresponds to the one described in FitzHugh R. [1969]: Mathematical models of excitation and propagation in nerve. Chapter 1 ( pp. 1-85 in H. P. Schwan, ed. Biological Engineering, McGraw-Hill Book Co. , N. Y. ) +\n +:param a: +:type a: none +:param b: +:type b: none +:param I: plays the role of an external injected current +:type I: none +:param phi: +:type phi: none +:param V0: +:type V0: none +:param W0: +:type W0: none + + + + + + + + + + + + + + + + + Reduced CA3 cell model from Pinsky and Rinzel 1994. 
See https://github.com/OpenSourceBrain/PinskyRinzelModel +\n +:param iSoma: +:type iSoma: currentDensity +:param iDend: +:type iDend: currentDensity +:param gLs: +:type gLs: conductanceDensity +:param gLd: +:type gLd: conductanceDensity +:param gNa: +:type gNa: conductanceDensity +:param gKdr: +:type gKdr: conductanceDensity +:param gCa: +:type gCa: conductanceDensity +:param gKahp: +:type gKahp: conductanceDensity +:param gKC: +:type gKC: conductanceDensity +:param gc: +:type gc: conductanceDensity +:param eNa: +:type eNa: voltage +:param eCa: +:type eCa: voltage +:param eK: +:type eK: voltage +:param eL: +:type eL: voltage +:param pp: +:type pp: none +:param cm: +:type cm: specificCapacitance +:param alphac: +:type alphac: none +:param betac: +:type betac: none +:param gNmda: +:type gNmda: conductanceDensity +:param gAmpa: +:type gAmpa: conductanceDensity +:param qd0: +:type qd0: none + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Cell with **segment** s specified in a **morphology** element along with details on its **biophysicalProperties** . NOTE: this can only be correctly simulated using jLEMS when there is a single segment in the cell, and **v** of this cell represents the membrane potential in that isopotential segment. + + + + + + + + + + + + + + + + + + + + + + Variant of cell with two independent Ca2+ pools. Cell with **segment** s specified in a **morphology** element along with details on its **biophysicalProperties** . NOTE: this can only be correctly simulated using jLEMS when there is a single segment in the cell, and **v** of this cell represents the membrane potential in that isopotential segment. + + + + + + + + + + + + + The collection of **segment** s which specify the 3D structure of the cell, along with a number of **segmentGroup** s + + + + + + + + + + + + + + A segment defines the smallest unit within a possibly branching structure ( **morphology** ), such as a dendrite or axon. 
Its **id** should be a nonnegative integer ( usually soma/root = 0 ). Its end points are given by the **proximal** and **distal** points. The **proximal** point can be omitted, usually because it is the same as a point on the **parent** segment, see **proximal** for details. **parent** specifies the parent segment. The first segment of a **cell** ( with no **parent** ) usually represents the soma. The shape is normally a cylinder ( radii of the **proximal** and **distal** equal, but positions different ) or a conical frustum ( radii and positions different ). If the x, y, z positions of the **proximal** and **distal** are equal, the segment can be interpreted as a sphere, and in this case the radii of these points must be equal. NOTE: LEMS does not yet support multicompartmental modelling, so the Dynamics here is only appropriate for single compartment modelling. + + + + + + + + + + + + + + + + + + + + + Base type for ComponentTypes which specify an ( **x,** **y,** **z** ) coordinate along with a **diameter.** Note: no dimension used in the attributes for these coordinates! These are assumed to have dimension micrometer ( 10^-6 m ). This is due to micrometers being the default option for the majority of neuronal morphology formats, and dimensions are omitted here to facilitate reading and writing of morphologies in NeuroML. +\n +:param x: x coordinate of the point. Note: no dimension used, see description of **point3DWithDiam** for details. +:type x: none +:param y: y coordinate of the point. Note: no dimension used, see description of **point3DWithDiam** for details. +:type y: none +:param z: z coordinate of the point. Note: no dimension used, see description of **point3DWithDiam** for details. +:type z: none +:param diameter: Diameter of the point. Note: no dimension used, see description of **point3DWithDiam** for details. +:type diameter: none + + + + + + + + + + + A method to describe a group of **segment** s in a **morphology** , e. g. 
soma_group, dendrite_group, axon_group. While a name is useful to describe the group, the **neuroLexId** attribute can be used to explicitly specify the meaning of the group, e. g. sao1044911821 for 'Neuronal Cell Body', sao1211023249 for 'Dendrite'. The **segment** s in this group can be specified as: a list of individual **member** segments; a **path** , all of the segments along which should be included; a **subTree** of the **cell** to include; other segmentGroups to **include** ( so all segments from those get included here ). An **inhomogeneousParameter** can be defined on the region of the cell specified by this group ( see **variableParameter** for usage ). + + + + + + + + + + + + + + + + + + + + An inhomogeneous parameter specified across the **segmentGroup** ( see **variableParameter** for usage ). + + + + + + + + + + + + + + + + Allowed metrics for InhomogeneousParam + + + + + + + + + + + + + + A single identified **segment** which is part of the **segmentGroup** + + + + + + + Include all members of another **segmentGroup** in this group + + + + + + + Include all the **segment** s between those specified by **from** and **to** , inclusive + + + + + + + + + + Include all the **segment** s distal to that specified by **from** in the **segmentGroup** + + + + + + + + + + + + + + + + The biophysical properties of the **cell** , including the **membraneProperties** and the **intracellularProperties** + + + + + + + + + + + + + + + The biophysical properties of the **cell** , including the **membraneProperties2CaPools** and the **intracellularProperties2CaPools** for a cell with two Ca pools + + + + + + + + + + + + + + + Properties specific to the membrane, such as the **populations** of channels, **channelDensities,** **specificCapacitance,** etc. + + + + + + + + + + + + + + + + + + + + + Variant of membraneProperties with 2 independent Ca pools + + + + + + + + + + + + + + Membrane potential at which to emit a spiking event. 
Note, usually the spiking event will not be emitted again until the membrane potential has fallen below this value and rises again to cross it in a positive direction +\n +:param value: +:type value: voltage + + + + + + + + + Capacitance per unit area +\n +:param value: +:type value: specificCapacitance + + + + + + + + + Explicitly set initial membrane potential for the cell +\n +:param value: +:type value: voltage + + + + + + + + + The resistivity, or specific axial resistance, of the cytoplasm +\n +:param value: +:type value: resistivity + + + + + + + + + Population of a **number** of ohmic ion channels. These each produce a conductance **channelg** across a reversal potential **erev,** giving a total current **i.** Note that active membrane currents are more frequently specified as a density over an area of the **cell** using **channelDensity** +\n +:param number: The number of channels present. This will be multiplied by the time varying conductance of the individual ion channel ( which extends **baseIonChannel** ) to produce the total conductance +:type number: none +:param erev: The reversal potential of the current produced +:type erev: voltage + + + + + + + + + + + + + + + + + + + + + + + + Specifies a time varying ohmic conductance density, which is distributed on a region of the **cell.** The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON +\n +:param erev: The reversal potential of the current produced +:type erev: voltage + + + + + + + + + + + + + + + + + + + + Specifies a time varying conductance density, which is distributed on a region of the **cell,** and whose reversal potential is calculated from the Nernst equation. Hard coded for Ca only!. 
The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON + + + + + + + + + + + + + + + + + + Specifies a time varying conductance density, which is distributed on a region of the **cell,** and whose current is calculated from the Goldman-Hodgkin-Katz equation. Hard coded for Ca only!. The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON + + + + + + + + + + + + + + + + + + Specifies a time varying ohmic conductance density, **gDensity,** which is distributed on an area of the **cell** ( specified in **membraneProperties** ) with fixed reversal potential **erev** producing a current density **iDensity** +\n +:param erev: The reversal potential of the current produced +:type erev: voltage +:param condDensity: +:type condDensity: conductanceDensity + + + + + + + + + + + + + + + + + + + + + + + + Same as **channelDensity** , but with a **vShift** parameter to change voltage activation of gates. The exact usage of **vShift** in expressions for rates is determined by the individual gates. 
+\n +:param vShift: +:type vShift: voltage +:param erev: The reversal potential of the current produced +:type erev: voltage +:param condDensity: +:type condDensity: conductanceDensity + + + + + + + + + + + + Specifies a time varying conductance density, **gDensity,** which is distributed on an area of the **cell,** producing a current density **iDensity** and whose reversal potential is calculated from the Nernst equation. Hard coded for Ca only! See https://github.com/OpenSourceBrain/ghk-nernst. +\n +:param condDensity: +:type condDensity: conductanceDensity + + + + + + + + + + + + + + + + + + + + + + + This component is similar to the original component type **channelDensityNernst** but it is changed in order to have a reversal potential that depends on a second independent Ca++ pool ( ca2 ). See https://github.com/OpenSourceBrain/ghk-nernst. +\n +:param condDensity: +:type condDensity: conductanceDensity + + + + + + + + + + + + Specifies a time varying conductance density, **gDensity,** which is distributed on an area of the cell, producing a current density **iDensity** and whose reversal potential is calculated from the Goldman Hodgkin Katz equation. Hard coded for Ca only! See https://github.com/OpenSourceBrain/ghk-nernst. +\n +:param permeability: +:type permeability: permeability + + + + + + + + + + + + + + + + + + + + Time varying conductance density, **gDensity,** which is distributed on an area of the cell, producing a current density **iDensity.** Modified version of Jaffe et al. 1994 ( used also in Lawrence et al. 2006 ). See https://github.com/OpenSourceBrain/ghk-nernst. +\n +:param condDensity: +:type condDensity: conductanceDensity + + + + + + + + + + + + + + + + + + + + + Specifies a **parameter** ( e. g. condDensity ) which can vary its value across a **segmentGroup.** The value is calculated from **value** attribute of the **inhomogeneousValue** subelement. 
This element is normally a child of **channelDensityNonUniform** , **channelDensityNonUniformNernst** or **channelDensityNonUniformGHK** and is used to calculate the value of the conductance, etc. which will vary on different parts of the cell. The **segmentGroup** specified here needs to define an **inhomogeneousParameter** ( referenced from **inhomogeneousParameter** in the **inhomogeneousValue** ), which calculates a **variable** ( e. g. p ) varying across the cell ( e. g. based on the path length from soma ), which is then used in the **value** attribute of the **inhomogeneousValue** ( so for example condDensity = f( p ) ) + + + + + + + + + + + Specifies the **value** of an **inhomogeneousParameter.** For usage see **variableParameter** + + + + + + + + + Description of a chemical species identified by **ion,** which has internal, **concentration,** and external, **extConcentration** values for its concentration +\n +:param initialConcentration: +:type initialConcentration: concentration +:param initialExtConcentration: +:type initialExtConcentration: concentration + + + + + + + + + + + + + + + + + + + + + + + + Biophysical properties related to the intracellular space within the **cell** , such as the **resistivity** and the list of ionic **species** present. **caConc** and **caConcExt** are explicitly exposed here to facilitate accessing these values from other Components, even though **caConcExt** is clearly not an intracellular property + + + + + + + + + + Variant of intracellularProperties with 2 independent Ca pools + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Generates a constant current pulse of a certain **amplitude** for a specified **duration** after a **delay.** Scaled by **weight,** if set +\n +:param delay: Delay before change in current. Current is zero prior to this. +:type delay: time +:param duration: Duration for holding current at amplitude. Current is zero after delay + duration. 
+:type duration: time +:param amplitude: Amplitude of current pulse +:type amplitude: current + + + + + + + + + + + + + + Dimensionless equivalent of **pulseGenerator** . Generates a constant current pulse of a certain **amplitude** for a specified **duration** after a **delay.** Scaled by **weight,** if set +\n +:param delay: Delay before change in current. Current is zero prior to this. +:type delay: time +:param duration: Duration for holding current at amplitude. Current is zero after delay + duration. +:type duration: time +:param amplitude: Amplitude of current pulse +:type amplitude: none + + + + + + + + + + + + + + Generates a sinusoidally varying current after a time **delay,** for a fixed **duration.** The **period** and maximum **amplitude** of the current can be set as well as the **phase** at which to start. Scaled by **weight,** if set +\n +:param phase: Phase ( between 0 and 2*pi ) at which to start the varying current ( i. e. at time given by delay ) +:type phase: none +:param delay: Delay before change in current. Current is zero prior to this. +:type delay: time +:param duration: Duration for holding current at amplitude. Current is zero after delay + duration. +:type duration: time +:param amplitude: Maximum amplitude of current +:type amplitude: current +:param period: Time period of oscillation +:type period: time + + + + + + + + + + + + + + + + Dimensionless equivalent of **sineGenerator** . Generates a sinusoidally varying current after a time **delay,** for a fixed **duration.** The **period** and maximum **amplitude** of the current can be set as well as the **phase** at which to start. Scaled by **weight,** if set +\n +:param phase: Phase ( between 0 and 2*pi ) at which to start the varying current ( i. e. at time given by delay ) +:type phase: none +:param delay: Delay before change in current. Current is zero prior to this. +:type delay: time +:param duration: Duration for holding current at amplitude. 
Current is zero after delay + duration. +:type duration: time +:param amplitude: Maximum amplitude of current +:type amplitude: none +:param period: Time period of oscillation +:type period: time + + + + + + + + + + + + + + + + Generates a ramping current after a time **delay,** for a fixed **duration.** During this time the current steadily changes from **startAmplitude** to **finishAmplitude.** Scaled by **weight,** if set +\n +:param delay: Delay before change in current. Current is baselineAmplitude prior to this. +:type delay: time +:param duration: Duration for holding current at amplitude. Current is baselineAmplitude after delay + duration. +:type duration: time +:param startAmplitude: Amplitude of linearly varying current at time delay +:type startAmplitude: current +:param finishAmplitude: Amplitude of linearly varying current at time delay + duration +:type finishAmplitude: current +:param baselineAmplitude: Amplitude of current before time delay, and after time delay + duration +:type baselineAmplitude: current + + + + + + + + + + + + + + + + Dimensionless equivalent of **rampGenerator** . Generates a ramping current after a time **delay,** for a fixed **duration.** During this time the dimensionless current steadily changes from **startAmplitude** to **finishAmplitude.** Scaled by **weight,** if set +\n +:param delay: Delay before change in current. Current is baselineAmplitude prior to this. +:type delay: time +:param duration: Duration for holding current at amplitude. Current is baselineAmplitude after delay + duration. 
+:type duration: time +:param startAmplitude: Amplitude of linearly varying current at time delay +:type startAmplitude: none +:param finishAmplitude: Amplitude of linearly varying current at time delay + duration +:type finishAmplitude: none +:param baselineAmplitude: Amplitude of current before time delay, and after time delay + duration +:type baselineAmplitude: none + + + + + + + + + + + + + + + + Generates a current which is the sum of all its child **basePointCurrent** element, e. g. can be a combination of **pulseGenerator** , **sineGenerator** elements producing a single **i.** Scaled by **weight,** if set + + + + + + + + + + + + + + + Generates a current which is the sum of all its child **basePointCurrentDL** elements, e. g. can be a combination of **pulseGeneratorDL** , **sineGeneratorDL** elements producing a single **i.** Scaled by **weight,** if set + + + + + + + + + + + + + + + Voltage clamp. Applies a variable current **i** to try to keep parent at **targetVoltage.** Not yet fully tested!!! Consider using voltageClampTriple!! +\n +:param delay: Delay before change in current. Current is zero prior to this. +:type delay: time +:param duration: Duration for attempting to keep parent at targetVoltage. Current is zero after delay + duration. +:type duration: time +:param targetVoltage: Current will be applied to try to get parent to this target voltage +:type targetVoltage: voltage +:param simpleSeriesResistance: Current will be calculated by the difference in voltage between the target and parent, divided by this value +:type simpleSeriesResistance: resistance + + + + + + + + + + + + + + + Voltage clamp with 3 clamp levels. Applies a variable current **i** ( through **simpleSeriesResistance** ) to try to keep parent cell at **conditioningVoltage** until time **delay,** **testingVoltage** until **delay** + **duration,** and **returnVoltage** afterwards. Only enabled if **active** = 1. 
+\n +:param active: Whether the voltage clamp is active ( 1 ) or inactive ( 0 ). +:type active: none +:param delay: Delay before switching from conditioningVoltage to testingVoltage. +:type delay: time +:param duration: Duration to hold at testingVoltage. +:type duration: time +:param conditioningVoltage: Target voltage before time delay +:type conditioningVoltage: voltage +:param testingVoltage: Target voltage between times delay and delay + duration +:type testingVoltage: voltage +:param returnVoltage: Target voltage after time duration +:type returnVoltage: voltage +:param simpleSeriesResistance: Current will be calculated by the difference in voltage between the target and parent, divided by this value +:type simpleSeriesResistance: resistance + + + + + + + + + + + + + + + + + + Emits a single spike at the specified **time** +\n +:param time: Time at which to emit one spike event +:type time: time + + + + + + + + + + + + Set of spike ComponentTypes, each emitting one spike at a certain time. Can be used to feed a predetermined spike train into a cell + + + + + + + + + + + + + Spike array connected to a single **synapse,** producing a current triggered by each **spike** in the array. + + + + + + + + + + + + + + + Simple generator of spikes at a regular interval set by **period** +\n +:param period: Time between spikes. The first spike will be emitted after this time. 
+:type period: time + + + + + + + + + + + + Generator of spikes with a random interspike interval of at least **minISI** and at most **maxISI** +\n +:param maxISI: Maximum interspike interval +:type maxISI: time +:param minISI: Minimum interspike interval +:type minISI: time + + + + + + + + + + + + + Generator of spikes whose ISI is distributed according to an exponential PDF with scale: 1 / **averageRate** +\n +:param averageRate: The average rate at which spikes are emitted +:type averageRate: per_time + + + + + + + + + + + + Generator of spikes whose ISI distribution is the maximum entropy distribution over [ **minimumISI,** +infinity ) with mean: 1 / **averageRate** +\n +:param minimumISI: The minimum interspike interval +:type minimumISI: time +:param averageRate: The average rate at which spikes are emitted +:type averageRate: per_time + + + + + + + + + + + + Poisson spike generator firing at **averageRate,** which is connected to single **synapse** that is triggered every time a spike is generated, producing an input current. See also **transientPoissonFiringSynapse** . +\n +:param averageRate: The average rate at which spikes are emitted +:type averageRate: per_time + + + + + + + + + + + + + + Poisson spike generator firing at **averageRate** after a **delay** and for a **duration,** connected to single **synapse** that is triggered every time a spike is generated, providing an input current. Similar to ComponentType **poissonFiringSynapse** . +\n +:param averageRate: +:type averageRate: per_time +:param delay: +:type delay: time +:param duration: +:type duration: time + + + + + + + + + + + + + + + + + + + Network containing: **population** s ( potentially of type **populationList** , and so specifying a list of cell **location** s ); **projection** s ( with lists of **connection** s ) and/or **explicitConnection** s; and **inputList** s ( with lists of **input** s ) and/or **explicitInput** s. 
Note: often in NeuroML this will be of type **networkWithTemperature** if there are temperature dependent elements ( e. g. ion channels ). + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Initial attempt to specify 3D region for placing cells. Work in progress. . . + + + + + + + + + + + + + + + A population of components, with just one parameter for the **size,** i. e. number of components to create. Note: quite often this is used with type= **populationList** which means the size is determined by the number of **instance** s ( with **location** s ) in the list. The **size** attribute is still set, and there will be a validation error if this does not match the number in the list. +\n +:param size: Number of instances of this Component to create when the population is instantiated +:type size: none + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Specifies a single instance of a component in a **population** ( placed at **location** ). + + + + + + + + + + + + + + + + Specifies the ( x, y, z ) location of a single **instance** of a component in a **population** +\n +:param x: +:type x: none +:param y: +:type y: none +:param z: +:type z: none + + + + + + + + + + + + + + + + + + + + + Explicit event connection between named components, which gets processed via a new instance of a **synapse** component which is created on the target component + + + + + + + + + + Base for projection (set of synaptic connections) between two populations + + + + + + + + + + + Projection from one population, **presynapticPopulation** to another, **postsynapticPopulation,** through **synapse.** Contains lists of **connection** or **connectionWD** elements. + + + + + + + + + + + + + + + Base of all synaptic connections (chemical/electrical/analog, etc.) inside projections + + + + + + + + + + Base of all synaptic connections with preCellId, postSegmentId, etc. 
+ Note: this is not the best name for these attributes, since Id is superfluous, hence BaseConnectionNewFormat + + + + + + + + + + + + + + + Base of all synaptic connections with preCell, postSegment, etc. + See BaseConnectionOldFormat + + + + + + + + + + + + + + + Event connection directly between named components, which gets processed via a new instance of a **synapse** component which is created on the target component. Normally contained inside a **projection** element. + + + + + + + + + + Event connection between named components, which gets processed via a new instance of a synapse component which is created on the target component, includes setting of **weight** and **delay** for the synaptic connection +\n +:param weight: +:type weight: none +:param delay: +:type delay: time + + + + + + + + + + + + + A projection between **presynapticPopulation** to another **postsynapticPopulation** through gap junctions. + + + + + + + + + + + + + + + To enable connections between populations through gap junctions. + + + + + + + + + + + To enable connections between populations through gap junctions. Populations need to be of type **populationList** and contain **instance** and **location** elements. + + + + + + + + + To enable connections between populations through gap junctions. Populations need to be of type **populationList** and contain **instance** and **location** elements. Includes setting of **weight** for the connection +\n +:param weight: +:type weight: none + + + + + + + + + + + + A projection between **presynapticPopulation** and **postsynapticPopulation** through components **preComponent** at the start and **postComponent** at the end of a **continuousConnection** or **continuousConnectionInstance** . Can be used for analog synapses. 
+ + + + + + + + + + + + + + + An instance of a connection in a **continuousProjection** between **presynapticPopulation** to another **postsynapticPopulation** through a **preComponent** at the start and **postComponent** at the end. Can be used for analog synapses. + + + + + + + + + + + + An instance of a connection in a **continuousProjection** between **presynapticPopulation** to another **postsynapticPopulation** through a **preComponent** at the start and **postComponent** at the end. Populations need to be of type **populationList** and contain **instance** and **location** elements. Can be used for analog synapses. + + + + + + + + + An instance of a connection in a **continuousProjection** between **presynapticPopulation** to another **postsynapticPopulation** through a **preComponent** at the start and **postComponent** at the end. Populations need to be of type **populationList** and contain **instance** and **location** elements. Can be used for analog synapses. Includes setting of **weight** for the connection +\n +:param weight: +:type weight: none + + + + + + + + + + + + An explicit input ( anything which extends **basePointCurrent** ) to a target cell in a population + + + + + + + + + An explicit list of **input** s to a **population.** + + + + + + + + + + + + + + + + Specifies a single input to a **target,** optionally giving the **segmentId** ( default 0 ) and **fractionAlong** the segment ( default 0. 5 ). + + + + + + + + + + + Specifies input lists. Can set **weight** to scale individual inputs. +\n +:param weight: +:type weight: none + + + + + + + + + + + + + + + Base type of any PyNN standard cell model. Note: membrane potential **v** has dimensions voltage, but all other parameters are dimensionless. 
This is to facilitate translation to and from PyNN scripts in Python, where these parameters have implicit units, see http://neuralensemble.org/trac/PyNN/wiki/StandardModels +\n +:param cm: +:type cm: none +:param i_offset: +:type i_offset: none +:param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_E: none +:param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_I: none +:param v_init: +:type v_init: none + + + + + + + + + + + + + + + + Base type of any PyNN standard integrate and fire model +\n +:param tau_refrac: +:type tau_refrac: none +:param v_thresh: +:type v_thresh: none +:param tau_m: +:type tau_m: none +:param v_rest: +:type v_rest: none +:param v_reset: +:type v_reset: none +:param cm: +:type cm: none +:param i_offset: +:type i_offset: none +:param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_E: none +:param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_I: none +:param v_init: +:type v_init: none + + + + + + + + + + + + + + + + Base type of conductance based PyNN IaF cell models +\n +:param e_rev_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type e_rev_E: none +:param e_rev_I: This parameter is never used in the NeuroML2 description of this cell! 
Any synapse producing a current can be placed on this cell +:type e_rev_I: none +:param tau_refrac: +:type tau_refrac: none +:param v_thresh: +:type v_thresh: none +:param tau_m: +:type tau_m: none +:param v_rest: +:type v_rest: none +:param v_reset: +:type v_reset: none +:param cm: +:type cm: none +:param i_offset: +:type i_offset: none +:param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_E: none +:param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_I: none +:param v_init: +:type v_init: none + + + + + + + + + + + + + Leaky integrate and fire model with fixed threshold and alpha-function-shaped post-synaptic current +\n +:param tau_refrac: +:type tau_refrac: none +:param v_thresh: +:type v_thresh: none +:param tau_m: +:type tau_m: none +:param v_rest: +:type v_rest: none +:param v_reset: +:type v_reset: none +:param cm: +:type cm: none +:param i_offset: +:type i_offset: none +:param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_E: none +:param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_I: none +:param v_init: +:type v_init: none + + + + + + + + + + + Leaky integrate and fire model with fixed threshold and decaying-exponential post-synaptic current +\n +:param tau_refrac: +:type tau_refrac: none +:param v_thresh: +:type v_thresh: none +:param tau_m: +:type tau_m: none +:param v_rest: +:type v_rest: none +:param v_reset: +:type v_reset: none +:param cm: +:type cm: none +:param i_offset: +:type i_offset: none +:param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! 
Any synapse producing a current can be placed on this cell +:type tau_syn_E: none +:param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_I: none +:param v_init: +:type v_init: none + + + + + + + + + + + Leaky integrate and fire model with fixed threshold and alpha-function-shaped post-synaptic conductance +\n +:param e_rev_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type e_rev_E: none +:param e_rev_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type e_rev_I: none +:param tau_refrac: +:type tau_refrac: none +:param v_thresh: +:type v_thresh: none +:param tau_m: +:type tau_m: none +:param v_rest: +:type v_rest: none +:param v_reset: +:type v_reset: none +:param cm: +:type cm: none +:param i_offset: +:type i_offset: none +:param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_E: none +:param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_I: none +:param v_init: +:type v_init: none + + + + + + + + + + + Leaky integrate and fire model with fixed threshold and exponentially-decaying post-synaptic conductance +\n +:param e_rev_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type e_rev_E: none +:param e_rev_I: This parameter is never used in the NeuroML2 description of this cell! 
Any synapse producing a current can be placed on this cell +:type e_rev_I: none +:param tau_refrac: +:type tau_refrac: none +:param v_thresh: +:type v_thresh: none +:param tau_m: +:type tau_m: none +:param v_rest: +:type v_rest: none +:param v_reset: +:type v_reset: none +:param cm: +:type cm: none +:param i_offset: +:type i_offset: none +:param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_E: none +:param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_I: none +:param v_init: +:type v_init: none + + + + + + + + + + + Adaptive exponential integrate and fire neuron according to Brette R and Gerstner W ( 2005 ) with exponentially-decaying post-synaptic conductance +\n +:param v_spike: +:type v_spike: none +:param delta_T: +:type delta_T: none +:param tau_w: +:type tau_w: none +:param a: +:type a: none +:param b: +:type b: none +:param e_rev_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type e_rev_E: none +:param e_rev_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type e_rev_I: none +:param tau_refrac: +:type tau_refrac: none +:param v_thresh: +:type v_thresh: none +:param tau_m: +:type tau_m: none +:param v_rest: +:type v_rest: none +:param v_reset: +:type v_reset: none +:param cm: +:type cm: none +:param i_offset: +:type i_offset: none +:param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_E: none +:param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! 
Any synapse producing a current can be placed on this cell +:type tau_syn_I: none +:param v_init: +:type v_init: none + + + + + + + + + + + + + + + + Adaptive exponential integrate and fire neuron according to Brette R and Gerstner W ( 2005 ) with alpha-function-shaped post-synaptic conductance +\n +:param v_spike: +:type v_spike: none +:param delta_T: +:type delta_T: none +:param tau_w: +:type tau_w: none +:param a: +:type a: none +:param b: +:type b: none +:param e_rev_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type e_rev_E: none +:param e_rev_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type e_rev_I: none +:param tau_refrac: +:type tau_refrac: none +:param v_thresh: +:type v_thresh: none +:param tau_m: +:type tau_m: none +:param v_rest: +:type v_rest: none +:param v_reset: +:type v_reset: none +:param cm: +:type cm: none +:param i_offset: +:type i_offset: none +:param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_E: none +:param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_I: none +:param v_init: +:type v_init: none + + + + + + + + + + + Single-compartment Hodgkin-Huxley-type neuron with transient sodium and delayed-rectifier potassium currents using the ion channel models from Traub. 
+\n +:param gbar_K: +:type gbar_K: none +:param gbar_Na: +:type gbar_Na: none +:param g_leak: +:type g_leak: none +:param e_rev_K: +:type e_rev_K: none +:param e_rev_Na: +:type e_rev_Na: none +:param e_rev_leak: +:type e_rev_leak: none +:param v_offset: +:type v_offset: none +:param e_rev_E: +:type e_rev_E: none +:param e_rev_I: +:type e_rev_I: none +:param cm: +:type cm: none +:param i_offset: +:type i_offset: none +:param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_E: none +:param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell +:type tau_syn_I: none +:param v_init: +:type v_init: none + + + + + + + + + + + + + + + + + + + + Base type for all PyNN synapses. Note, the current **I** produced is dimensionless, but it requires a membrane potential **v** with dimension voltage +\n +:param tau_syn: +:type tau_syn: none + + + + + + + + + + + + Conductance based synapse with instantaneous rise and single exponential decay ( with time constant tau_syn ) +\n +:param e_rev: +:type e_rev: none +:param tau_syn: +:type tau_syn: none + + + + + + + + + + + + Alpha synapse: rise time and decay time are both tau_syn. Conductance based synapse. +\n +:param e_rev: +:type e_rev: none +:param tau_syn: +:type tau_syn: none + + + + + + + + + + + + Current based synapse with instantaneous rise and single exponential decay ( with time constant tau_syn ) +\n +:param tau_syn: +:type tau_syn: none + + + + + + + + + + + + Alpha synapse: rise time and decay time are both tau_syn. Current based synapse. +\n +:param tau_syn: +:type tau_syn: none + + + + + + + + + + + + Spike source, generating spikes according to a Poisson process. 
+\n +:param start: +:type start: time +:param duration: +:type duration: time +:param rate: +:type rate: per_time + + + + + + + + + + + + + + + + + + Base element without ID specified *yet*, e.g. for an element with a particular requirement on its id which does not comply with NmlId (e.g. Segment needs nonNegativeInteger). + + + + + + Anything which can have a unique (within its parent) id, which must be an integer zero or greater. + + + + + + + + + + Anything which can have a unique (within its parent) id of the form NmlId (spaceless combination of letters, numbers and underscore). + + + + + + + + + + Elements which can stand alone and be referenced by id, e.g. cell, morphology. + + + + + + + + + + + + + + + + diff --git a/neuroml/nml/README.md b/neuroml/nml/README.md index 9a295246..483add3d 100644 --- a/neuroml/nml/README.md +++ b/neuroml/nml/README.md @@ -1,34 +1,72 @@ ## Autogeneration of API, using generateds_config.py to ensure correct naming conventions. -This requires [generateDS.py](http://www.davekuhlman.org/generateDS.html), version >= 2.20a. +Please regenerate nml.py in a new branch and open a pull request against the `development` branch. -You can get it from [PyPi](http://pypi.python.org/pypi/generateDS/) or [Source Forge](https://sourceforge.net/projects/generateds/). +Please **do not** regenerate nml.py and push directly to the `master` branch because regeneration of `nml.py` may change the libNeuroML API, and so this could be a backwards incompatible change which must not be pushed to master without a corresponding release and sufficient testing. -All that is needed is the Schema - as long as generateds_config.py and helper_methods are present, nml.py should be generated correctly. +### Requirements -Unit tests should be run to confirm this. 
+- the Schema (XSD) file +- `generateds_config.py`: this includes the correct naming conventions +- `helper_methods.py`: this includes additional helper methods that we want to add to the API -generateDS.py should be invoked in this folder (so that generateds_config.py can be located) with the following command (namespace def here will be mended when it's become stable) +The following packages are required. - generateDS.py -o nml.py --use-getter-setter=none --silence --user-methods=helper_methods NeuroML_v2.1.xsd +- generateds +- black -You may have to add the current folder to your PYTHONPATH, i.e. +They can be installed using the `requirements-dev.txt` file in the root directory: + +``` +pip install -r requirements-dev.txt +``` + +### Regenerating nml.py + +- Run the `regenerate-nml.sh` script in the `neuroml/nml` folder. +- This will use the API version defined in `neuroml/__init__.py` to find the right schema XSD file and run generateds to regenerate the `nml.py` file. +- It will also run `black` on the newly generated file to reformat it. + +The generateDS command that is invoked is of this form: + + generateDS.py -o nml.py --use-getter-setter=none --silence --user-methods=helper_methods.py NeuroML_v2.2.xsd + +You will need to add the current folder to your PYTHONPATH so that the required files can be imported by generateDS. export PYTHONPATH=$PYTHONPATH:. -Note that generateDS.py will import the generateds_config.py file and run it. -Your output should, therefore, include lines of the form: +Your output should include lines of the form: generateds_config.py is being processed Saving NameTable to csv file: name_table.csv Saving name changes table to csv file: changed_names.csv -If these are not included in the output, generateds_config.py has not run, and the generated nml.py file will be incorrect. +If these are not included in the output, generateDS has not run correctly, and the generated nml.py file will be incorrect. 
+ +### Testing + +Please remember to: + +- rebuild libNeuroML after regenerating `nml.py` +- run all unit tests +- run all examples + +The CI on GitHub will always run these. ### Changelog +#### August 26, 2021 + +Author: @sanjayankur31 + +Generate with Python 3.9, generateDS version 2.39.9 + +- all tests pass +- `--use-getter-setter=none`, which causes test failures because of probable generateDS bug. See: https://sourceforge.net/p/generateds/tickets/20/ +- also requires us to manually patch one or two conversions to work around https://sourceforge.net/p/generateds/tickets/13/ + #### March 26, 2020 Author: @sanjayankur31 diff --git a/neuroml/nml/changed_names.csv b/neuroml/nml/changed_names.csv index 57ff6704..f16984f0 100644 --- a/neuroml/nml/changed_names.csv +++ b/neuroml/nml/changed_names.csv @@ -1,233 +1,233 @@ +http://www.w3.org/2001/XMLSchema http://www.w3.org/2001/XMLSchema.xsd,http://www.w3.org/2001/xml_schema http://www.w3.org/2001/xml_schema.xsd +property,properties +unbounded,unboundeds +neuroLexId,neuro_lex_id +fractionAlong,fraction_along +segmentId,segment_id +population,populations +inputW,input_ws +postComponent,post_component +preComponent,pre_component +continuousConnectionInstanceW,continuous_connection_instance_ws +continuousConnectionInstance,continuous_connection_instances +continuousConnection,continuous_connections +electricalConnectionInstanceW,electrical_connection_instance_ws +electricalConnectionInstance,electrical_connection_instances +electricalConnection,electrical_connections +postFractionAlong,post_fraction_along +postSegment,post_segment +postCell,post_cell +preFractionAlong,pre_fraction_along +preSegment,pre_segment +preCell,pre_cell +postSegmentId,post_segment_id +postCellId,post_cell_id +preSegmentId,pre_segment_id +preCellId,pre_cell_id +connectionWD,connection_wds +connection,connections +postsynapticPopulation,postsynaptic_population +presynapticPopulation,presynaptic_population skip,skips -gateHHtauInf,gate_hh_tau_infs 
-sineGenerator,sine_generators -sineGeneratorDL,sine_generator_dls -cell2CaPools,cell2_ca_poolses -xSpacing,x_spacing -plasticityMechanism,plasticity_mechanism -alphaCondSynapse,alpha_cond_synapses -channelDensityNonUniform,channel_density_non_uniforms +xs:nonNegativeInteger,xs:non_negative_integer +zSize,z_size +ySize,y_size +xSize,x_size +region,regions +space,spaces +populationTypes,population_types +populationList,population_list +extracellularProperties,extracellular_properties +instance,instances +allowedSpaces,allowed_spaces zStart,z_start -membraneProperties,membrane_properties -inhomogeneousParameter,inhomogeneous_parameters -delT,del_t +yStart,y_start +xStart,x_start +zSpacing,z_spacing +ySpacing,y_spacing +xSpacing,x_spacing +basedOn,based_on +networkTypes,network_types +networkWithTemperature,network_with_temperature +network,networks +inputList,input_lists +explicitInput,explicit_inputs +continuousProjection,continuous_projections +electricalProjection,electrical_projections +projection,projections +synapticConnection,synaptic_connections +cellSet,cell_sets +spikeTarget,spike_target +averageRate,average_rate +minimumISI,minimum_isi +minISI,min_isi +maxISI,max_isi +spike,spikes +simpleSeriesResistance,simple_series_resistance +returnVoltage,return_voltage +testingVoltage,testing_voltage +conditioningVoltage,conditioning_voltage targetVoltage,target_voltage -xs:anyURI,xs:any_uri -gLd,g_ld -biophysicalProperties,biophysical_properties -presynapticPopulation,presynaptic_population +rampGeneratorDL,ramp_generator_dls +sineGeneratorDL,sine_generator_dls +pulseGeneratorDL,pulse_generator_dls +rampGenerator,ramp_generators +sineGenerator,sine_generators +pulseGenerator,pulse_generators +baselineAmplitude,baseline_amplitude finishAmplitude,finish_amplitude -gLs,g_ls +startAmplitude,start_amplitude +resistivity,resistivities +decayingPoolConcentrationModel,decaying_pool_concentration_models segmentGroup,segment_groups -alphaCurrentSynapse,alpha_current_synapses 
-poissonFiringSynapse,poisson_firing_synapses -cell,cells -path,paths -alphaCurrSynapse,alpha_curr_synapses -postsynapticPopulation,postsynaptic_population -gKdr,g_kdr -preFractionAlong,pre_fraction_along -projection,projections -forwardTransition,forward_transition -averageRate,average_rate -cellSet,cell_sets +initialExtConcentration,initial_ext_concentration +initialConcentration,initial_concentration +concentrationModel,concentration_model +inhomogeneousParameter,inhomogeneous_parameters +inhomogeneousValue,inhomogeneous_value +segment,segments +condDensity,cond_density +ionChannel,ion_channel +variableParameter,variable_parameters +vShift,v_shift channelDensityNernstCa2,channel_density_nernst_ca2s -postSegmentId,post_segment_id -spike,spikes -fitzHughNagumoCell,fitz_hugh_nagumo_cells -channelPopulation,channel_populations -continuousProjection,continuous_projections -include,includes +initMembPotential,init_memb_potentials specificCapacitance,specific_capacitances -allowedSpaces,allowed_spaces -xSize,x_size -explicitInput,explicit_inputs -xs:nonNegativeInteger,xs:non_negative_integer -concentrationModel,concentration_model -ionChannelHH,ion_channel_hhs +spikeThresh,spike_threshes +channelDensityNonUniformGHK,channel_density_non_uniform_ghks +channelDensityNonUniformNernst,channel_density_non_uniform_nernsts +channelDensityNonUniform,channel_density_non_uniforms +channelDensityGHK2,channel_density_ghk2s channelDensityGHK,channel_density_ghks -intracellularProperties2CaPools,intracellular_properties2_ca_pools -linearGradedSynapse,linear_graded_synapses -iafCell,iaf_cells -silentSynapse,silent_synapses -spikeGeneratorRandom,spike_generator_randoms -gate,gates -steadyState,steady_state -channelDensity,channel_densities -expCondSynapse,exp_cond_synapses -continuousConnectionInstanceW,continuous_connection_instance_ws -expTwoSynapse,exp_two_synapses -gateKS,gate_kses -izhikevich2007Cell,izhikevich2007_cells -decayConstant,decay_constant 
-expOneSynapse,exp_one_synapses -baseCell,base_cells -tauDecay,tau_decay -iDend,i_dend -tsodyksMarkramDepFacMechanism,tsodyks_markram_dep_fac_mechanism -reverseRate,reverse_rate channelDensityNernst,channel_density_nernsts -reverseTransition,reverse_transition -preComponent,pre_component -minISI,min_isi -gCa,g_ca -eNa,e_na -transientPoissonFiringSynapse,transient_poisson_firing_synapses -preCell,pre_cell -blockConcentration,block_concentration -inputList,input_lists -tauRec,tau_rec -networkTypes,network_types -inhomogeneousValue,inhomogeneous_value +channelDensityVShift,channel_density_v_shifts +channelDensity,channel_densities +channelPopulation,channel_populations +intracellularProperties2CaPools,intracellular_properties2_ca_pools +membraneProperties2CaPools,membrane_properties2_ca_pools +intracellularProperties,intracellular_properties +membraneProperties,membrane_properties +normalizationEnd,normalization_end +translationStart,translation_start +subTree,sub_trees +path,paths +include,includes +member,members +biophysicalProperties2CaPools,biophysical_properties2_ca_pools +biophysicalProperties,biophysical_properties eL,e_l -network,networks eK,e_k -space,spaces -forwardRate,forward_rate -gKC,g_kc -rampGeneratorDL,ramp_generator_dls -neuroLexId,neuro_lex_id -returnVoltage,return_voltage -intracellularProperties,intracellular_properties -q10Settings,q10_settings -initialConcentration,initial_concentration -decayingPoolConcentrationModel,decaying_pool_concentration_models -voltageClampTriple,voltage_clamp_triples -alphaSynapse,alpha_synapses -preCellId,pre_cell_id -pulseGeneratorDL,pulse_generator_dls -gateHHrates,gate_hh_rates -population,populations eCa,e_ca -shellThickness,shell_thickness -expCurrSynapse,exp_curr_synapses -resistivity,resistivities -ySpacing,y_spacing -ySize,y_size -gateFractional,gate_fractionals -blockMechanism,block_mechanism -instance,instances -connectionWD,connection_wds -gradedSynapse,graded_synapses -gapJunction,gap_junctions 
-baselineAmplitude,baseline_amplitude -segment,segments -postFractionAlong,post_fraction_along -ionChannelVShift,ion_channel_v_shifts -spikeGeneratorRefPoisson,spike_generator_ref_poissons -channelTypes,channel_types -xStart,x_start -voltageClamp,voltage_clamps -testingVoltage,testing_voltage -adExIaFCell,ad_ex_ia_f_cells -ionChannelKS,ion_channel_kses -preSegmentId,pre_segment_id -spikeThresh,spike_threshes -spikeGeneratorPoisson,spike_generator_poissons -vShift,v_shift -ionChannelPassive,ion_channel_passive -subGate,sub_gates -gL,g_l -tauRise,tau_rise -izhikevichCell,izhikevich_cells -postCellId,post_cell_id -condDensity,cond_density -gateHHratesTauInf,gate_h_hrates_tau_infs -fixedFactorConcentrationModel,fixed_factor_concentration_models -fixedQ10,fixed_q10 -region,regions -extracellularProperties,extracellular_properties -connection,connections -http://www.w3.org/2001/XMLSchema http://www.w3.org/2001/XMLSchema.xsd,http://www.w3.org/2001/xml_schema http://www.w3.org/2001/xml_schema.xsd -electricalConnectionInstance,electrical_connection_instances -translationStart,translation_start -closedState,closed_states -gNa,g_na +eNa,e_na +gAmpa,g_ampa +gNmda,g_nmda +gKC,g_kc gKahp,g_kahp -electricalConnectionInstanceW,electrical_connection_instance_ws -spikeTarget,spike_target -yStart,y_start +gCa,g_ca +gKdr,g_kdr +gNa,g_na +gLd,g_ld +gLs,g_ls +iDend,i_dend +iSoma,i_soma +delT,del_t +gL,g_l +leakConductance,leak_conductance +leakReversal,leak_reversal tauFac,tau_fac -q10Factor,q10_factor -populationList,population_list -zSpacing,z_spacing +tauRec,tau_rec +initReleaseProb,init_release_prob +tsodyksMarkramDepFacMechanism,tsodyks_markram_dep_fac_mechanism +tsodyksMarkramDepMechanism,tsodyks_markram_dep_mechanism +scalingVolt,scaling_volt +scalingConc,scaling_conc +blockConcentration,block_concentration +voltageConcDepBlockMechanism,voltage_conc_dep_block_mechanism +blockMechanism,block_mechanism +plasticityMechanism,plasticity_mechanism synapse2Path,synapse2_path 
+synapse1Path,synapse1_path +tauRise,tau_rise +tauDecay2,tau_decay2 +tauDecay1,tau_decay1 +tauDecay,tau_decay +decayConstant,decay_constant +restingConc,resting_conc +shellThickness,shell_thickness experimentalTemp,experimental_temp -openState,open_states -property,properties -zSize,z_size -compoundInput,compound_inputs -ionChannel,ion_channel -iafTauRefCell,iaf_tau_ref_cells -scalingConc,scaling_conc -pinskyRinzelCA3Cell,pinsky_rinzel_ca3_cells +q10Factor,q10_factor +fixedQ10,fixed_q10 +fractionalConductance,fractional_conductance +timeCourse,time_course +steadyState,steady_state +q10Settings,q10_settings +subGate,sub_gates +reverseRate,reverse_rate +forwardRate,forward_rate +gateTypes,gate_types tauInfTransition,tau_inf_transition -member,members +reverseTransition,reverse_transition +forwardTransition,forward_transition +openState,open_states +closedState,closed_states +gateFractional,gate_fractionals +gateKS,gate_kses gateHHInstantaneous,gate_hh_instantaneouses -populationTypes,population_types -conditioningVoltage,conditioning_voltage -variableParameter,variable_parameters -iSoma,i_soma -channelDensityVShift,channel_density_v_shifts -biophysicalProperties2CaPools,biophysical_properties2_ca_pools -fractionalConductance,fractional_conductance -tsodyksMarkramDepMechanism,tsodyks_markram_dep_mechanism -fitzHughNagumo1969Cell,fitz_hugh_nagumo1969_cells -segmentId,segment_id +gateHHratesTauInf,gate_h_hrates_tau_infs +gateHHratesInf,gate_h_hrates_infs +gateHHtauInf,gate_hh_tau_infs +gateHHratesTau,gate_h_hrates_taus +gateHHrates,gate_hh_rates +channelTypes,channel_types +ionChannelHH,ion_channel_hhs +ionChannelPassive,ion_channel_passive +gate,gates q10ConductanceScaling,q10_conductance_scalings -rampGenerator,ramp_generators -voltageConcDepBlockMechanism,voltage_conc_dep_block_mechanism -postComponent,post_component -initReleaseProb,init_release_prob -maxISI,max_isi -doubleSynapse,double_synapses -timeCourse,time_course 
+fixedFactorConcentrationModel,fixed_factor_concentration_models +transientPoissonFiringSynapse,transient_poisson_firing_synapses +poissonFiringSynapse,poisson_firing_synapses +spikeGeneratorRefPoisson,spike_generator_ref_poissons +spikeGeneratorPoisson,spike_generator_poissons +spikeGeneratorRandom,spike_generator_randoms spikeGenerator,spike_generators timedSynapticInput,timed_synaptic_inputs -gateHHratesTau,gate_h_hrates_taus -continuousConnection,continuous_connections -networkWithTemperature,network_with_temperature -fractionAlong,fraction_along -normalizationEnd,normalization_end -synapse1Path,synapse1_path -electricalProjection,electrical_projections -gateHHratesInf,gate_h_hrates_infs -simpleSeriesResistance,simple_series_resistance -channelDensityGHK2,channel_density_ghk2s -preSegment,pre_segment -gNmda,g_nmda -scalingVolt,scaling_volt -initialExtConcentration,initial_ext_concentration -expThreeSynapse,exp_three_synapses -startAmplitude,start_amplitude -unbounded,unboundeds +spikeArray,spike_arrays +voltageClampTriple,voltage_clamp_triples +voltageClamp,voltage_clamps compoundInputDL,compound_input_dls -xs:positiveInteger,xs:positive_integer -membraneProperties2CaPools,membrane_properties2_ca_pools +compoundInput,compound_inputs +alphaCurrSynapse,alpha_curr_synapses +expCurrSynapse,exp_curr_synapses +alphaCondSynapse,alpha_cond_synapses +expCondSynapse,exp_cond_synapses +gradedSynapse,graded_synapses +linearGradedSynapse,linear_graded_synapses +silentSynapse,silent_synapses +gapJunction,gap_junctions +doubleSynapse,double_synapses +blockingPlasticSynapse,blocking_plastic_synapses +expThreeSynapse,exp_three_synapses +expTwoSynapse,exp_two_synapses +expOneSynapse,exp_one_synapses +alphaSynapse,alpha_synapses +alphaCurrentSynapse,alpha_current_synapses +pinskyRinzelCA3Cell,pinsky_rinzel_ca3_cells +fitzHughNagumo1969Cell,fitz_hugh_nagumo1969_cells +fitzHughNagumoCell,fitz_hugh_nagumo_cells +adExIaFCell,ad_ex_ia_f_cells +izhikevich2007Cell,izhikevich2007_cells 
+izhikevichCell,izhikevich_cells +iafRefCell,iaf_ref_cells +iafCell,iaf_cells +iafTauRefCell,iaf_tau_ref_cells +iafTauCell,iaf_tau_cells +baseCell,base_cells +cell2CaPools,cell2_ca_poolses +cell,cells +xs:anyURI,xs:any_uri +ionChannelKS,ion_channel_kses +ionChannelVShift,ion_channel_v_shifts defaultValue,default_value -inputW,input_ws -continuousConnectionInstance,continuous_connection_instances -tauDecay1,tau_decay1 -leakConductance,leak_conductance -tauDecay2,tau_decay2 -subTree,sub_trees +xs:positiveInteger,xs:positive_integer javax.xml.bind.DatatypeConverter.parseInt,javax.xml.bind._datatype_converter.parse_int -iafTauCell,iaf_tau_cells -pulseGenerator,pulse_generators -iafRefCell,iaf_ref_cells -postSegment,post_segment javax.xml.bind.DatatypeConverter.printInt,javax.xml.bind._datatype_converter.print_int -gAmpa,g_ampa -electricalConnection,electrical_connections -synapticConnection,synaptic_connections -gateTypes,gate_types -channelDensityNonUniformNernst,channel_density_non_uniform_nernsts -restingConc,resting_conc -basedOn,based_on -leakReversal,leak_reversal -channelDensityNonUniformGHK,channel_density_non_uniform_ghks -initMembPotential,init_memb_potentials -postCell,post_cell -spikeArray,spike_arrays -blockingPlasticSynapse,blocking_plastic_synapses -minimumISI,minimum_isi diff --git a/neuroml/nml/config.py b/neuroml/nml/config.py index 447f42f1..60c26d4b 100644 --- a/neuroml/nml/config.py +++ b/neuroml/nml/config.py @@ -1 +1 @@ -variables={'schema_name':'NeuroML_v2.1.xsd'} +variables = {"schema_name": "NeuroML_v2.2.xsd"} diff --git a/neuroml/nml/generateds_config.py b/neuroml/nml/generateds_config.py index 83fc889d..4cb361e8 100644 --- a/neuroml/nml/generateds_config.py +++ b/neuroml/nml/generateds_config.py @@ -1,36 +1,50 @@ -#Maps from NeuroML element space (Key) to python object space (value) -#The class names are essentially correct, the instance names need converting -#also attributes need fixing +# Maps from NeuroML element space (Key) to python 
object space (value) +# The class names are essentially correct, the instance names need converting +# also attributes need fixing import lxml from lxml import objectify import re -from config import variables import csv import sys +import process_includes + +# In Python 3, it's io +try: + from StringIO import StringIO +except ImportError: + from io import StringIO +from config import variables + sys.setrecursionlimit(10000) + def remove_curlies(string): - return re.sub("{.*}","",string) + return re.sub("{.*}", "", string) + def to_lowercase_with_underscores(string): - s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', string) - return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower() + s1 = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", string) + return re.sub("([a-z0-9])([A-Z])", r"\1_\2", s1).lower() + def to_camelback(string): string_list = list(string) i = 0 - for m in re.finditer('_', string): + for m in re.finditer("_", string): underscore_index = m.end() - string_list[underscore_index - i] = string_list[underscore_index - i].capitalize() + string_list[underscore_index - i] = string_list[ + underscore_index - i + ].capitalize() string_list.pop(underscore_index - (1 + i)) i += 1 - string = ''.join(string_list) + string = "".join(string_list) return str(string) -def traverse_doc(queue,rename): + +def traverse_doc(queue, rename): """Recursive function to traverse the nodes of a tree in breadth-first order. 
@@ -43,24 +57,26 @@ def traverse_doc(queue,rename): children = node.getchildren() rename(node) queue = queue + children - traverse_doc(queue,rename) + traverse_doc(queue, rename) else: return None -def pluralize(noun): - if re.search('[sxz]$', noun): - return re.sub('$', 'es', noun) - elif re.search('[^aeioudgkprt]h$', noun): - return re.sub('$', 'es', noun) - elif re.search('[^aeiou]y$', noun): - return re.sub('y$', 'ies', noun) - else: - return noun + 's' + +def pluralize(noun): + if re.search("[sxz]$", noun): + return re.sub("$", "es", noun) + elif re.search("[^aeioudgkprt]h$", noun): + return re.sub("$", "es", noun) + elif re.search("[^aeiou]y$", noun): + return re.sub("y$", "ies", noun) + else: + return noun + "s" + def _node_to_python(node): - pluralize_flag = 'maxOccurs' in node.attrib - + pluralize_flag = "maxOccurs" in node.attrib + for attribute in node.attrib: nml_attribute = node.attrib.pop(attribute) if nml_attribute[0].islower(): @@ -69,16 +85,13 @@ def _node_to_python(node): renamed_attribute = pluralize(renamed_attribute) NameTable[nml_attribute] = renamed_attribute -filename = variables['schema_name'] -import StringIO -import process_includes +filename = variables["schema_name"] -outfile = StringIO.StringIO() -infile = open(filename, 'r') +outfile = StringIO() +infile = open(filename, "r") -process_includes.process_include_files(infile, outfile, - inpath=filename) +process_includes.process_include_files(infile, outfile, inpath=filename) infile.close() outfile.seek(0) doc = objectify.parse(outfile) @@ -87,10 +100,11 @@ def _node_to_python(node): NameTable = {} -traverse_doc(queue,_node_to_python) +traverse_doc(queue, _node_to_python) -#filtering routine, need to get a better way to extract these, asked on Stack Overflow +# filtering routine, need to get a better way to extract these, asked on Stack Overflow import keyword + disallowed_keywords = keyword.kwlist for keyword in disallowed_keywords: try: @@ -98,55 +112,54 @@ def _node_to_python(node): 
except: pass -NameTable['morphology'] = 'morphology' #overriding change to -#"morphologies" because it only applies outside of a cell - not a very -#elegant solution -NameTable['gateHHtauInf'] = 'gate_hh_tau_infs' -NameTable['ionChannelHH'] = 'ion_channel_hh' -NameTable['gateHHrates'] = 'gate_hh_rates' -NameTable['gateHHtauInf'] = 'gate_hh_tau_infs' - -NameTable['ionChannel'] = 'ion_channel' -NameTable['ionChannelHH'] = 'ion_channel_hhs' +NameTable["morphology"] = "morphology" # overriding change to +# "morphologies" because it only applies outside of a cell - not a very +# elegant solution +NameTable["gateHHtauInf"] = "gate_hh_tau_infs" +NameTable["ionChannelHH"] = "ion_channel_hh" +NameTable["gateHHrates"] = "gate_hh_rates" +NameTable["gateHHtauInf"] = "gate_hh_tau_infs" +NameTable["ionChannel"] = "ion_channel" +NameTable["ionChannelHH"] = "ion_channel_hhs" -NameTable['basePyNNCell'] = 'basePyNNCell' -NameTable['basePyNNIaFCell'] = 'basePyNNIaFCell' -NameTable['basePyNNIaFCondCell'] = 'basePyNNIaFCondCell' -NameTable['tau_syn_E'] = 'tau_syn_E' -NameTable['tau_syn_I'] = 'tau_syn_I' +NameTable["basePyNNCell"] = "basePyNNCell" +NameTable["basePyNNIaFCell"] = "basePyNNIaFCell" +NameTable["basePyNNIaFCondCell"] = "basePyNNIaFCondCell" +NameTable["tau_syn_E"] = "tau_syn_E" +NameTable["tau_syn_I"] = "tau_syn_I" -NameTable['e_rev_E'] = 'e_rev_E' -NameTable['e_rev_I'] = 'e_rev_I' -NameTable['e_rev_Na'] = 'e_rev_Na' -NameTable['e_rev_K'] = 'e_rev_K' +NameTable["e_rev_E"] = "e_rev_E" +NameTable["e_rev_I"] = "e_rev_I" +NameTable["e_rev_Na"] = "e_rev_Na" +NameTable["e_rev_K"] = "e_rev_K" -NameTable['gbar_K'] = 'gbar_K' -NameTable['gbar_Na'] = 'gbar_Na' +NameTable["gbar_K"] = "gbar_K" +NameTable["gbar_Na"] = "gbar_Na" -NameTable['delta_T'] = 'delta_T' +NameTable["delta_T"] = "delta_T" -NameTable['IF_curr_alpha'] = 'IF_curr_alpha' -NameTable['IF_curr_exp'] = 'IF_curr_exp' -NameTable['IF_cond_alpha'] = 'IF_cond_alpha' -NameTable['IF_cond_exp'] = 'IF_cond_exp' 
+NameTable["IF_curr_alpha"] = "IF_curr_alpha" +NameTable["IF_curr_exp"] = "IF_curr_exp" +NameTable["IF_cond_alpha"] = "IF_cond_alpha" +NameTable["IF_cond_exp"] = "IF_cond_exp" -NameTable['extracellularProperties'] = 'extracellular_properties' -NameTable['intracellularProperties'] = 'intracellular_properties' -NameTable['biophysicalProperties'] = 'biophysical_properties' +NameTable["extracellularProperties"] = "extracellular_properties" +NameTable["intracellularProperties"] = "intracellular_properties" +NameTable["biophysicalProperties"] = "biophysical_properties" print("NameTable is as follows:") print(NameTable) print("Saving NameTable to csv file") -writer = csv.writer(open('name_table.csv', 'wb')) +writer = csv.writer(open("name_table.csv", "w")) for key, value in NameTable.items(): - writer.writerow([key, value]) + writer.writerow([key, value]) -print ("Saving name changes table to csv file") -changes_writer = csv.writer(open('changed_names.csv','wb')) +print("Saving name changes table to csv file") +changes_writer = csv.writer(open("changed_names.csv", "w")) for key in NameTable: value = NameTable[key] if key != value: - changes_writer.writerow([key,value]) + changes_writer.writerow([key, value]) diff --git a/neuroml/nml/helper_methods.py b/neuroml/nml/helper_methods.py index 7c491caa..6640ec55 100644 --- a/neuroml/nml/helper_methods.py +++ b/neuroml/nml/helper_methods.py @@ -6,8 +6,7 @@ # your method specification file. # class MethodSpec(object): - def __init__(self, name='', source='', class_names='', - class_names_compiled=None): + def __init__(self, name="", source="", class_names="", class_names_compiled=None): """MethodSpec -- A specification of a method. Member variables: name -- The method name @@ -21,55 +20,68 @@ class names in which the method is to be inserted. 
self.name = name self.source = source self.class_names = class_names - ''' + """ if class_names is None: self.class_names = ('.*', ) else: if class_names_compiled is None: self.class_names_compiled = re.compile(self.class_names) else: - self.class_names_compiled = class_names_compiled''' + self.class_names_compiled = class_names_compiled""" + def get_name(self): return self.name + def set_name(self, name): self.name = name + def get_source(self): return self.source + def set_source(self, source): self.source = source + def get_class_names(self): return self.class_names + def set_class_names(self, class_names): self.class_names = class_names self.class_names_compiled = re.compile(class_names) + def get_class_names_compiled(self): return self.class_names_compiled + def set_class_names_compiled(self, class_names_compiled): self.class_names_compiled = class_names_compiled + def match_name(self, class_name): """Match against the name of the class currently being generated. If this method returns True, the method will be inserted in the generated class. """ - if self.class_names == class_name or (isinstance(self.class_names,list) and class_name in self.class_names): + if self.class_names == class_name or ( + isinstance(self.class_names, list) and class_name in self.class_names + ): return True else: return False + def get_interpolated_source(self, values_dict): """Get the method source code, interpolating values from values_dict into it. The source returned by this method is inserted into the generated class. 
""" source = self.source % values_dict - source = source.replace('PERCENTAGE','%') + source = source.replace("PERCENTAGE", "%") return source + def show(self): - print('specification:') - print(' name: %s' % (self.name, )) + print("specification:") + print(" name: %s" % (self.name,)) print(self.source) - print(' class_names: %s' % (self.class_names, )) - print(' names pat : %s' % (self.class_names_compiled.pattern, )) + print(" class_names: %s" % (self.class_names,)) + print(" names pat : %s" % (self.class_names_compiled.pattern,)) # @@ -81,20 +93,32 @@ def show(self): # generated superclass file and also section "User Methods" in # the documentation, as well as the examples below. -num_segments = MethodSpec(name='num_segments', +num_segments = MethodSpec( + name="num_segments", source='''\ @property def num_segments(self): + """Get the number of segments included in this cell morphology. + + :returns: number of segments + :rtype: int + """ return len(self.segments) ''', - class_names=("Morphology") - ) + class_names=("Morphology"), +) -length = MethodSpec(name='length', +length = MethodSpec( + name="length", source='''\ @property def length(self): + """Get the length of the segment. + + :returns: length of the segment + :rtype: float + """ if self.proximal==None: raise Exception('Cannot get length of segment '+str(self.id)+' using the length property, since no proximal point is set on it (the proximal point comes from the parent segment). Use the method get_segment_length(segment_id) on the cell instead.') @@ -120,13 +144,19 @@ def __repr__(self): return str(self) ''', - class_names=("Segment") - ) + class_names=("Segment"), +) -volume = MethodSpec(name='volume', +volume = MethodSpec( + name="volume", source='''\ @property def volume(self): + """Get the volume of the segment. 
+ + :returns: volume of segment + :rtype: float + """ from math import pi if self.proximal==None: @@ -150,15 +180,21 @@ def volume(self): return volume ''', - class_names=("Segment") - ) + class_names=("Segment"), +) -surface_area = MethodSpec(name='surface_area', +surface_area = MethodSpec( + name="surface_area", source='''\ @property def surface_area(self): + """Get the surface area of the segment. + + :returns: surface area of segment + :rtype: float + """ from math import pi from math import sqrt @@ -183,23 +219,185 @@ def surface_area(self): return surface_area ''', - class_names=("Segment") - ) + class_names=("Segment"), +) + +generic_add = MethodSpec( + name="add", + source='''\ + + def add(self, obj=None, hint=None, force=False): + """Generic function to allow easy addition of a new member to a NeuroML object. + + Without arguments, when `obj=None`, it simply calls the `info()` method + to provide the list of valid member types for the NeuroML class. + Use `info(show_contents=True)` to see the valid members of this class, + and their current contents. + + :param obj: object member to add + :type obj: any NeuroML Type defined by the API + :param hint: member name to add to when there are multiple members that `obj` can be added to + :type hint: string + :param force: boolean to force addition when an obj has already been added previously + :type force: bool + + :raises Exception: if a member compatible to obj could not be found + :raises Exception: if multiple members can accept the object and no hint is provided. + """ + if not obj: + self.info() + return + + # getattr only returns the value of the provided member but one cannot + # then use this to modify the member. 
Using `vars` also allows us to + # modify the value + targets = [] + all_members = self.get_members() + for member in all_members: + # get_data_type() returns the type as a string, e.g.: 'IncludeType' + if member.get_data_type() == type(obj).__name__: + targets.append(member) + + + if len(targets) == 0: + # no targets found + e = Exception( + """A member object of {} type could not be found in NeuroML class {}.\\n{} + """.format(type(obj).__name__, type(self).__name__, self.info())) + raise e + elif len(targets) == 1: + # good, just add it + self.__add(obj, targets[0], force) + else: + # more than one target + if not hint: + err_string = """Multiple members can accept {}. Please provide the name of the variable using the `hint` argument to specify which member to add to:\\n""".format(type(obj).__name__) + for t in targets: + err_string += "- {}\\n".format(t.get_name()) + raise Exception(err_string) + + # use hint to figure out which target to use + for t in targets: + if hint == t.get_name(): + self.__add(obj, t, force) + break + + + def __add(self, obj, member, force=False): + """Private method to add new member to a specified variable in a NeuroML object. + + :param obj: object member to add + :type obj: any NeuroML Type defined by the API + :param member: member variable name to add to when there are multiple members that `obj` can be added to + :type member: MemberSpec_ + :param force: boolean to force addition when an obj has already been added previously + :type force: bool + + """ + import warnings + + # A single value, not a list: + if member.get_container() == 0: + if force: + vars(self)[member.get_name()] = obj + else: + if vars(self)[member.get_name()]: + warnings.warn("""{} has already been assigned. Use `force=True` to overwrite. 
Hint: you can make changes to the already added object as required without needing to re-add it because only references to the objects are added, not their values.""".format(member.get_name())) + else: + vars(self)[member.get_name()] = obj + # List + else: + # Do not use 'obj in ..' for membership check because it also + # returns true if an element with the same value exists in the + # container + # https://docs.python.org/3/reference/expressions.html#membership-test-operations + if force: + vars(self)[member.get_name()].append(obj) + else: + if any(obj is e for e in vars(self)[member.get_name()]): + warnings.warn("""{} already exists in {}. Use `force=True` to force readdition. Hint: you can make changes to the already added object as required without needing to re-add it because only references to the objects are added, not their values.""".format(obj, member.get_name())) + else: + vars(self)[member.get_name()].append(obj) + + def get_members(self): + """Get member data items, also from ancestors. + + This function is required because generateDS does not include inherited + members in the member_data_items list for a derived class. So, for + example, while IonChannelHH has `gate_hh_rates` which it inherits from + IonChannel, IonChannelHH's `member_data_items_` is empty. It relies on + the IonChannel classes' `member_data_items_` list. + + :returns: list of members, including ones inherited from ancestors. + """ + import copy + # create a copy by value + # if copied by reference (=), the member_data_items_ object variable is + # modified to a large list, greatly increasing the memory usage. + all_members = copy.copy(self.member_data_items_) + for c in type(self).__mro__: + try: + all_members.extend(c.member_data_items_) + except AttributeError: + pass + except TypeError: + pass + + # deduplicate + # TODO where are the duplicates coming from given that we're not + # calling this recursively? 
+ all_members = list(set(all_members)) + return all_members + ''', + class_names=("BaseWithoutId"), +) + +generic_list = MethodSpec( + name="info", + source='''\ + + def info(self, show_contents=False): + """A helper function to get a list of members of this class. + + This is useful to quickly check what members can go into a particular + NeuroML class (which will match the Schema definitions). It lists these + members and notes whether they are "single" type elements (Child + elements) or "List" elements (Children elements). It will also note + whether a member is optional or required. + + See http://www.davekuhlman.org/generateDS.html#user-methods for more + information on the MemberSpec_ class that generateDS uses. + + :param show_contents: also prints out the contents of the members + :type show_contents: bool + + :returns: the string (for testing purposes) + """ + + info_str = "Valid members for {} are:\\n".format(self.__class__.__name__) + for member in self.member_data_items_: + info_str += ("* {} (class: {})\\n".format(member.name, member.data_type)) + if show_contents: + contents = getattr(self, member.get_name()) + info_str += ("\t* Contents: {}\\n\\n".format(contents)) + + info_str += "Please see the NeuroML standard schema documentation at https://docs.neuroml.org/Userdocs/NeuroMLv2.html for more information." + print(info_str) + return info_str + ''', + class_names=("BaseWithoutId"), +) # # Provide a list of your method specifications. # This list of specifications must be named METHOD_SPECS. 
# -METHOD_SPECS=(length, - volume, - surface_area, - num_segments, - ) - +METHOD_SPECS = (length, volume, surface_area, num_segments, generic_add, generic_list) -seg_grp = MethodSpec(name='SegmentGroup', - source='''\ +seg_grp = MethodSpec( + name="SegmentGroup", + source="""\ def __str__(self): @@ -210,13 +408,14 @@ def __repr__(self): return str(self) -''', - class_names=("SegmentGroup") - ) +""", + class_names=("SegmentGroup"), +) -METHOD_SPECS+=(seg_grp,) +METHOD_SPECS += (seg_grp,) -seg_grp = MethodSpec(name='Point3DWithDiam', +seg_grp = MethodSpec( + name="Point3DWithDiam", source='''\ def __str__(self): @@ -228,6 +427,13 @@ def __repr__(self): return str(self) def distance_to(self, other_3d_point): + """Find the distance between this point and another. + + :param other_3d_point: other 3D point to calculate distance to + :type other_3d_point: Point3DWithDiam + :returns: distance between the two points + :rtype: float + """ a_x = self.x a_y = self.y a_z = self.z @@ -240,13 +446,14 @@ def distance_to(self, other_3d_point): return distance ''', - class_names=("Point3DWithDiam") - ) + class_names=("Point3DWithDiam"), +) -METHOD_SPECS+=(seg_grp,) +METHOD_SPECS += (seg_grp,) -connection_cell_ids = MethodSpec(name='connection_cell_ids', +connection_cell_ids = MethodSpec( + name="connection_cell_ids", source='''\ def _get_cell_id(self, id_string): @@ -256,35 +463,59 @@ def _get_cell_id(self, id_string): return int(id_string.split('/')[2]) def get_pre_cell_id(self): + """Get the ID of the pre-synaptic cell + + :returns: ID of pre-synaptic cell + :rtype: str + """ return self._get_cell_id(self.pre_cell_id) def get_post_cell_id(self): + """Get the ID of the post-synaptic cell + + :returns: ID of post-synaptic cell + :rtype: str + """ return self._get_cell_id(self.post_cell_id) def get_pre_segment_id(self): + """Get the ID of the pre-synpatic segment + + :returns: ID of pre-synaptic segment. 
+ :rtype: str + """ return int(self.pre_segment_id) def get_post_segment_id(self): + """Get the ID of the post-synpatic segment + + :returns: ID of post-synaptic segment. + :rtype: str + """ return int(self.post_segment_id) def get_pre_fraction_along(self): + """Get pre-synaptic fraction along information""" return float(self.pre_fraction_along) def get_post_fraction_along(self): + """Get post-synaptic fraction along information""" return float(self.post_fraction_along) def get_pre_info(self): + """Get pre-synaptic information summary""" return str(self.get_pre_cell_id())+(':'+str(self.get_pre_segment_id())+'('+ 'PERCENTAGE.5f'PERCENTAGEself.get_pre_fraction_along()+')' if self.get_pre_segment_id()!=0 or self.get_pre_fraction_along()!=0.5 else '') def get_post_info(self): + """Get post-synaptic information summary""" return str(self.get_post_cell_id())+(':'+str(self.get_post_segment_id())+'('+ 'PERCENTAGE.5f'PERCENTAGEself.get_post_fraction_along()+')' if self.get_post_segment_id()!=0 or self.get_post_fraction_along()!=0.5 else '') @@ -293,12 +524,13 @@ def __str__(self): return "Connection "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info()) ''', - class_names=(["Connection","ConnectionWD"]) - ) + class_names=(["Connection", "ConnectionWD"]), +) -METHOD_SPECS+=(connection_cell_ids,) +METHOD_SPECS += (connection_cell_ids,) -connection_wd_cell_ids = MethodSpec(name='connection_wd_cell_ids', +connection_wd_cell_ids = MethodSpec( + name="connection_wd_cell_ids", source='''\ def __str__(self): @@ -307,19 +539,25 @@ def __str__(self): ", weight: "+'PERCENTAGEf' PERCENTAGE (float(self.weight))+", delay: "+'PERCENTAGE.5f' PERCENTAGE (self.get_delay_in_ms())+" ms" def get_delay_in_ms(self): + """Get connection delay in milli seconds + + :returns: connection delay in milli seconds + :rtype: float + """ if 'ms' in self.delay: return float(self.delay[:-2].strip()) elif 's' in self.delay: return float(self.delay[:-1].strip())*1000.0 ''', - 
class_names=("ConnectionWD") - ) + class_names=("ConnectionWD"), +) -METHOD_SPECS+=(connection_wd_cell_ids,) +METHOD_SPECS += (connection_wd_cell_ids,) -elec_connection_instance_cell_ids = MethodSpec(name='elec_connection_instance_cell_ids', - source='''\ +elec_connection_instance_cell_ids = MethodSpec( + name="elec_connection_instance_cell_ids", + source="""\ def _get_cell_id(self, id_string): if '[' in id_string: @@ -333,16 +571,25 @@ def __str__(self): ", synapse: "+str(self.synapse) - ''', - class_names=("ElectricalConnectionInstance") - ) + """, + class_names=("ElectricalConnectionInstance"), +) -METHOD_SPECS+=(elec_connection_instance_cell_ids,) +METHOD_SPECS += (elec_connection_instance_cell_ids,) -elec_connection_instance_w = MethodSpec(name='elec_connection_instance_w', +elec_connection_instance_w = MethodSpec( + name="elec_connection_instance_w", source='''\ def get_weight(self): + """Get the weight of the connection + + If a weight is not set (or is set to None), returns the default value + of 1.0. 
+ + :returns: weight of connection or 1.0 if not set + :rtype: float + """ return float(self.weight) if self.weight!=None else 1.0 @@ -352,47 +599,72 @@ def __str__(self): ", synapse: "+str(self.synapse) + ", weight: "+'PERCENTAGE.6f'PERCENTAGEself.get_weight() ''', - class_names=("ElectricalConnectionInstanceW") - ) + class_names=("ElectricalConnectionInstanceW"), +) -METHOD_SPECS+=(elec_connection_instance_w,) +METHOD_SPECS += (elec_connection_instance_w,) -elec_connection_cell_ids = MethodSpec(name='elec_connection_cell_ids', +elec_connection_cell_ids = MethodSpec( + name="elec_connection_cell_ids", source='''\ def _get_cell_id(self, id_string): return int(float(id_string)) def get_pre_cell_id(self): + """Get the ID of the pre-synaptic cell + + :returns: ID of pre-synaptic cell + :rtype: str + """ return self._get_cell_id(self.pre_cell) def get_post_cell_id(self): + """Get the ID of the post-synaptic cell + + :returns: ID of post-synaptic cell + :rtype: str + """ return self._get_cell_id(self.post_cell) def get_pre_segment_id(self): + """Get the ID of the pre-synpatic segment + + :returns: ID of pre-synaptic segment. + :rtype: str + """ return int(self.pre_segment) def get_post_segment_id(self): + """Get the ID of the post-synpatic segment + + :returns: ID of post-synaptic segment. 
+ :rtype: str + """ return int(self.post_segment) def get_pre_fraction_along(self): + """Get pre-synaptic fraction along information""" return float(self.pre_fraction_along) def get_post_fraction_along(self): + """Get post-synaptic fraction along information""" return float(self.post_fraction_along) def get_pre_info(self): + """Get pre-synaptic information summary""" return str(self.get_pre_cell_id())+(':'+str(self.get_pre_segment_id())+'('+ 'PERCENTAGE.5f'PERCENTAGEself.get_pre_fraction_along()+')' if self.get_pre_segment_id()!=0 or self.get_pre_fraction_along()!=0.5 else '') def get_post_info(self): + """Get post-synaptic information summary""" return str(self.get_post_cell_id())+(':'+str(self.get_post_segment_id())+'('+ 'PERCENTAGE.5f'PERCENTAGEself.get_post_fraction_along()+')' if self.get_post_segment_id()!=0 or self.get_post_fraction_along()!=0.5 else '') @@ -404,13 +676,14 @@ def __str__(self): ''', - class_names=("ElectricalConnection") - ) + class_names=("ElectricalConnection"), +) -METHOD_SPECS+=(elec_connection_cell_ids,) +METHOD_SPECS += (elec_connection_cell_ids,) -cont_connection_instance_cell_ids = MethodSpec(name='cont_connection_instance_cell_ids', - source='''\ +cont_connection_instance_cell_ids = MethodSpec( + name="cont_connection_instance_cell_ids", + source="""\ def _get_cell_id(self, id_string): if '[' in id_string: @@ -425,16 +698,21 @@ def __str__(self): ", pre comp: "+str(self.pre_component)+", post comp: "+str(self.post_component) - ''', - class_names=("ContinuousConnectionInstance") - ) + """, + class_names=("ContinuousConnectionInstance"), +) -METHOD_SPECS+=(cont_connection_instance_cell_ids,) +METHOD_SPECS += (cont_connection_instance_cell_ids,) -cont_connection_instance_w = MethodSpec(name='cont_connection_instance_w', +cont_connection_instance_w = MethodSpec( + name="cont_connection_instance_w", source='''\ def get_weight(self): + """Get weight. + + If weight is not set, the default value of 1.0 is returned. 
+ """ return float(self.weight) if self.weight!=None else 1.0 @@ -445,12 +723,13 @@ def __str__(self): ''', - class_names=("ContinuousConnectionInstanceW") - ) + class_names=("ContinuousConnectionInstanceW"), +) -METHOD_SPECS+=(cont_connection_instance_w,) +METHOD_SPECS += (cont_connection_instance_w,) -cont_connection_cell_ids = MethodSpec(name='cont_connection_cell_ids', +cont_connection_cell_ids = MethodSpec( + name="cont_connection_cell_ids", source='''\ def _get_cell_id(self, id_string): @@ -458,35 +737,59 @@ def _get_cell_id(self, id_string): def get_pre_cell_id(self): + """Get the ID of the pre-synaptic cell + + :returns: ID of pre-synaptic cell + :rtype: str + """ return self._get_cell_id(self.pre_cell) def get_post_cell_id(self): + """Get the ID of the post-synaptic cell + + :returns: ID of post-synaptic cell + :rtype: str + """ return self._get_cell_id(self.post_cell) def get_pre_segment_id(self): + """Get the ID of the pre-synpatic segment + + :returns: ID of pre-synaptic segment. + :rtype: str + """ return int(self.pre_segment) def get_post_segment_id(self): + """Get the ID of the post-synpatic segment + + :returns: ID of post-synaptic segment. 
+ :rtype: str + """ return int(self.post_segment) def get_pre_fraction_along(self): + """Get pre-synaptic fraction along information""" return float(self.pre_fraction_along) def get_post_fraction_along(self): + """Get post-synaptic fraction along information""" return float(self.post_fraction_along) def get_pre_info(self): + """Get pre-synaptic information summary""" return str(self.get_pre_cell_id())+(':'+str(self.get_pre_segment_id())+'('+ 'PERCENTAGE.5f'PERCENTAGEself.get_pre_fraction_along()+')' if self.get_pre_segment_id()!=0 or self.get_pre_fraction_along()!=0.5 else '') def get_post_info(self): + """Get post-synaptic information summary""" return str(self.get_post_cell_id())+(':'+str(self.get_post_segment_id())+'('+ 'PERCENTAGE.5f'PERCENTAGEself.get_post_fraction_along()+')' if self.get_post_segment_id()!=0 or self.get_post_fraction_along()!=0.5 else '') @@ -498,15 +801,15 @@ def __str__(self): ''', - class_names=("ContinuousConnection") - ) + class_names=("ContinuousConnection"), +) -METHOD_SPECS+=(cont_connection_cell_ids,) +METHOD_SPECS += (cont_connection_cell_ids,) -instance = MethodSpec(name='instance', - source='''\ - +instance = MethodSpec( + name="instance", + source="""\ def __str__(self): @@ -516,14 +819,15 @@ def __repr__(self): return str(self) -''', - class_names=("Instance") - ) -METHOD_SPECS+=(instance,) +""", + class_names=("Instance"), +) +METHOD_SPECS += (instance,) -location = MethodSpec(name='location', - source='''\ +location = MethodSpec( + name="location", + source="""\ def _format(self,value): @@ -540,15 +844,14 @@ def __repr__(self): return str(self) -''', - class_names=("Location") - ) -METHOD_SPECS+=(location,) - +""", + class_names=("Location"), +) +METHOD_SPECS += (location,) - -input_cell_ids = MethodSpec(name='input_cell_ids', +input_cell_ids = MethodSpec( + name="input_cell_ids", source='''\ def _get_cell_id(self, id_string): @@ -558,14 +861,22 @@ def _get_cell_id(self, id_string): return int(id_string.split('/')[2]) def 
get_target_cell_id(self): + """Get ID of target cell. """ return self._get_cell_id(self.target) def get_segment_id(self): + """Get the ID of the segment. + Returns 0 if segment_id was not set. + """ return int(self.segment_id) if self.segment_id else 0 def get_fraction_along(self): + """Get fraction along. + + Returns 0.5 is fraction_along was not set. + """ return float(self.fraction_along) if self.fraction_along else 0.5 @@ -574,16 +885,21 @@ def __str__(self): return "Input "+str(self.id)+": "+str(self.get_target_cell_id())+":"+str(self.get_segment_id())+"("+'PERCENTAGE.6f'PERCENTAGEself.get_fraction_along()+")" ''', - class_names=(["Input","ExplicitInput"]) - ) + class_names=(["Input", "ExplicitInput"]), +) -METHOD_SPECS+=(input_cell_ids,) +METHOD_SPECS += (input_cell_ids,) -input_w = MethodSpec(name='input_w', +input_w = MethodSpec( + name="input_w", source='''\ def get_weight(self): + """Get weight. + + If weight is not set, the default value of 1.0 is returned. + """ return float(self.weight) if self.weight!=None else 1.0 @@ -592,17 +908,23 @@ def __str__(self): return "Input (weight) "+str(self.id)+": "+str(self.get_target_cell_id())+":"+str(self.get_segment_id())+"("+'PERCENTAGE.6f'PERCENTAGEself.get_fraction_along()+"), weight: "+'PERCENTAGE.6f'PERCENTAGEself.get_weight() ''', - class_names=(["InputW"]) - ) + class_names=(["InputW"]), +) -METHOD_SPECS+=(input_w,) +METHOD_SPECS += (input_w,) -nml_doc_summary = MethodSpec(name='summary', +nml_doc_summary = MethodSpec( + name="summary", source='''\ def summary(self, show_includes=True, show_non_network=True): + """Get a pretty-printed summary of the complete NeuroMLDocument. + + This includes information on the various Components included in the + NeuroMLDocument: networks, cells, projections, synapses, and so on. + """ import inspect @@ -729,6 +1051,12 @@ def summary(self, show_includes=True, show_non_network=True): warn_count = 0 def get_by_id(self,id): + """Get a component by specifying its ID. 
+ + :param id: id of Component to get + :type id: str + :returns: Component with given ID or None if no Component with provided ID was found + """ if len(id)==0: import inspect callframe = inspect.getouterframes(inspect.currentframe(), 2) @@ -752,21 +1080,32 @@ def get_by_id(self,id): print_(" - Suppressing further warnings about id not found...") return None - def append(self,element): - from neuroml.utils import append_to_element - append_to_element(self,element) + def append(self, element): + """Append an element + + :param element: element to append + :type element: Object + """ + self.add(element) ''', - class_names=("NeuroMLDocument") - ) + class_names=("NeuroMLDocument"), +) -METHOD_SPECS+=(nml_doc_summary,) +METHOD_SPECS += (nml_doc_summary,) -network_get_by_id = MethodSpec(name='get_by_id', +network_get_by_id = MethodSpec( + name="get_by_id", source='''\ warn_count = 0 def get_by_id(self,id): + """Get a component by its ID + + :param id: ID of component to find + :type id: str + :returns: component with specified ID or None if no component with specified ID found + """ all_ids = [] for ms in self.member_data_items_: mlist = self.__getattribute__(ms.name) @@ -790,18 +1129,20 @@ def __str__(self): return "Network "+str(self.id)+" with "+str(len(self.populations))+" population(s)" ''', - class_names=("Network") - ) + class_names=("Network"), +) -METHOD_SPECS+=(network_get_by_id,) +METHOD_SPECS += (network_get_by_id,) -cell_methods = MethodSpec(name='cell_methods', +cell_methods = MethodSpec( + name="cell_methods", source='''\ # Get segment object by its id def get_segment(self, segment_id): + # type: (str) -> Segment """Get segment object by its id :param segment_id: ID of segment @@ -816,10 +1157,30 @@ def get_segment(self, segment_id): raise Exception("Segment with id "+str(segment_id)+" not found in cell "+str(self.id)) + def get_segments_by_substring(self, substring): + # type: (str) -> dict + """Get a dictionary of segment IDs and the segment matching 
the specified substring + + :param substring: substring to match + :type substring: str + :return: dictionary with segment ID as key, and segment as value + :raises Exception: if no segments are found + + """ + segments = {} + if substring: + for segment in self.morphology.segments: + if substring in segment.id: + segments[segment.id] = segment + if len(segments) == 0: + raise Exception("Segments with id matching "+str(substring)+" not found in cell "+str(self.id)) + return segments + + # Get the proximal point of a segment, even the proximal field is None and # so the proximal point is on the parent (at a point set by fraction_along) def get_actual_proximal(self, segment_id): - + # type: (str) -> Point3DWithDiam """Get the proximal point of a segment. Get the proximal point of a segment, even the proximal field is None @@ -849,6 +1210,7 @@ def get_actual_proximal(self, segment_id): return p def get_segment_length(self, segment_id): + # type: (str) -> float """Get the length of the segment. :param segment_id: ID of segment @@ -866,6 +1228,7 @@ def get_segment_length(self, segment_id): return length def get_segment_surface_area(self, segment_id): + # type: (str) -> float """Get the surface area of the segment. :param segment_id: ID of the segment @@ -883,6 +1246,7 @@ def get_segment_surface_area(self, segment_id): return temp_seg.surface_area def get_segment_volume(self, segment_id): + # type: (str) -> float """Get volume of segment :param segment_id: ID of the segment @@ -899,6 +1263,7 @@ def get_segment_volume(self, segment_id): return temp_seg.volume def get_segment_ids_vs_segments(self): + # type: () -> Dict """Get a dictionary of segment IDs and the segments in the cell. 
:return: dictionary with segment ID as key, and segment as value @@ -913,6 +1278,7 @@ def get_segment_ids_vs_segments(self): def get_all_segments_in_group(self, segment_group, assume_all_means_all=True): + # type: (SegmentGroup, bool) -> List[Segment] """Get all the segments in a segment group of the cell. :param segment_group: segment group to get all segments of @@ -959,6 +1325,7 @@ def get_ordered_segments_in_groups(self, include_cumulative_lengths=False, include_path_lengths=False, path_length_metric="Path Length from root"): # Only option supported + # type: (List, bool, bool, bool, str) -> Dict """ Get ordered list of segments in specified groups @@ -1080,6 +1447,38 @@ def get_ordered_segments_in_groups(self, return ord_segs + def get_segment_group(self, sg_id): + # type: (str) -> SegmentGroup + """Return the SegmentGroup object for the specified segment group id. + + :param sg_id: id of segment group to find + :type sg_id: str + :returns: SegmentGroup object of specified ID + :raises Exception: if segment group is not found in cell + """ + if sg_id: + for sg in self.morphology.segment_groups: + if sg.id == sg_id: + return sg + + raise Exception("Segment group with id "+str(sg_id)+" not found in cell "+str(self.id)) + + def get_segment_groups_by_substring(self, substring): + # type: (str) -> dict + """Get a dictionary of segment group IDs and the segment groups matching the specified substring + + :param substring: substring to match + :type substring: str + :return: dictionary with segment group ID as key, and segment group as value + :raises Exception: if no segment groups are not found in cell + """ + sgs = {} + for sg in self.morphology.segment_groups: + if substring in sg.id: + sgs[sg.id] = sg + if len(sgs) == 0: + raise Exception("Segment group with id matching "+str(substring)+" not found in cell "+str(self.id)) + return sgs def summary(self): @@ -1092,15 +1491,17 @@ def summary(self): print("*******************************************************") 
''', - class_names=("Cell") - ) + class_names=("Cell"), +) -METHOD_SPECS+=(cell_methods,) +METHOD_SPECS += (cell_methods,) -inserts = {} +inserts = {} -inserts['Network'] = ''' +inserts[ + "Network" +] = """ import numpy @@ -1131,9 +1532,11 @@ def summary(self): for il in self.input_lists: il.exportHdf5(h5file, netGroup) -''' +""" -inserts['Population'] = ''' +inserts[ + "Population" +] = """ import numpy @@ -1175,9 +1578,11 @@ def __str__(self): return "Population: "+str(self.id)+" with "+str( self.get_size() )+" components of type "+(self.component if self.component else "???") -''' +""" -inserts['Projection'] = ''' +inserts[ + "Projection" +] = """ import numpy @@ -1266,9 +1671,11 @@ def __str__(self): -''' +""" -inserts['ElectricalProjection'] = ''' +inserts[ + "ElectricalProjection" +] = """ import numpy @@ -1340,11 +1747,12 @@ def __str__(self): for col in extra_cols.keys(): array._f_setattr(col,extra_cols[col]) -''' - +""" -inserts['ContinuousProjection'] = ''' +inserts[ + "ContinuousProjection" +] = """ import numpy @@ -1422,10 +1830,12 @@ def __str__(self): array._f_setattr(k, extra_cols[k]) -''' +""" -inserts['InputList'] = ''' +inserts[ + "InputList" +] = """ import numpy @@ -1477,24 +1887,27 @@ def __str__(self): return "Input list: "+self.id+" to "+self.populations+", component "+self.component -''' - +""" for insert in inserts.keys(): - ms = MethodSpec(name='exportHdf5', - source='''\ + ms = MethodSpec( + name="exportHdf5", + source='''\ def exportHdf5(self, h5file, h5Group): + """Export to HDF5 file. 
""" #print("Exporting %s: "+str(self.id)+" as HDF5") %s - '''%(insert,inserts[insert]), - class_names=(insert) + ''' + % (insert, inserts[insert]), + class_names=(insert), ) - METHOD_SPECS+=(ms,) + METHOD_SPECS += (ms,) -synaptic_connections = MethodSpec(name='synaptic_connections', +synaptic_connections = MethodSpec( + name="synaptic_connections", source='''\ def _get_cell_id(self,ref): @@ -1519,12 +1932,13 @@ def __str__(self): ''', - class_names=("SynapticConnection") - ) + class_names=("SynapticConnection"), +) -METHOD_SPECS+=(synaptic_connections,) +METHOD_SPECS += (synaptic_connections,) -explicit_inputs = MethodSpec(name='explicit_inputs', +explicit_inputs = MethodSpec( + name="explicit_inputs", source='''\ def get_target_cell_id(self,): @@ -1549,18 +1963,20 @@ def __str__(self): ''', - class_names=("ExplicitInput") - ) + class_names=("ExplicitInput"), +) + +METHOD_SPECS += (explicit_inputs,) -METHOD_SPECS+=(explicit_inputs,) def test(): for spec in METHOD_SPECS: spec.show() + def main(): test() -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/neuroml/nml/name_table.csv b/neuroml/nml/name_table.csv index d7bf4692..5ae54378 100644 --- a/neuroml/nml/name_table.csv +++ b/neuroml/nml/name_table.csv @@ -1,365 +1,365 @@ -all,all -skip,skips -gateHHtauInf,gate_hh_tau_infs -gc,gc -morphology,morphology -gbase2,gbase2 -gbase1,gbase1 -sineGenerator,sine_generators -sineGeneratorDL,sine_generator_dls -cell2CaPools,cell2_ca_poolses -to,to -xSpacing,x_spacing -neuroml,neuroml -plasticityMechanism,plasticity_mechanism -unstructured,unstructured -alphaCondSynapse,alpha_cond_synapses -channelDensityNonUniform,channel_density_non_uniforms -synapse2,synapse2 -synapse1,synapse1 -condition,condition +http://www.neuroml.org/schema/neuroml2,http://www.neuroml.org/schema/neuroml2 +http://www.w3.org/2001/XMLSchema http://www.w3.org/2001/XMLSchema.xsd,http://www.w3.org/2001/xml_schema http://www.w3.org/2001/xml_schema.xsd +qualified,qualified 
+unqualified,unqualified +metaid,metaid +optional,optional +annotation,annotation +property,properties +unbounded,unboundeds notes,notes -zStart,z_start -membraneProperties,membrane_properties -inhomogeneousParameter,inhomogeneous_parameters -delT,del_t -targetVoltage,target_voltage -xs:anyURI,xs:any_uri -gLd,g_ld +id,id +required,required +neuroLexId,neuro_lex_id rate,rate -biophysicalProperties,biophysical_properties -presynapticPopulation,presynaptic_population -finishAmplitude,finish_amplitude -gLs,g_ls -segmentGroup,segment_groups -alphaCurrentSynapse,alpha_current_synapses -poissonFiringSynapse,poisson_firing_synapses -cell,cells -gbase,gbase +duration,duration +start,start +e_rev,e_rev +xs:float,xs:float +tau_syn,tau_syn +basePyNNCell,basePyNNCell +gbar_Na,gbar_Na +gbar_K,gbar_K +g_leak,g_leak +e_rev_leak,e_rev_leak +e_rev_Na,e_rev_Na +e_rev_K,e_rev_K +e_rev_I,e_rev_I +e_rev_E,e_rev_E +v_offset,v_offset +basePyNNIaFCondCell,basePyNNIaFCondCell +v_spike,v_spike +tau_w,tau_w +delta_T,delta_T +b,b +a,a +basePyNNIaFCell,basePyNNIaFCell +v_thresh,v_thresh +v_rest,v_rest v_reset,v_reset +tau_refrac,tau_refrac +tau_m,tau_m +v_init,v_init +tau_syn_I,tau_syn_I +tau_syn_E,tau_syn_E +i_offset,i_offset +cm,cm +weight,weight +fractionAlong,fraction_along +segmentId,segment_id +destination,destination +target,target +xs:string,xs:string component,component -active,active -path,paths -alphaCurrSynapse,alpha_curr_synapses -postsynapticPopulation,postsynaptic_population -gKdr,g_kdr -diameter,diameter +population,populations +inputW,input_ws +input,input +postComponent,post_component +preComponent,pre_component +continuousConnectionInstanceW,continuous_connection_instance_ws +continuousConnectionInstance,continuous_connection_instances +continuousConnection,continuous_connections +synapse,synapse +electricalConnectionInstanceW,electrical_connection_instance_ws +electricalConnectionInstance,electrical_connection_instances +electricalConnection,electrical_connections 
+delay,delay +postFractionAlong,post_fraction_along +postSegment,post_segment +postCell,post_cell preFractionAlong,pre_fraction_along -projection,projections -href,href -qd0,qd0 -select,select -forwardTransition,forward_transition -averageRate,average_rate -cellSet,cell_sets -channelDensityNernstCa2,channel_density_nernst_ca2s +preSegment,pre_segment +preCell,pre_cell postSegmentId,post_segment_id -spike,spikes +postCellId,post_cell_id +preSegmentId,pre_segment_id +preCellId,pre_cell_id +connectionWD,connection_wds +connection,connections +postsynapticPopulation,postsynaptic_population +presynapticPopulation,presynaptic_population +to,to +select,select +skip,skips +z,z +y,y +x,x +k,k +xs:nonNegativeInteger,xs:non_negative_integer +j,j +i,i +location,location +zSize,z_size +ySize,y_size +xSize,x_size +region,regions +number,number +space,spaces +unstructured,unstructured +grid,grid +random,random +populationTypes,population_types +populationList,population_list +extracellularProperties,extracellular_properties type,type -fitzHughNagumoCell,fitz_hugh_nagumo_cells -channelPopulation,channel_populations -continuousProjection,continuous_projections -include,includes -i_offset,i_offset -specificCapacitance,specific_capacitances +size,size +instance,instances +layout,layout allowedSpaces,allowed_spaces -delta_T,delta_T -tauw,tauw -xSize,x_size -explicitInput,explicit_inputs -tau,tau -xs:nonNegativeInteger,xs:non_negative_integer -concentrationModel,concentration_model -ionChannelHH,ion_channel_hhs -channelDensityGHK,channel_density_ghks -intracellularProperties2CaPools,intracellular_properties2_ca_pools -tag,tag -v_rest,v_rest -v_spike,v_spike -linearGradedSynapse,linear_graded_synapses -iafCell,iaf_cells -species,species -silentSynapse,silent_synapses -spikeGeneratorRandom,spike_generator_randoms -gate,gates -steadyState,steady_state -parameter,parameter -y,y -e_rev_leak,e_rev_leak -channelDensity,channel_densities -expCondSynapse,exp_cond_synapses 
-continuousConnectionInstanceW,continuous_connection_instance_ws -expTwoSynapse,exp_two_synapses -annotation,annotation -a,a -gateKS,gate_kses -gbar_Na,gbar_Na -izhikevich2007Cell,izhikevich2007_cells -IF_curr_alpha,IF_curr_alpha -decayConstant,decay_constant -expOneSynapse,exp_one_synapses -baseCell,base_cells -tauDecay,tau_decay -iDend,i_dend -phi,phi -tsodyksMarkramDepFacMechanism,tsodyks_markram_dep_fac_mechanism -delay,delay -reverseRate,reverse_rate -channelDensityNernst,channel_density_nernsts -reverseTransition,reverse_transition -preComponent,pre_component -basePyNNIaFCondCell,basePyNNIaFCondCell -minISI,min_isi -gCa,g_ca -eNa,e_na -transientPoissonFiringSynapse,transient_poisson_firing_synapses -qualified,qualified -name,name -preCell,pre_cell -blockConcentration,block_concentration -inputList,input_lists -tauRec,tau_rec -weight,weight +zStart,z_start +yStart,y_start +xStart,x_start +zSpacing,z_spacing +ySpacing,y_spacing +xSpacing,x_spacing +basedOn,based_on +structure,structure networkTypes,network_types -inhomogeneousValue,inhomogeneous_value -conductance,conductance -description,description -eL,e_l +networkWithTemperature,network_with_temperature network,networks -eK,e_k -space,spaces -forwardRate,forward_rate -midpoint,midpoint -tau_syn_E,tau_syn_E -tau_syn_I,tau_syn_I -gKC,g_kc -rampGeneratorDL,ramp_generator_dls -vpeak,vpeak -alphac,alphac -neuroLexId,neuro_lex_id -returnVoltage,return_voltage -intracellularProperties,intracellular_properties -q10Settings,q10_settings -distal,distal -number,number -initialConcentration,initial_concentration -instances,instances -xs:string,xs:string -size,size temperature,temperature -decayingPoolConcentrationModel,decaying_pool_concentration_models -voltageClampTriple,voltage_clamp_triples -alphaSynapse,alpha_synapses -preCellId,pre_cell_id +inputList,input_lists +explicitInput,explicit_inputs +continuousProjection,continuous_projections +electricalProjection,electrical_projections +projection,projections 
+synapticConnection,synaptic_connections +cellSet,cell_sets +spikeTarget,spike_target +averageRate,average_rate +minimumISI,minimum_isi +minISI,min_isi +maxISI,max_isi +period,period +spike,spikes +time,time +simpleSeriesResistance,simple_series_resistance +returnVoltage,return_voltage +testingVoltage,testing_voltage +conditioningVoltage,conditioning_voltage +active,active +targetVoltage,target_voltage +rampGeneratorDL,ramp_generator_dls +sineGeneratorDL,sine_generator_dls pulseGeneratorDL,pulse_generator_dls -thresh,thresh -gateHHrates,gate_hh_rates -population,populations -b,b -target,target -eCa,e_ca -shellThickness,shell_thickness -expCurrSynapse,exp_curr_synapses -amplitude,amplitude -resistivity,resistivities -ySpacing,y_spacing -ySize,y_size -gateFractional,gate_fractionals -blockMechanism,block_mechanism -instance,instances -connectionWD,connection_wds -gradedSynapse,graded_synapses -gapJunction,gap_junctions +rampGenerator,ramp_generators +sineGenerator,sine_generators +pulseGenerator,pulse_generators baselineAmplitude,baseline_amplitude -rho,rho +finishAmplitude,finish_amplitude +startAmplitude,start_amplitude +amplitude,amplitude phase,phase +source,source +species,species +resistivity,resistivities +decayingPoolConcentrationModel,decaying_pool_concentration_models +segmentGroup,segment_groups +all,all +initialExtConcentration,initial_ext_concentration +initialConcentration,initial_concentration +ion,ion +concentrationModel,concentration_model +value,value +inhomogeneousParameter,inhomogeneous_parameters +parameter,parameter +inhomogeneousValue,inhomogeneous_value segment,segments -refract,refract -postFractionAlong,post_fraction_along -ionChannelVShift,ion_channel_v_shifts -spikeGeneratorRefPoisson,spike_generator_ref_poissons -random,random -channelTypes,channel_types -xStart,x_start -xs:float,xs:float -tau_m,tau_m -layout,layout -basePyNNIaFCell,basePyNNIaFCell -voltageClamp,voltage_clamps -tau_w,tau_w -testingVoltage,testing_voltage 
-adExIaFCell,ad_ex_ia_f_cells -ionChannelKS,ion_channel_kses -preSegmentId,pre_segment_id -spikeThresh,spike_threshes -synapse,synapse -spikeGeneratorPoisson,spike_generator_poissons +condDensity,cond_density +ionChannel,ion_channel +permeability,permeability +variableParameter,variable_parameters vShift,v_shift -x,x -e_rev,e_rev -v_offset,v_offset -ionChannelPassive,ion_channel_passive -subGate,sub_gates erev,erev -parent,parent -gL,g_l -tauRise,tau_rise -izhikevichCell,izhikevich_cells -postCellId,post_cell_id -condDensity,cond_density -gateHHratesTauInf,gate_h_hrates_tau_infs -c,c -fixedFactorConcentrationModel,fixed_factor_concentration_models -fixedQ10,fixed_q10 -region,regions -extracellularProperties,extracellular_properties -connection,connections -http://www.w3.org/2001/XMLSchema http://www.w3.org/2001/XMLSchema.xsd,http://www.w3.org/2001/xml_schema http://www.w3.org/2001/xml_schema.xsd -electricalConnectionInstance,electrical_connection_instances -cm,cm +channelDensityNernstCa2,channel_density_nernst_ca2s +initMembPotential,init_memb_potentials +specificCapacitance,specific_capacitances +spikeThresh,spike_threshes +channelDensityNonUniformGHK,channel_density_non_uniform_ghks +channelDensityNonUniformNernst,channel_density_non_uniform_nernsts +channelDensityNonUniform,channel_density_non_uniforms +channelDensityGHK2,channel_density_ghk2s +channelDensityGHK,channel_density_ghks +channelDensityNernst,channel_density_nernsts +channelDensityVShift,channel_density_v_shifts +channelDensity,channel_densities +channelPopulation,channel_populations +intracellularProperties2CaPools,intracellular_properties2_ca_pools +membraneProperties2CaPools,membrane_properties2_ca_pools +intracellularProperties,intracellular_properties +membraneProperties,membrane_properties +normalizationEnd,normalization_end +xs:double,xs:double translationStart,translation_start -closedState,closed_states -period,period -gNa,g_na -gKahp,g_kahp +metric,metric +variable,variable +distal,distal 
+proximal,proximal +subTree,sub_trees +path,paths +include,includes +member,members +diameter,diameter +name,name +parent,parent +biophysicalProperties2CaPools,biophysical_properties2_ca_pools +biophysicalProperties,biophysical_properties +morphology,morphology +betac,betac +alphac,alphac pp,pp -electricalConnectionInstanceW,electrical_connection_instance_ws -spikeTarget,spike_target -optional,optional -yStart,y_start +qd0,qd0 +eL,e_l +eK,e_k +eCa,e_ca +eNa,e_na +gAmpa,g_ampa +gNmda,g_nmda +gKC,g_kc +gKahp,g_kahp +gCa,g_ca +gKdr,g_kdr +gNa,g_na +gLd,g_ld +gLs,g_ls +gc,gc +iDend,i_dend +iSoma,i_soma +phi,phi +refract,refract +tauw,tauw +delT,del_t +thresh,thresh +reset,reset +gL,g_l +d,d +c,c +vpeak,vpeak +vt,vt +vr,vr +v0,v0 +leakConductance,leak_conductance +leakReversal,leak_reversal +tau,tau tauFac,tau_fac -q10Factor,q10_factor -populationList,population_list -zSpacing,z_spacing +tauRec,tau_rec +initReleaseProb,init_release_prob +tsodyksMarkramDepFacMechanism,tsodyks_markram_dep_fac_mechanism +tsodyksMarkramDepMechanism,tsodyks_markram_dep_mechanism +scalingVolt,scaling_volt +scalingConc,scaling_conc +blockConcentration,block_concentration +voltageConcDepBlockMechanism,voltage_conc_dep_block_mechanism +blockMechanism,block_mechanism +plasticityMechanism,plasticity_mechanism synapse2Path,synapse2_path -betac,betac +synapse1Path,synapse1_path +synapse2,synapse2 +synapse1,synapse1 +tauRise,tau_rise +tauDecay2,tau_decay2 +tauDecay1,tau_decay1 +tauDecay,tau_decay +ibase,ibase +delta,delta +conductance,conductance +gbase2,gbase2 +gbase1,gbase1 +gbase,gbase +rho,rho +decayConstant,decay_constant +restingConc,resting_conc +shellThickness,shell_thickness +scale,scale +midpoint,midpoint experimentalTemp,experimental_temp -value,value -openState,open_states -property,properties -k,k -zSize,z_size -tau_refrac,tau_refrac -metric,metric -compoundInput,compound_inputs -ionChannel,ion_channel -vr,vr -vt,vt -iafTauRefCell,iaf_tau_ref_cells -id,id -basePyNNCell,basePyNNCell 
-scalingConc,scaling_conc -pinskyRinzelCA3Cell,pinsky_rinzel_ca3_cells +q10Factor,q10_factor +fixedQ10,fixed_q10 +fractionalConductance,fractional_conductance +timeCourse,time_course +steadyState,steady_state +q10Settings,q10_settings +instances,instances +subGate,sub_gates +reverseRate,reverse_rate +forwardRate,forward_rate +gateTypes,gate_types tauInfTransition,tau_inf_transition -member,members +reverseTransition,reverse_transition +forwardTransition,forward_transition +openState,open_states +closedState,closed_states +gateFractional,gate_fractionals +gateKS,gate_kses gateHHInstantaneous,gate_hh_instantaneouses -populationTypes,population_types -conditioningVoltage,conditioning_voltage -ibase,ibase -v0,v0 -ion,ion -variableParameter,variable_parameters -unqualified,unqualified -iSoma,i_soma -g_leak,g_leak -i,i -channelDensityVShift,channel_density_v_shifts -biophysicalProperties2CaPools,biophysical_properties2_ca_pools -fractionalConductance,fractional_conductance -dimension,dimension -http://www.neuroml.org/schema/neuroml2,http://www.neuroml.org/schema/neuroml2 -tsodyksMarkramDepMechanism,tsodyks_markram_dep_mechanism -proximal,proximal -fitzHughNagumo1969Cell,fitz_hugh_nagumo1969_cells -segmentId,segment_id +gateHHratesTauInf,gate_h_hrates_tau_infs +gateHHratesInf,gate_h_hrates_infs +gateHHtauInf,gate_hh_tau_infs +gateHHratesTau,gate_h_hrates_taus +gateHHrates,gate_hh_rates +channelTypes,channel_types +ionChannelHH,ion_channel_hhs +ionChannelPassive,ion_channel_passive +gate,gates q10ConductanceScaling,q10_conductance_scalings -rampGenerator,ramp_generators -source,source -extends,extends -location,location -input,input -voltageConcDepBlockMechanism,voltage_conc_dep_block_mechanism -postComponent,post_component -initReleaseProb,init_release_prob -maxISI,max_isi -doubleSynapse,double_synapses -grid,grid -IF_cond_alpha,IF_cond_alpha -d,d -v_thresh,v_thresh -timeCourse,time_course +fixedFactorConcentrationModel,fixed_factor_concentration_models 
+transientPoissonFiringSynapse,transient_poisson_firing_synapses +poissonFiringSynapse,poisson_firing_synapses +spikeGeneratorRefPoisson,spike_generator_ref_poissons +spikeGeneratorPoisson,spike_generator_poissons +spikeGeneratorRandom,spike_generator_randoms spikeGenerator,spike_generators timedSynapticInput,timed_synaptic_inputs -gateHHratesTau,gate_h_hrates_taus -continuousConnection,continuous_connections -duration,duration -scale,scale -metaid,metaid -networkWithTemperature,network_with_temperature -fractionAlong,fraction_along -normalizationEnd,normalization_end -synapse1Path,synapse1_path -electricalProjection,electrical_projections -IF_curr_exp,IF_curr_exp -gateHHratesInf,gate_h_hrates_infs -simpleSeriesResistance,simple_series_resistance -e_rev_Na,e_rev_Na -IF_cond_exp,IF_cond_exp -tau_syn,tau_syn -channelDensityGHK2,channel_density_ghk2s -e_rev_E,e_rev_E -e_rev_I,e_rev_I -e_rev_K,e_rev_K -preSegment,pre_segment -v_init,v_init -gNmda,g_nmda -scalingVolt,scaling_volt -initialExtConcentration,initial_ext_concentration -expThreeSynapse,exp_three_synapses -destination,destination -startAmplitude,start_amplitude -unbounded,unboundeds +spikeArray,spike_arrays +voltageClampTriple,voltage_clamp_triples +voltageClamp,voltage_clamps compoundInputDL,compound_input_dls -xs:positiveInteger,xs:positive_integer -membraneProperties2CaPools,membrane_properties2_ca_pools +compoundInput,compound_inputs +alphaCurrSynapse,alpha_curr_synapses +expCurrSynapse,exp_curr_synapses +alphaCondSynapse,alpha_cond_synapses +expCondSynapse,exp_cond_synapses +gradedSynapse,graded_synapses +linearGradedSynapse,linear_graded_synapses +silentSynapse,silent_synapses +gapJunction,gap_junctions +doubleSynapse,double_synapses +blockingPlasticSynapse,blocking_plastic_synapses +expThreeSynapse,exp_three_synapses +expTwoSynapse,exp_two_synapses +expOneSynapse,exp_one_synapses +alphaSynapse,alpha_synapses +alphaCurrentSynapse,alpha_current_synapses +pinskyRinzelCA3Cell,pinsky_rinzel_ca3_cells 
+fitzHughNagumo1969Cell,fitz_hugh_nagumo1969_cells +fitzHughNagumoCell,fitz_hugh_nagumo_cells +adExIaFCell,ad_ex_ia_f_cells +izhikevich2007Cell,izhikevich2007_cells +izhikevichCell,izhikevich_cells +iafRefCell,iaf_ref_cells +iafCell,iaf_cells +iafTauRefCell,iaf_tau_ref_cells +iafTauCell,iaf_tau_cells +baseCell,base_cells +cell2CaPools,cell2_ca_poolses +cell,cells +href,href +xs:anyURI,xs:any_uri +ionChannelKS,ion_channel_kses +ionChannelVShift,ion_channel_v_shifts +neuroml,neuroml +condition,condition defaultValue,default_value -inputW,input_ws -continuousConnectionInstance,continuous_connection_instances -start,start -tauDecay1,tau_decay1 -leakConductance,leak_conductance -tauDecay2,tau_decay2 exposure,exposure -subTree,sub_trees +description,description +dimension,dimension +extends,extends +tag,tag +xs:positiveInteger,xs:positive_integer +int,int javax.xml.bind.DatatypeConverter.parseInt,javax.xml.bind._datatype_converter.parse_int -delta,delta -iafTauCell,iaf_tau_cells -pulseGenerator,pulse_generators -iafRefCell,iaf_ref_cells -reset,reset -j,j -postSegment,post_segment javax.xml.bind.DatatypeConverter.printInt,javax.xml.bind._datatype_converter.print_int -z,z -xs:double,xs:double -required,required -int,int -gAmpa,g_ampa -electricalConnection,electrical_connections -synapticConnection,synaptic_connections -gateTypes,gate_types -channelDensityNonUniformNernst,channel_density_non_uniform_nernsts -permeability,permeability -restingConc,resting_conc -basedOn,based_on -leakReversal,leak_reversal -channelDensityNonUniformGHK,channel_density_non_uniform_ghks -gbar_K,gbar_K -initMembPotential,init_memb_potentials -postCell,post_cell -variable,variable -structure,structure -spikeArray,spike_arrays -blockingPlasticSynapse,blocking_plastic_synapses -minimumISI,minimum_isi -time,time +IF_curr_alpha,IF_curr_alpha +IF_curr_exp,IF_curr_exp +IF_cond_alpha,IF_cond_alpha +IF_cond_exp,IF_cond_exp diff --git a/neuroml/nml/nml.py b/neuroml/nml/nml.py index 9456bfb3..d6e585e8 
100644 --- a/neuroml/nml/nml.py +++ b/neuroml/nml/nml.py @@ -2,37 +2,41 @@ # -*- coding: utf-8 -*- # -# Generated Fri Mar 26 16:21:07 2021 by generateDS.py version 2.30.11. -# Python 2.7.18 (default, Feb 3 2021, 00:00:00) [GCC 11.0.0 20210130 (Red Hat 11.0.0-0)] +# Generated Fri Nov 19 11:14:04 2021 by generateDS.py version 2.40.5. +# Python 3.10.0 (default, Oct 4 2021, 00:00:00) [GCC 11.2.1 20210728 (Red Hat 11.2.1-1)] # # Command line options: # ('-o', 'nml.py') # ('--use-getter-setter', 'none') -# ('--silence', '') -# ('--user-methods', 'helper_methods') +# ('--user-methods', 'helper_methods.py') # # Command line arguments: -# NeuroML_v2.1.xsd +# NeuroML_v2.2.xsd # # Command line: -# /home/asinha/.local/share/virtualenvs/generateds-2.7/bin/generateDS.py -o "nml.py" --use-getter-setter="none" --silence --user-methods="helper_methods" NeuroML_v2.1.xsd +# /home/asinha/.local/share/virtualenvs/generateds-310/bin/generateDS -o "nml.py" --use-getter-setter="none" --user-methods="helper_methods.py" NeuroML_v2.2.xsd # # Current working directory (os.getcwd()): # nml # import sys + +try: + ModulenotfoundExp_ = ModuleNotFoundError +except NameError: + ModulenotfoundExp_ = ImportError +from six.moves import zip_longest +import os import re as re_ import base64 import datetime as datetime_ -import warnings as warnings_ -try: - from lxml import etree as etree_ -except ImportError: - from xml.etree import ElementTree as etree_ +import decimal as decimal_ +from lxml import etree as etree_ Validate_simpletypes_ = True +SaveElementTreeNode = True if sys.version_info.major == 2: BaseStrType_ = basestring else: @@ -48,9 +52,15 @@ def parsexml_(infile, parser=None, **kwargs): except AttributeError: # fallback to xml.etree parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass doc = etree_.parse(infile, parser=parser, **kwargs) return doc + def parsexmlstring_(instring, parser=None, **kwargs): if 
parser is None: # Use the lxml ElementTree compatible parser so that, e.g., @@ -63,6 +73,7 @@ def parsexmlstring_(instring, parser=None, **kwargs): element = etree_.fromstring(instring, parser=parser, **kwargs) return element + # # Namespace prefix definition table (and other attributes, too) # @@ -72,7 +83,7 @@ def parsexmlstring_(instring, parser=None, **kwargs): # definitions. The export method for any class for which there is # a namespace prefix definition, will export that definition in the # XML representation of that element. See the export method of -# any generated element type class for a example of the use of this +# any generated element type class for an example of the use of this # table. # A sample table is: # @@ -83,11 +94,76 @@ def parsexmlstring_(instring, parser=None, **kwargs): # "ElementtypeB": "http://www.xxx.com/namespaceB", # } # +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the _exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# try: from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ -except ImportError: +except ModulenotfoundExp_: GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import ( + GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_, + ) +except ModulenotfoundExp_: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# You can replace the following class definition by defining an +# importable module named "generatedscollector" containing a class +# named "GdsCollector". 
See the default class definition below for +# clues about the possible content of that class. +# +try: + from generatedscollector import GdsCollector as GdsCollector_ +except ModulenotfoundExp_: + + class GdsCollector_(object): + def __init__(self, messages=None): + if messages is None: + self.messages = [] + else: + self.messages = messages + + def add_message(self, msg): + self.messages.append(msg) + + def get_messages(self): + return self.messages + + def clear_messages(self): + self.messages = [] + + def print_messages(self): + for msg in self.messages: + print("Warning: {}".format(msg)) + + def write_messages(self, outstream): + for msg in self.messages: + outstream.write("Warning: {}\n".format(msg)) + + +# +# The super-class for enum types +# + +try: + from enum import Enum +except ModulenotfoundExp_: + Enum = object # # The root super-class for element type classes @@ -98,97 +174,262 @@ def parsexmlstring_(instring, parser=None, **kwargs): try: from generatedssuper import GeneratedsSuper -except ImportError as exp: - - class GeneratedsSuper(object): - tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') +except ModulenotfoundExp_ as exp: + try: + from generatedssupersuper import GeneratedsSuperSuper + except ModulenotfoundExp_ as exp: + + class GeneratedsSuperSuper(object): + pass + + class GeneratedsSuper(GeneratedsSuperSuper): + __hash__ = object.__hash__ + tzoff_pattern = re_.compile(r"(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$") + class _FixedOffsetTZ(datetime_.tzinfo): def __init__(self, offset, name): self.__offset = datetime_.timedelta(minutes=offset) self.__name = name + def utcoffset(self, dt): return self.__offset + def tzname(self, dt): return self.__name + def dst(self, dt): return None - def gds_format_string(self, input_data, input_name=''): + + def __str__(self): + settings = { + "str_pretty_print": True, + "str_indent_level": 0, + "str_namespaceprefix": "", + "str_name": None, + "str_namespacedefs": "", + } + for n in settings: + if 
hasattr(self, n): + setattr(settings[n], self[n]) + if sys.version_info.major == 2: + from StringIO import StringIO + else: + from io import StringIO + + output = StringIO() + self.export( + output, + settings["str_indent_level"], + pretty_print=settings["str_pretty_print"], + namespaceprefix_=settings["str_namespaceprefix"], + name_=settings["str_name"], + namespacedef_=settings["str_namespacedefs"], + ) + strval = output.getvalue() + output.close() + return strval + + def gds_format_string(self, input_data, input_name=""): + return input_data + + def gds_parse_string(self, input_data, node=None, input_name=""): return input_data - def gds_validate_string(self, input_data, node=None, input_name=''): + + def gds_validate_string(self, input_data, node=None, input_name=""): if not input_data: - return '' + return "" else: return input_data - def gds_format_base64(self, input_data, input_name=''): - return base64.b64encode(input_data) - def gds_validate_base64(self, input_data, node=None, input_name=''): - return input_data - def gds_format_integer(self, input_data, input_name=''): - return '%d' % input_data - def gds_validate_integer(self, input_data, node=None, input_name=''): + + def gds_format_base64(self, input_data, input_name=""): + return base64.b64encode(input_data).decode("ascii") + + def gds_validate_base64(self, input_data, node=None, input_name=""): return input_data - def gds_format_integer_list(self, input_data, input_name=''): - return '%s' % ' '.join(input_data) - def gds_validate_integer_list( - self, input_data, node=None, input_name=''): + + def gds_format_integer(self, input_data, input_name=""): + return "%d" % int(input_data) + + def gds_parse_integer(self, input_data, node=None, input_name=""): + try: + ival = int(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, "Requires integer value: %s" % exp) + return ival + + def gds_validate_integer(self, input_data, node=None, input_name=""): + try: + value = 
int(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires integer value") + return value + + def gds_format_integer_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_integer_list(self, input_data, node=None, input_name=""): values = input_data.split() for value in values: try: int(value) except (TypeError, ValueError): - raise_parse_error(node, 'Requires sequence of integers') + raise_parse_error(node, "Requires sequence of integer values") return values - def gds_format_float(self, input_data, input_name=''): - return ('%.15f' % input_data).rstrip('0') - def gds_validate_float(self, input_data, node=None, input_name=''): - return input_data - def gds_format_float_list(self, input_data, input_name=''): - return '%s' % ' '.join(input_data) - def gds_validate_float_list( - self, input_data, node=None, input_name=''): + + def gds_format_float(self, input_data, input_name=""): + return ("%.15f" % float(input_data)).rstrip("0") + + def gds_parse_float(self, input_data, node=None, input_name=""): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, "Requires float or double value: %s" % exp) + return fval_ + + def gds_validate_float(self, input_data, node=None, input_name=""): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires float value") + return value + + def gds_format_float_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_float_list(self, input_data, node=None, input_name=""): values = input_data.split() for value in values: try: float(value) except (TypeError, ValueError): - raise_parse_error(node, 'Requires 
sequence of floats') + raise_parse_error(node, "Requires sequence of float values") return values - def gds_format_double(self, input_data, input_name=''): - return '%e' % input_data - def gds_validate_double(self, input_data, node=None, input_name=''): - return input_data - def gds_format_double_list(self, input_data, input_name=''): - return '%s' % ' '.join(input_data) - def gds_validate_double_list( - self, input_data, node=None, input_name=''): + + def gds_format_decimal(self, input_data, input_name=""): + return_value = "%s" % input_data + if "." in return_value: + return_value = return_value.rstrip("0") + if return_value.endswith("."): + return_value = return_value.rstrip(".") + return return_value + + def gds_parse_decimal(self, input_data, node=None, input_name=""): + try: + decimal_value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires decimal value") + return decimal_value + + def gds_validate_decimal(self, input_data, node=None, input_name=""): + try: + value = decimal_.Decimal(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires decimal value") + return value + + def gds_format_decimal_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return " ".join([self.gds_format_decimal(item) for item in input_data]) + + def gds_validate_decimal_list(self, input_data, node=None, input_name=""): + values = input_data.split() + for value in values: + try: + decimal_.Decimal(value) + except (TypeError, ValueError): + raise_parse_error(node, "Requires sequence of decimal values") + return values + + def gds_format_double(self, input_data, input_name=""): + return "%s" % input_data + + def gds_parse_double(self, input_data, node=None, input_name=""): + try: + fval_ = float(input_data) + except (TypeError, ValueError) as exp: + raise_parse_error(node, "Requires double or float value: 
%s" % exp) + return fval_ + + def gds_validate_double(self, input_data, node=None, input_name=""): + try: + value = float(input_data) + except (TypeError, ValueError): + raise_parse_error(node, "Requires double or float value") + return value + + def gds_format_double_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_double_list(self, input_data, node=None, input_name=""): values = input_data.split() for value in values: try: float(value) except (TypeError, ValueError): - raise_parse_error(node, 'Requires sequence of doubles') + raise_parse_error( + node, "Requires sequence of double or float values" + ) return values - def gds_format_boolean(self, input_data, input_name=''): - return ('%s' % input_data).lower() - def gds_validate_boolean(self, input_data, node=None, input_name=''): + + def gds_format_boolean(self, input_data, input_name=""): + return ("%s" % input_data).lower() + + def gds_parse_boolean(self, input_data, node=None, input_name=""): + if input_data in ("true", "1"): + bval = True + elif input_data in ("false", "0"): + bval = False + else: + raise_parse_error(node, "Requires boolean value") + return bval + + def gds_validate_boolean(self, input_data, node=None, input_name=""): + if input_data not in ( + True, + 1, + False, + 0, + ): + raise_parse_error( + node, "Requires boolean value " "(one of True, 1, False, 0)" + ) return input_data - def gds_format_boolean_list(self, input_data, input_name=''): - return '%s' % ' '.join(input_data) - def gds_validate_boolean_list( - self, input_data, node=None, input_name=''): + + def gds_format_boolean_list(self, input_data, input_name=""): + if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): + input_data = [str(s) for s in input_data] + return "%s" % " ".join(input_data) + + def gds_validate_boolean_list(self, input_data, 
node=None, input_name=""): values = input_data.split() for value in values: - if value not in ('true', '1', 'false', '0', ): + value = self.gds_parse_boolean(value, node, input_name) + if value not in ( + True, + 1, + False, + 0, + ): raise_parse_error( node, - 'Requires sequence of booleans ' - '("true", "1", "false", "0")') + "Requires sequence of boolean values " + "(one of True, 1, False, 0)", + ) return values - def gds_validate_datetime(self, input_data, node=None, input_name=''): + + def gds_validate_datetime(self, input_data, node=None, input_name=""): return input_data - def gds_format_datetime(self, input_data, input_name=''): + + def gds_format_datetime(self, input_data, input_name=""): if input_data.microsecond == 0: - _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + _svalue = "%04d-%02d-%02dT%02d:%02d:%02d" % ( input_data.year, input_data.month, input_data.day, @@ -197,63 +438,65 @@ def gds_format_datetime(self, input_data, input_name=''): input_data.second, ) else: - _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + _svalue = "%04d-%02d-%02dT%02d:%02d:%02d.%s" % ( input_data.year, input_data.month, input_data.day, input_data.hour, input_data.minute, input_data.second, - ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ("%f" % (float(input_data.microsecond) / 1000000))[2:], ) if input_data.tzinfo is not None: tzoff = input_data.tzinfo.utcoffset(input_data) if tzoff is not None: total_seconds = tzoff.seconds + (86400 * tzoff.days) if total_seconds == 0: - _svalue += 'Z' + _svalue += "Z" else: if total_seconds < 0: - _svalue += '-' + _svalue += "-" total_seconds *= -1 else: - _svalue += '+' + _svalue += "+" hours = total_seconds // 3600 minutes = (total_seconds - (hours * 3600)) // 60 - _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + _svalue += "{0:02d}:{1:02d}".format(hours, minutes) return _svalue + @classmethod def gds_parse_datetime(cls, input_data): tz = None - if input_data[-1] == 'Z': - tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') 
+ if input_data[-1] == "Z": + tz = GeneratedsSuper._FixedOffsetTZ(0, "UTC") input_data = input_data[:-1] else: results = GeneratedsSuper.tzoff_pattern.search(input_data) if results is not None: - tzoff_parts = results.group(2).split(':') + tzoff_parts = results.group(2).split(":") tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) - if results.group(1) == '-': + if results.group(1) == "-": tzoff *= -1 - tz = GeneratedsSuper._FixedOffsetTZ( - tzoff, results.group(0)) + tz = GeneratedsSuper._FixedOffsetTZ(tzoff, results.group(0)) input_data = input_data[:-6] - time_parts = input_data.split('.') + time_parts = input_data.split(".") if len(time_parts) > 1: - micro_seconds = int(float('0.' + time_parts[1]) * 1000000) - input_data = '%s.%s' % ( - time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) - dt = datetime_.datetime.strptime( - input_data, '%Y-%m-%dT%H:%M:%S.%f') + micro_seconds = int(float("0." + time_parts[1]) * 1000000) + input_data = "%s.%s" % ( + time_parts[0], + "{}".format(micro_seconds).rjust(6, "0"), + ) + dt = datetime_.datetime.strptime(input_data, "%Y-%m-%dT%H:%M:%S.%f") else: - dt = datetime_.datetime.strptime( - input_data, '%Y-%m-%dT%H:%M:%S') + dt = datetime_.datetime.strptime(input_data, "%Y-%m-%dT%H:%M:%S") dt = dt.replace(tzinfo=tz) return dt - def gds_validate_date(self, input_data, node=None, input_name=''): + + def gds_validate_date(self, input_data, node=None, input_name=""): return input_data - def gds_format_date(self, input_data, input_name=''): - _svalue = '%04d-%02d-%02d' % ( + + def gds_format_date(self, input_data, input_name=""): + _svalue = "%04d-%02d-%02d" % ( input_data.year, input_data.month, input_data.day, @@ -264,71 +507,73 @@ def gds_format_date(self, input_data, input_name=''): if tzoff is not None: total_seconds = tzoff.seconds + (86400 * tzoff.days) if total_seconds == 0: - _svalue += 'Z' + _svalue += "Z" else: if total_seconds < 0: - _svalue += '-' + _svalue += "-" total_seconds *= -1 else: - _svalue += '+' + 
_svalue += "+" hours = total_seconds // 3600 minutes = (total_seconds - (hours * 3600)) // 60 - _svalue += '{0:02d}:{1:02d}'.format( - hours, minutes) + _svalue += "{0:02d}:{1:02d}".format(hours, minutes) except AttributeError: pass return _svalue + @classmethod def gds_parse_date(cls, input_data): tz = None - if input_data[-1] == 'Z': - tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + if input_data[-1] == "Z": + tz = GeneratedsSuper._FixedOffsetTZ(0, "UTC") input_data = input_data[:-1] else: results = GeneratedsSuper.tzoff_pattern.search(input_data) if results is not None: - tzoff_parts = results.group(2).split(':') + tzoff_parts = results.group(2).split(":") tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) - if results.group(1) == '-': + if results.group(1) == "-": tzoff *= -1 - tz = GeneratedsSuper._FixedOffsetTZ( - tzoff, results.group(0)) + tz = GeneratedsSuper._FixedOffsetTZ(tzoff, results.group(0)) input_data = input_data[:-6] - dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = datetime_.datetime.strptime(input_data, "%Y-%m-%d") dt = dt.replace(tzinfo=tz) return dt.date() - def gds_validate_time(self, input_data, node=None, input_name=''): + + def gds_validate_time(self, input_data, node=None, input_name=""): return input_data - def gds_format_time(self, input_data, input_name=''): + + def gds_format_time(self, input_data, input_name=""): if input_data.microsecond == 0: - _svalue = '%02d:%02d:%02d' % ( + _svalue = "%02d:%02d:%02d" % ( input_data.hour, input_data.minute, input_data.second, ) else: - _svalue = '%02d:%02d:%02d.%s' % ( + _svalue = "%02d:%02d:%02d.%s" % ( input_data.hour, input_data.minute, input_data.second, - ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ("%f" % (float(input_data.microsecond) / 1000000))[2:], ) if input_data.tzinfo is not None: tzoff = input_data.tzinfo.utcoffset(input_data) if tzoff is not None: total_seconds = tzoff.seconds + (86400 * tzoff.days) if total_seconds == 0: - _svalue += 'Z' + _svalue 
+= "Z" else: if total_seconds < 0: - _svalue += '-' + _svalue += "-" total_seconds *= -1 else: - _svalue += '+' + _svalue += "+" hours = total_seconds // 3600 minutes = (total_seconds - (hours * 3600)) // 60 - _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + _svalue += "{0:02d}:{1:02d}".format(hours, minutes) return _svalue + def gds_validate_simple_patterns(self, patterns, target): # pat is a list of lists of strings/patterns. # The target value must match at least one of the patterns @@ -345,90 +590,199 @@ def gds_validate_simple_patterns(self, patterns, target): found1 = False break return found1 + @classmethod def gds_parse_time(cls, input_data): tz = None - if input_data[-1] == 'Z': - tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + if input_data[-1] == "Z": + tz = GeneratedsSuper._FixedOffsetTZ(0, "UTC") input_data = input_data[:-1] else: results = GeneratedsSuper.tzoff_pattern.search(input_data) if results is not None: - tzoff_parts = results.group(2).split(':') + tzoff_parts = results.group(2).split(":") tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) - if results.group(1) == '-': + if results.group(1) == "-": tzoff *= -1 - tz = GeneratedsSuper._FixedOffsetTZ( - tzoff, results.group(0)) + tz = GeneratedsSuper._FixedOffsetTZ(tzoff, results.group(0)) input_data = input_data[:-6] - if len(input_data.split('.')) > 1: - dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + if len(input_data.split(".")) > 1: + dt = datetime_.datetime.strptime(input_data, "%H:%M:%S.%f") else: - dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = datetime_.datetime.strptime(input_data, "%H:%M:%S") dt = dt.replace(tzinfo=tz) return dt.time() + + def gds_check_cardinality_( + self, value, input_name, min_occurs=0, max_occurs=1, required=None + ): + if value is None: + length = 0 + elif isinstance(value, list): + length = len(value) + else: + length = 1 + if required is not None: + if required and length < 1: + self.gds_collector_.add_message( + 
"Required value {}{} is missing".format( + input_name, self.gds_get_node_lineno_() + ) + ) + if length < min_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is below " + "the minimum allowed, " + "expected at least {}, found {}".format( + input_name, self.gds_get_node_lineno_(), min_occurs, length + ) + ) + elif length > max_occurs: + self.gds_collector_.add_message( + "Number of values for {}{} is above " + "the maximum allowed, " + "expected at most {}, found {}".format( + input_name, self.gds_get_node_lineno_(), max_occurs, length + ) + ) + + def gds_validate_builtin_ST_( + self, + validator, + value, + input_name, + min_occurs=None, + max_occurs=None, + required=None, + ): + if value is not None: + try: + validator(value, input_name=input_name) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + + def gds_validate_defined_ST_( + self, + validator, + value, + input_name, + min_occurs=None, + max_occurs=None, + required=None, + ): + if value is not None: + try: + validator(value) + except GDSParseError as parse_error: + self.gds_collector_.add_message(str(parse_error)) + def gds_str_lower(self, instring): return instring.lower() + def get_path_(self, node): path_list = [] self.get_path_list_(node, path_list) path_list.reverse() - path = '/'.join(path_list) + path = "/".join(path_list) return path - Tag_strip_pattern_ = re_.compile(r'\{.*\}') + + Tag_strip_pattern_ = re_.compile(r"\{.*\}") + def get_path_list_(self, node, path_list): if node is None: return - tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + tag = GeneratedsSuper.Tag_strip_pattern_.sub("", node.tag) if tag: path_list.append(tag) self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): class_obj1 = default_class - if 'xsi' in node.nsmap: - classname = node.get('{%s}type' % node.nsmap['xsi']) + if "xsi" in node.nsmap: + classname = node.get("{%s}type" % node.nsmap["xsi"]) if 
classname is not None: - names = classname.split(':') + names = classname.split(":") if len(names) == 2: classname = names[1] class_obj2 = globals().get(classname) if class_obj2 is not None: class_obj1 = class_obj2 return class_obj1 + def gds_build_any(self, node, type_name=None): - return None + # provide default value in case option --disable-xml is used. + content = "" + content = etree_.tostring(node, encoding="unicode") + return content + @classmethod def gds_reverse_node_mapping(cls, mapping): return dict(((v, k) for k, v in mapping.items())) + @staticmethod def gds_encode(instring): if sys.version_info.major == 2: if ExternalEncoding: encoding = ExternalEncoding else: - encoding = 'utf-8' + encoding = "utf-8" return instring.encode(encoding) else: return instring + @staticmethod def convert_unicode(instring): if isinstance(instring, str): result = quote_xml(instring) elif sys.version_info.major == 2 and isinstance(instring, unicode): - result = quote_xml(instring).encode('utf8') + result = quote_xml(instring).encode("utf8") else: result = GeneratedsSuper.gds_encode(str(instring)) return result + def __eq__(self, other): + def excl_select_objs_(obj): + return obj[0] != "parent_object_" and obj[0] != "gds_collector_" + if type(self) != type(other): return False - return self.__dict__ == other.__dict__ + return all( + x == y + for x, y in zip_longest( + filter(excl_select_objs_, self.__dict__.items()), + filter(excl_select_objs_, other.__dict__.items()), + ) + ) + def __ne__(self, other): return not self.__eq__(other) - + + # Django ETL transform hooks. + def gds_djo_etl_transform(self): + pass + + def gds_djo_etl_transform_db_obj(self, dbobj): + pass + + # SQLAlchemy ETL transform hooks. 
+ def gds_sqa_etl_transform(self): + return 0, None + + def gds_sqa_etl_transform_db_obj(self, dbobj): + pass + + def gds_get_node_lineno_(self): + if ( + hasattr(self, "gds_elementtree_node_") + and self.gds_elementtree_node_ is not None + ): + return " near line {}".format(self.gds_elementtree_node_.sourceline) + else: + return "" + def getSubclassFromModule_(module, class_): - '''Get the subclass of a class from a specific module.''' - name = class_.__name__ + 'Sub' + """Get the subclass of a class from a specific module.""" + name = class_.__name__ + "Sub" if hasattr(module, name): return getattr(module, name) else: @@ -454,10 +808,14 @@ def getSubclassFromModule_(module, class_): # Globals # -ExternalEncoding = '' -Tag_pattern_ = re_.compile(r'({.*})?(.*)') +ExternalEncoding = "" +# Set this to false in order to deactivate during export, the use of +# name space prefixes captured from the input document. +UseCapturedNS_ = True +CapturedNsmap_ = {} +Tag_pattern_ = re_.compile(r"({.*})?(.*)") String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") -Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +Namespace_extract_pat_ = re_.compile(r"{(.*)}(.*)") CDATA_pattern_ = re_.compile(r"", re_.DOTALL) # Change this to redirect the generated superclass module to use a @@ -472,21 +830,21 @@ def getSubclassFromModule_(module, class_): def showIndent(outfile, level, pretty_print=True): if pretty_print: for idx in range(level): - outfile.write(' ') + outfile.write(" ") def quote_xml(inStr): "Escape markup chars, but do not modify CDATA sections." 
if not inStr: - return '' - s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) - s2 = '' + return "" + s1 = isinstance(inStr, BaseStrType_) and inStr or "%s" % inStr + s2 = "" pos = 0 matchobjects = CDATA_pattern_.finditer(s1) for mo in matchobjects: - s3 = s1[pos:mo.start()] + s3 = s1[pos : mo.start()] s2 += quote_xml_aux(s3) - s2 += s1[mo.start():mo.end()] + s2 += s1[mo.start() : mo.end()] pos = mo.end() s3 = s1[pos:] s2 += quote_xml_aux(s3) @@ -494,17 +852,17 @@ def quote_xml(inStr): def quote_xml_aux(inStr): - s1 = inStr.replace('&', '&') - s1 = s1.replace('<', '<') - s1 = s1.replace('>', '>') + s1 = inStr.replace("&", "&") + s1 = s1.replace("<", "<") + s1 = s1.replace(">", ">") return s1 def quote_attrib(inStr): - s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) - s1 = s1.replace('&', '&') - s1 = s1.replace('<', '<') - s1 = s1.replace('>', '>') + s1 = isinstance(inStr, BaseStrType_) and inStr or "%s" % inStr + s1 = s1.replace("&", "&") + s1 = s1.replace("<", "<") + s1 = s1.replace(">", ">") if '"' in s1: if "'" in s1: s1 = '"%s"' % s1.replace('"', """) @@ -518,14 +876,14 @@ def quote_attrib(inStr): def quote_python(inStr): s1 = inStr if s1.find("'") == -1: - if s1.find('\n') == -1: + if s1.find("\n") == -1: return "'%s'" % s1 else: return "'''%s'''" % s1 else: if s1.find('"') != -1: s1 = s1.replace('"', '\\"') - if s1.find('\n') == -1: + if s1.find("\n") == -1: return '"%s"' % s1 else: return '"""%s"""' % s1 @@ -535,7 +893,7 @@ def get_all_text_(node): if node.text is not None: text = node.text else: - text = '' + text = "" for child in node: if child.tail is not None: text += child.tail @@ -544,24 +902,42 @@ def get_all_text_(node): def find_attr_value_(attr_name, node): attrs = node.attrib - attr_parts = attr_name.split(':') + attr_parts = attr_name.split(":") value = None if len(attr_parts) == 1: value = attrs.get(attr_name) elif len(attr_parts) == 2: prefix, name = attr_parts - namespace = node.nsmap.get(prefix) + if prefix == 
"xml": + namespace = "http://www.w3.org/XML/1998/namespace" + else: + namespace = node.nsmap.get(prefix) if namespace is not None: - value = attrs.get('{%s}%s' % (namespace, name, )) + value = attrs.get( + "{%s}%s" + % ( + namespace, + name, + ) + ) return value +def encode_str_2_3(instr): + return instr + + class GDSParseError(Exception): pass def raise_parse_error(node, msg): - msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + if node is not None: + msg = "%s (element %s/line %d)" % ( + msg, + node.tag, + node.sourceline, + ) raise GDSParseError(msg) @@ -581,52 +957,58 @@ class MixedContainer: TypeDouble = 6 TypeBoolean = 7 TypeBase64 = 8 + def __init__(self, category, content_type, name, value): self.category = category self.content_type = content_type self.name = name self.value = value + def getCategory(self): return self.category + def getContenttype(self, content_type): return self.content_type + def getValue(self): return self.value + def getName(self): return self.name - def export(self, outfile, level, name, namespace, - pretty_print=True): + + def export(self, outfile, level, name, namespace, pretty_print=True): if self.category == MixedContainer.CategoryText: # Prevent exporting empty content as empty lines. 
if self.value.strip(): outfile.write(self.value) elif self.category == MixedContainer.CategorySimple: self.exportSimple(outfile, level, name) - else: # category == MixedContainer.CategoryComplex + else: # category == MixedContainer.CategoryComplex self.value.export( - outfile, level, namespace, name_=name, - pretty_print=pretty_print) + outfile, level, namespace, name_=name, pretty_print=pretty_print + ) + def exportSimple(self, outfile, level, name): if self.content_type == MixedContainer.TypeString: - outfile.write('<%s>%s' % ( - self.name, self.value, self.name)) - elif self.content_type == MixedContainer.TypeInteger or \ - self.content_type == MixedContainer.TypeBoolean: - outfile.write('<%s>%d' % ( - self.name, self.value, self.name)) - elif self.content_type == MixedContainer.TypeFloat or \ - self.content_type == MixedContainer.TypeDecimal: - outfile.write('<%s>%f' % ( - self.name, self.value, self.name)) + outfile.write("<%s>%s" % (self.name, self.value, self.name)) + elif ( + self.content_type == MixedContainer.TypeInteger + or self.content_type == MixedContainer.TypeBoolean + ): + outfile.write("<%s>%d" % (self.name, self.value, self.name)) + elif ( + self.content_type == MixedContainer.TypeFloat + or self.content_type == MixedContainer.TypeDecimal + ): + outfile.write("<%s>%f" % (self.name, self.value, self.name)) elif self.content_type == MixedContainer.TypeDouble: - outfile.write('<%s>%g' % ( - self.name, self.value, self.name)) + outfile.write("<%s>%g" % (self.name, self.value, self.name)) elif self.content_type == MixedContainer.TypeBase64: - outfile.write('<%s>%s' % ( - self.name, - base64.b64encode(self.value), - self.name)) - def to_etree(self, element): + outfile.write( + "<%s>%s" % (self.name, base64.b64encode(self.value), self.name) + ) + + def to_etree(self, element, mapping_=None, reverse_mapping_=None, nsmap_=None): if self.category == MixedContainer.CategoryText: # Prevent exporting empty content as empty lines. 
if self.value.strip(): @@ -641,77 +1023,119 @@ def to_etree(self, element): else: element.text += self.value elif self.category == MixedContainer.CategorySimple: - subelement = etree_.SubElement( - element, '%s' % self.name) + subelement = etree_.SubElement(element, "%s" % self.name) subelement.text = self.to_etree_simple() - else: # category == MixedContainer.CategoryComplex + else: # category == MixedContainer.CategoryComplex self.value.to_etree(element) - def to_etree_simple(self): + + def to_etree_simple(self, mapping_=None, reverse_mapping_=None, nsmap_=None): if self.content_type == MixedContainer.TypeString: text = self.value - elif (self.content_type == MixedContainer.TypeInteger or - self.content_type == MixedContainer.TypeBoolean): - text = '%d' % self.value - elif (self.content_type == MixedContainer.TypeFloat or - self.content_type == MixedContainer.TypeDecimal): - text = '%f' % self.value + elif ( + self.content_type == MixedContainer.TypeInteger + or self.content_type == MixedContainer.TypeBoolean + ): + text = "%d" % self.value + elif ( + self.content_type == MixedContainer.TypeFloat + or self.content_type == MixedContainer.TypeDecimal + ): + text = "%f" % self.value elif self.content_type == MixedContainer.TypeDouble: - text = '%g' % self.value + text = "%g" % self.value elif self.content_type == MixedContainer.TypeBase64: - text = '%s' % base64.b64encode(self.value) + text = "%s" % base64.b64encode(self.value) return text + def exportLiteral(self, outfile, level, name): if self.category == MixedContainer.CategoryText: showIndent(outfile, level) outfile.write( - 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( - self.category, self.content_type, - self.name, self.value)) + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' + % (self.category, self.content_type, self.name, self.value) + ) elif self.category == MixedContainer.CategorySimple: showIndent(outfile, level) outfile.write( - 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( - self.category, 
self.content_type, - self.name, self.value)) - else: # category == MixedContainer.CategoryComplex + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' + % (self.category, self.content_type, self.name, self.value) + ) + else: # category == MixedContainer.CategoryComplex showIndent(outfile, level) outfile.write( - 'model_.MixedContainer(%d, %d, "%s",\n' % ( - self.category, self.content_type, self.name,)) + 'model_.MixedContainer(%d, %d, "%s",\n' + % ( + self.category, + self.content_type, + self.name, + ) + ) self.value.exportLiteral(outfile, level + 1) showIndent(outfile, level) - outfile.write(')\n') + outfile.write(")\n") class MemberSpec_(object): - def __init__(self, name='', data_type='', container=0, - optional=0, child_attrs=None, choice=None): + def __init__( + self, + name="", + data_type="", + container=0, + optional=0, + child_attrs=None, + choice=None, + ): self.name = name self.data_type = data_type self.container = container self.child_attrs = child_attrs self.choice = choice self.optional = optional - def set_name(self, name): self.name = name - def get_name(self): return self.name - def set_data_type(self, data_type): self.data_type = data_type - def get_data_type_chain(self): return self.data_type + + def set_name(self, name): + self.name = name + + def get_name(self): + return self.name + + def set_data_type(self, data_type): + self.data_type = data_type + + def get_data_type_chain(self): + return self.data_type + def get_data_type(self): if isinstance(self.data_type, list): if len(self.data_type) > 0: return self.data_type[-1] else: - return 'xs:string' + return "xs:string" else: return self.data_type - def set_container(self, container): self.container = container - def get_container(self): return self.container - def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs - def get_child_attrs(self): return self.child_attrs - def set_choice(self, choice): self.choice = choice - def get_choice(self): return self.choice - def 
set_optional(self, optional): self.optional = optional - def get_optional(self): return self.optional + + def set_container(self, container): + self.container = container + + def get_container(self): + return self.container + + def set_child_attrs(self, child_attrs): + self.child_attrs = child_attrs + + def get_child_attrs(self): + return self.child_attrs + + def set_choice(self, choice): + self.choice = choice + + def get_choice(self): + return self.choice + + def set_optional(self, optional): + self.optional = optional + + def get_optional(self): + return self.optional def _cast(typ, value): @@ -719,2783 +1143,6328 @@ def _cast(typ, value): return value return typ(value) + # # Data representation classes. # -class BlockTypes(object): - VOLTAGE_CONC_DEP_BLOCK_MECHANISM='voltageConcDepBlockMechanism' +class BlockTypes(str, Enum): + VOLTAGE_CONC_DEP_BLOCK_MECHANISM = "voltageConcDepBlockMechanism" + + +class Metric(str, Enum): + """Metric -- Allowed metrics for InhomogeneousParam""" + PATH_LENGTHFROMROOT = "Path Length from root" -class Metric(object): - PATH_LENGTHFROMROOT='Path Length from root' +class PlasticityTypes(str, Enum): + TSODYKS_MARKRAM_DEP_MECHANISM = "tsodyksMarkramDepMechanism" + TSODYKS_MARKRAM_DEP_FAC_MECHANISM = "tsodyksMarkramDepFacMechanism" -class PlasticityTypes(object): - TSODYKS_MARKRAM_DEP_MECHANISM='tsodyksMarkramDepMechanism' - TSODYKS_MARKRAM_DEP_FAC_MECHANISM='tsodyksMarkramDepFacMechanism' +class ZeroOrOne(str, Enum): + """ZeroOrOne -- Value which is either 0 or 1""" -class ZeroOrOne(object): - _0='0' - _1='1' + _0 = "0" + _1 = "1" -class allowedSpaces(object): - EUCLIDEAN__1_D='Euclidean_1D' - EUCLIDEAN__2_D='Euclidean_2D' - EUCLIDEAN__3_D='Euclidean_3D' - GRID__1_D='Grid_1D' - GRID__2_D='Grid_2D' - GRID__3_D='Grid_3D' +class allowedSpaces(str, Enum): + EUCLIDEAN__1_D = "Euclidean_1D" + EUCLIDEAN__2_D = "Euclidean_2D" + EUCLIDEAN__3_D = "Euclidean_3D" + GRID__1_D = "Grid_1D" + GRID__2_D = "Grid_2D" + GRID__3_D = "Grid_3D" -class 
channelTypes(object): - ION_CHANNEL_PASSIVE='ionChannelPassive' - ION_CHANNEL_HH='ionChannelHH' +class channelTypes(str, Enum): + ION_CHANNEL_PASSIVE = "ionChannelPassive" + ION_CHANNEL_HH = "ionChannelHH" -class gateTypes(object): - GATE_H_HRATES='gateHHrates' - GATE_H_HRATES_TAU='gateHHratesTau' - GATE_H_HTAU_INF='gateHHtauInf' - GATE_H_HRATES_INF='gateHHratesInf' - GATE_H_HRATES_TAU_INF='gateHHratesTauInf' - GATE_HH_INSTANTANEOUS='gateHHInstantaneous' - GATE_KS='gateKS' - GATE_FRACTIONAL='gateFractional' +class gateTypes(str, Enum): + GATE_H_HRATES = "gateHHrates" + GATE_H_HRATES_TAU = "gateHHratesTau" + GATE_H_HTAU_INF = "gateHHtauInf" + GATE_H_HRATES_INF = "gateHHratesInf" + GATE_H_HRATES_TAU_INF = "gateHHratesTauInf" + GATE_HH_INSTANTANEOUS = "gateHHInstantaneous" + GATE_KS = "gateKS" + GATE_FRACTIONAL = "gateFractional" -class networkTypes(object): - NETWORK='network' - NETWORK_WITH_TEMPERATURE='networkWithTemperature' +class networkTypes(str, Enum): + NETWORK = "network" + NETWORK_WITH_TEMPERATURE = "networkWithTemperature" -class populationTypes(object): - POPULATION='population' - POPULATION_LIST='populationList' +class populationTypes(str, Enum): + POPULATION = "population" + POPULATION_LIST = "populationList" class Property(GeneratedsSuper): - """Generic property with a tag and value""" + """Property -- A property ( a **tag** and **value** pair ), which can be on any **baseStandalone** either as a direct child, or within an **Annotation** . Generally something which helps the visual display or facilitates simulation of a Component, but is not a core physiological property. Common examples include: **numberInternalDivisions,** equivalent of nseg in NEURON; **radius,** for a radius to use in graphical displays for abstract cells ( i. e. 
without defined morphologies ); **color,** the color to use for a **Population** or **populationList** of cells; **recommended_dt_ms,** the recommended timestep to use for simulating a **Network** , **recommended_duration_ms** the recommended duration to use when running a **Network**""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('tag', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('value', 'xs:string', 0, 0, {'use': u'required'}), + MemberSpec_("tag", "xs:string", 0, 0, {"use": "required", "name": "tag"}), + MemberSpec_("value", "xs:string", 0, 0, {"use": "required", "name": "value"}), ] subclass = None superclass = None - def __init__(self, tag=None, value=None, **kwargs_): + + def __init__(self, tag=None, value=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.tag = _cast(None, tag) + self.tag_nsprefix_ = None self.value = _cast(None, value) + self.value_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Property) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Property) if subclass is not None: return subclass(*args_, **kwargs_) if Property.subclass: return Property.subclass(*args_, **kwargs_) else: return Property(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - ): + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Property', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Property') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Property", + pretty_print=True, + ): + 
imported_ns_def_ = GenerateDSNamespaceDefs_.get("Property") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Property": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Property') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Property', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Property'): - if self.tag is not None and 'tag' not in already_processed: - already_processed.add('tag') - outfile.write(' tag=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.tag), input_name='tag')), )) - if self.value is not None and 'value' not in already_processed: - already_processed.add('value') - outfile.write(' value=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.value), input_name='value')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Property', fromsubclass_=False, pretty_print=True): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Property" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + 
namespaceprefix_, + namespacedef_, + name_="Property", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Property" + ): + if self.tag is not None and "tag" not in already_processed: + already_processed.add("tag") + outfile.write( + " tag=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.tag), input_name="tag") + ), + ) + ) + if self.value is not None and "value" not in already_processed: + already_processed.add("value") + outfile.write( + " value=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.value), input_name="value" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Property", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('tag', node) - if value is not None and 'tag' not in already_processed: - already_processed.add('tag') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("tag", node) + if value is not None and "tag" not in already_processed: + already_processed.add("tag") self.tag = value - value = find_attr_value_('value', node) - if value is not None and 'value' not in 
already_processed: - already_processed.add('value') + value = find_attr_value_("value", node) + if value is not None and "value" not in already_processed: + already_processed.add("value") self.value = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass + + # end class Property class Annotation(GeneratedsSuper): - """Placeholder for MIRIAM related metadata, among others.""" + """Annotation -- A structured annotation containing metadata, specifically RDF or **property** elements""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('__ANY__', '__ANY__', 1, 1, {u'maxOccurs': u'unbounded', u'processContents': u'skip', u'minOccurs': u'0'}, None), + MemberSpec_( + "__ANY__", + "__ANY__", + 1, + 1, + {"maxOccurs": "unbounded", "minOccurs": "0", "processContents": "skip"}, + None, + ), ] subclass = None superclass = None - def __init__(self, anytypeobjs_=None, **kwargs_): + + def __init__(self, anytypeobjs_=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None if anytypeobjs_ is None: self.anytypeobjs_ = [] else: self.anytypeobjs_ = anytypeobjs_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Annotation) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Annotation) if subclass is not None: return subclass(*args_, **kwargs_) if Annotation.subclass: return Annotation.subclass(*args_, **kwargs_) else: return Annotation(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - self.anytypeobjs_ - ): + + def _hasContent(self): + if self.anytypeobjs_: return True else: return 
False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Annotation', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Annotation') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="Annotation", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Annotation") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Annotation": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Annotation') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Annotation', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Annotation" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Annotation", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Annotation'): + 
outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Annotation" + ): pass - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Annotation', fromsubclass_=False, pretty_print=True): + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="Annotation", + fromsubclass_=False, + pretty_print=True, + ): if pretty_print: - eol_ = '\n' - else: - eol_ = '' - for obj_ in self.anytypeobjs_: - obj_.export(outfile, level, namespaceprefix_, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + eol_ = "\n" + else: + eol_ = "" + if not fromsubclass_: + for obj_ in self.anytypeobjs_: + showIndent(outfile, level, pretty_print) + outfile.write(obj_) + outfile.write("\n") + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + + def _buildAttributes(self, node, attrs, already_processed): pass - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - obj_ = self.gds_build_any(child_, 'Annotation') - if obj_ is not None: - self.add_anytypeobjs_(obj_) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + content_ = self.gds_build_any(child_, "Annotation") + self.anytypeobjs_.append(content_) + + # end class Annotation class 
ComponentType(GeneratedsSuper): - """Contains an extension to NeuroML by creating custom LEMS - ComponentType.""" + """ComponentType -- Contains an extension to NeuroML by creating custom LEMS ComponentType.""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('name', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('extends', 'xs:string', 0, 1, {'use': u'optional'}), - MemberSpec_('description', 'xs:string', 0, 1, {'use': u'optional'}), - MemberSpec_('Property', 'Property', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'LEMS_Property', u'name': u'Property', u'minOccurs': u'0'}, None), - MemberSpec_('Parameter', 'Parameter', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Parameter', u'name': u'Parameter', u'minOccurs': u'0'}, None), - MemberSpec_('Constant', 'Constant', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Constant', u'name': u'Constant', u'minOccurs': u'0'}, None), - MemberSpec_('Exposure', 'Exposure', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Exposure', u'name': u'Exposure', u'minOccurs': u'0'}, None), - MemberSpec_('Requirement', 'Requirement', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Requirement', u'name': u'Requirement', u'minOccurs': u'0'}, None), - MemberSpec_('InstanceRequirement', 'InstanceRequirement', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'InstanceRequirement', u'name': u'InstanceRequirement', u'minOccurs': u'0'}, None), - MemberSpec_('Dynamics', 'Dynamics', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Dynamics', u'name': u'Dynamics', u'minOccurs': u'0'}, None), + MemberSpec_("name", "xs:string", 0, 0, {"use": "required", "name": "name"}), + MemberSpec_( + "extends", "xs:string", 0, 1, {"use": "optional", "name": "extends"} + ), + MemberSpec_( + "description", "xs:string", 0, 1, {"use": "optional", "name": "description"} + ), + MemberSpec_( + "Property", + "Property", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "Property", + "type": "LEMS_Property", + }, + None, + ), + 
MemberSpec_( + "Parameter", + "Parameter", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "Parameter", + "type": "Parameter", + }, + None, + ), + MemberSpec_( + "Constant", + "Constant", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "Constant", + "type": "Constant", + }, + None, + ), + MemberSpec_( + "Exposure", + "Exposure", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "Exposure", + "type": "Exposure", + }, + None, + ), + MemberSpec_( + "Requirement", + "Requirement", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "Requirement", + "type": "Requirement", + }, + None, + ), + MemberSpec_( + "InstanceRequirement", + "InstanceRequirement", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "InstanceRequirement", + "type": "InstanceRequirement", + }, + None, + ), + MemberSpec_( + "Dynamics", + "Dynamics", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "Dynamics", + "type": "Dynamics", + }, + None, + ), ] subclass = None superclass = None - def __init__(self, name=None, extends=None, description=None, Property=None, Parameter=None, Constant=None, Exposure=None, Requirement=None, InstanceRequirement=None, Dynamics=None, **kwargs_): + + def __init__( + self, + name=None, + extends=None, + description=None, + Property=None, + Parameter=None, + Constant=None, + Exposure=None, + Requirement=None, + InstanceRequirement=None, + Dynamics=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.name = _cast(None, name) + self.name_nsprefix_ = None self.extends = _cast(None, extends) + self.extends_nsprefix_ = None self.description = _cast(None, description) + self.description_nsprefix_ = None if Property 
is None: self.Property = [] else: self.Property = Property + self.Property_nsprefix_ = None if Parameter is None: self.Parameter = [] else: self.Parameter = Parameter + self.Parameter_nsprefix_ = None if Constant is None: self.Constant = [] else: self.Constant = Constant + self.Constant_nsprefix_ = None if Exposure is None: self.Exposure = [] else: self.Exposure = Exposure + self.Exposure_nsprefix_ = None if Requirement is None: self.Requirement = [] else: self.Requirement = Requirement + self.Requirement_nsprefix_ = None if InstanceRequirement is None: self.InstanceRequirement = [] else: self.InstanceRequirement = InstanceRequirement + self.InstanceRequirement_nsprefix_ = None if Dynamics is None: self.Dynamics = [] else: self.Dynamics = Dynamics + self.Dynamics_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ComponentType) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ComponentType) if subclass is not None: return subclass(*args_, **kwargs_) if ComponentType.subclass: return ComponentType.subclass(*args_, **kwargs_) else: return ComponentType(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): + + def _hasContent(self): if ( - self.Property or - self.Parameter or - self.Constant or - self.Exposure or - self.Requirement or - self.InstanceRequirement or - self.Dynamics + self.Property + or self.Parameter + or self.Constant + or self.Exposure + or self.Requirement + or self.InstanceRequirement + or self.Dynamics ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ComponentType', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ComponentType') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="ComponentType", + pretty_print=True, + ): + 
imported_ns_def_ = GenerateDSNamespaceDefs_.get("ComponentType") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ComponentType": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ComponentType') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ComponentType', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ComponentType" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ComponentType", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ComponentType'): - if self.name is not None and 'name' not in already_processed: - already_processed.add('name') - outfile.write(' name=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.name), input_name='name')), )) - if self.extends is not None and 'extends' not in already_processed: - already_processed.add('extends') - outfile.write(' extends=%s' % 
(self.gds_encode(self.gds_format_string(quote_attrib(self.extends), input_name='extends')), )) - if self.description is not None and 'description' not in already_processed: - already_processed.add('description') - outfile.write(' description=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.description), input_name='description')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ComponentType', fromsubclass_=False, pretty_print=True): + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ComponentType", + ): + if self.name is not None and "name" not in already_processed: + already_processed.add("name") + outfile.write( + " name=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.name), input_name="name" + ) + ), + ) + ) + if self.extends is not None and "extends" not in already_processed: + already_processed.add("extends") + outfile.write( + " extends=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.extends), input_name="extends" + ) + ), + ) + ) + if self.description is not None and "description" not in already_processed: + already_processed.add("description") + outfile.write( + " description=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.description), input_name="description" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="ComponentType", + fromsubclass_=False, + pretty_print=True, + ): if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for Property_ in self.Property: - Property_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Property', pretty_print=pretty_print) + namespaceprefix_ = ( + self.Property_nsprefix_ + 
":" + if (UseCapturedNS_ and self.Property_nsprefix_) + else "" + ) + Property_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Property", + pretty_print=pretty_print, + ) for Parameter_ in self.Parameter: - Parameter_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Parameter', pretty_print=pretty_print) + namespaceprefix_ = ( + self.Parameter_nsprefix_ + ":" + if (UseCapturedNS_ and self.Parameter_nsprefix_) + else "" + ) + Parameter_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Parameter", + pretty_print=pretty_print, + ) for Constant_ in self.Constant: - Constant_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Constant', pretty_print=pretty_print) + namespaceprefix_ = ( + self.Constant_nsprefix_ + ":" + if (UseCapturedNS_ and self.Constant_nsprefix_) + else "" + ) + Constant_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Constant", + pretty_print=pretty_print, + ) for Exposure_ in self.Exposure: - Exposure_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Exposure', pretty_print=pretty_print) + namespaceprefix_ = ( + self.Exposure_nsprefix_ + ":" + if (UseCapturedNS_ and self.Exposure_nsprefix_) + else "" + ) + Exposure_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Exposure", + pretty_print=pretty_print, + ) for Requirement_ in self.Requirement: - Requirement_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Requirement', pretty_print=pretty_print) + namespaceprefix_ = ( + self.Requirement_nsprefix_ + ":" + if (UseCapturedNS_ and self.Requirement_nsprefix_) + else "" + ) + Requirement_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Requirement", + pretty_print=pretty_print, + ) for InstanceRequirement_ in self.InstanceRequirement: - InstanceRequirement_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='InstanceRequirement', 
pretty_print=pretty_print) + namespaceprefix_ = ( + self.InstanceRequirement_nsprefix_ + ":" + if (UseCapturedNS_ and self.InstanceRequirement_nsprefix_) + else "" + ) + InstanceRequirement_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="InstanceRequirement", + pretty_print=pretty_print, + ) for Dynamics_ in self.Dynamics: - Dynamics_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Dynamics', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.Dynamics_nsprefix_ + ":" + if (UseCapturedNS_ and self.Dynamics_nsprefix_) + else "" + ) + Dynamics_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Dynamics", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('name', node) - if value is not None and 'name' not in already_processed: - already_processed.add('name') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("name", node) + if value is not None and "name" not in already_processed: + already_processed.add("name") self.name = value - value = find_attr_value_('extends', node) - if value is not None and 'extends' not in already_processed: - already_processed.add('extends') + value = find_attr_value_("extends", node) + if value is not None and "extends" not in already_processed: + already_processed.add("extends") self.extends = value 
- value = find_attr_value_('description', node) - if value is not None and 'description' not in already_processed: - already_processed.add('description') + value = find_attr_value_("description", node) + if value is not None and "description" not in already_processed: + already_processed.add("description") self.description = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'Property': + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "Property": obj_ = LEMS_Property.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.Property.append(obj_) - obj_.original_tagname_ = 'Property' - elif nodeName_ == 'Parameter': + obj_.original_tagname_ = "Property" + elif nodeName_ == "Parameter": obj_ = Parameter.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.Parameter.append(obj_) - obj_.original_tagname_ = 'Parameter' - elif nodeName_ == 'Constant': + obj_.original_tagname_ = "Parameter" + elif nodeName_ == "Constant": obj_ = Constant.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.Constant.append(obj_) - obj_.original_tagname_ = 'Constant' - elif nodeName_ == 'Exposure': + obj_.original_tagname_ = "Constant" + elif nodeName_ == "Exposure": obj_ = Exposure.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.Exposure.append(obj_) - obj_.original_tagname_ = 'Exposure' - elif nodeName_ == 'Requirement': + obj_.original_tagname_ = "Exposure" + elif nodeName_ == "Requirement": obj_ = Requirement.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.Requirement.append(obj_) - obj_.original_tagname_ = 'Requirement' - elif nodeName_ == 'InstanceRequirement': + obj_.original_tagname_ = 
"Requirement" + elif nodeName_ == "InstanceRequirement": obj_ = InstanceRequirement.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.InstanceRequirement.append(obj_) - obj_.original_tagname_ = 'InstanceRequirement' - elif nodeName_ == 'Dynamics': + obj_.original_tagname_ = "InstanceRequirement" + elif nodeName_ == "Dynamics": obj_ = Dynamics.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.Dynamics.append(obj_) - obj_.original_tagname_ = 'Dynamics' + obj_.original_tagname_ = "Dynamics" + + # end class ComponentType class Constant(GeneratedsSuper): - """LEMS ComponentType for Constant.""" + """Constant -- LEMS ComponentType for Constant.""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('name', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('dimension', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('value', 'Nml2Quantity', 0, 0, {'use': u'required'}), - MemberSpec_('description', 'xs:string', 0, 1, {'use': u'optional'}), + MemberSpec_("name", "xs:string", 0, 0, {"use": "required", "name": "name"}), + MemberSpec_( + "dimension", "xs:string", 0, 0, {"use": "required", "name": "dimension"} + ), + MemberSpec_( + "value", "Nml2Quantity", 0, 0, {"use": "required", "name": "value"} + ), + MemberSpec_( + "description", "xs:string", 0, 1, {"use": "optional", "name": "description"} + ), ] subclass = None superclass = None - def __init__(self, name=None, dimension=None, value=None, description=None, **kwargs_): + + def __init__( + self, + name=None, + dimension=None, + value=None, + description=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.name = _cast(None, name) + 
self.name_nsprefix_ = None self.dimension = _cast(None, dimension) + self.dimension_nsprefix_ = None self.value = _cast(None, value) + self.value_nsprefix_ = None self.description = _cast(None, description) + self.description_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Constant) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Constant) if subclass is not None: return subclass(*args_, **kwargs_) if Constant.subclass: return Constant.subclass(*args_, **kwargs_) else: return Constant(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity(self, value): # Validate type Nml2Quantity, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_patterns_, )) - validate_Nml2Quantity_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*([_a-zA-Z0-9])*$']] - def hasContent_(self): if ( - + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_patterns_, + ) + ) + + validate_Nml2Quantity_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*([_a-zA-Z0-9])*)$"] + ] + + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, 
level, namespaceprefix_='', namespacedef_='', name_='Constant', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Constant') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Constant", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Constant") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Constant": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Constant') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Constant', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Constant'): - if self.name is not None and 'name' not in already_processed: - already_processed.add('name') - outfile.write(' name=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.name), input_name='name')), )) - if self.dimension is not None and 'dimension' not in already_processed: - already_processed.add('dimension') - outfile.write(' dimension=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.dimension), input_name='dimension')), )) - if self.value is not None and 'value' not in already_processed: - already_processed.add('value') - outfile.write(' value=%s' % (quote_attrib(self.value), )) - if self.description is not 
None and 'description' not in already_processed: - already_processed.add('description') - outfile.write(' description=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.description), input_name='description')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Constant', fromsubclass_=False, pretty_print=True): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Constant" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Constant", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Constant" + ): + if self.name is not None and "name" not in already_processed: + already_processed.add("name") + outfile.write( + " name=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.name), input_name="name" + ) + ), + ) + ) + if self.dimension is not None and "dimension" not in already_processed: + already_processed.add("dimension") + outfile.write( + " dimension=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.dimension), input_name="dimension" + ) + ), + ) + ) + if self.value is not None and "value" not in already_processed: + already_processed.add("value") + outfile.write( + " value=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.value), input_name="value" + ) + ), + ) + ) + if self.description is not None and "description" not in already_processed: + already_processed.add("description") + outfile.write( + " description=%s" + % ( + self.gds_encode( + self.gds_format_string( + 
quote_attrib(self.description), input_name="description" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Constant", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('name', node) - if value is not None and 'name' not in already_processed: - already_processed.add('name') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("name", node) + if value is not None and "name" not in already_processed: + already_processed.add("name") self.name = value - value = find_attr_value_('dimension', node) - if value is not None and 'dimension' not in already_processed: - already_processed.add('dimension') + value = find_attr_value_("dimension", node) + if value is not None and "dimension" not in already_processed: + already_processed.add("dimension") self.dimension = value - value = find_attr_value_('value', node) - if value is not None and 'value' not in already_processed: - already_processed.add('value') + value = find_attr_value_("value", node) + if value is not None and "value" not in already_processed: + already_processed.add("value") self.value = value - self.validate_Nml2Quantity(self.value) # validate type Nml2Quantity - value = find_attr_value_('description', node) - if value is not None and 'description' not in 
already_processed: - already_processed.add('description') + self.validate_Nml2Quantity(self.value) # validate type Nml2Quantity + value = find_attr_value_("description", node) + if value is not None and "description" not in already_processed: + already_processed.add("description") self.description = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass + + # end class Constant class Exposure(GeneratedsSuper): - """LEMS Exposure (ComponentType property)""" + """Exposure -- LEMS Exposure (ComponentType property)""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('name', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('dimension', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('description', 'xs:string', 0, 1, {'use': u'optional'}), + MemberSpec_("name", "xs:string", 0, 0, {"use": "required", "name": "name"}), + MemberSpec_( + "dimension", "xs:string", 0, 0, {"use": "required", "name": "dimension"} + ), + MemberSpec_( + "description", "xs:string", 0, 1, {"use": "optional", "name": "description"} + ), ] subclass = None superclass = None - def __init__(self, name=None, dimension=None, description=None, **kwargs_): + + def __init__( + self, + name=None, + dimension=None, + description=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.name = _cast(None, name) + self.name_nsprefix_ = None self.dimension = _cast(None, dimension) + self.dimension_nsprefix_ = None self.description = _cast(None, description) + self.description_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - 
CurrentSubclassModule_, Exposure) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Exposure) if subclass is not None: return subclass(*args_, **kwargs_) if Exposure.subclass: return Exposure.subclass(*args_, **kwargs_) else: return Exposure(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - ): + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Exposure', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Exposure') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Exposure", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Exposure") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Exposure": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Exposure') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Exposure', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Exposure'): - if self.name is not None and 'name' not in already_processed: - already_processed.add('name') - outfile.write(' name=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.name), input_name='name')), )) 
- if self.dimension is not None and 'dimension' not in already_processed: - already_processed.add('dimension') - outfile.write(' dimension=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.dimension), input_name='dimension')), )) - if self.description is not None and 'description' not in already_processed: - already_processed.add('description') - outfile.write(' description=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.description), input_name='description')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Exposure', fromsubclass_=False, pretty_print=True): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Exposure" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Exposure", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Exposure" + ): + if self.name is not None and "name" not in already_processed: + already_processed.add("name") + outfile.write( + " name=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.name), input_name="name" + ) + ), + ) + ) + if self.dimension is not None and "dimension" not in already_processed: + already_processed.add("dimension") + outfile.write( + " dimension=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.dimension), input_name="dimension" + ) + ), + ) + ) + if self.description is not None and "description" not in already_processed: + already_processed.add("description") + outfile.write( + " description=%s" + % ( + self.gds_encode( + 
self.gds_format_string( + quote_attrib(self.description), input_name="description" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Exposure", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('name', node) - if value is not None and 'name' not in already_processed: - already_processed.add('name') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("name", node) + if value is not None and "name" not in already_processed: + already_processed.add("name") self.name = value - value = find_attr_value_('dimension', node) - if value is not None and 'dimension' not in already_processed: - already_processed.add('dimension') + value = find_attr_value_("dimension", node) + if value is not None and "dimension" not in already_processed: + already_processed.add("dimension") self.dimension = value - value = find_attr_value_('description', node) - if value is not None and 'description' not in already_processed: - already_processed.add('description') + value = find_attr_value_("description", node) + if value is not None and "description" not in already_processed: + already_processed.add("description") self.description = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + + def _buildChildren( + self, 
child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass + + # end class Exposure class NamedDimensionalType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('name', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('dimension', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('description', 'xs:string', 0, 1, {'use': u'optional'}), + MemberSpec_("name", "xs:string", 0, 0, {"use": "required", "name": "name"}), + MemberSpec_( + "dimension", "xs:string", 0, 0, {"use": "required", "name": "dimension"} + ), + MemberSpec_( + "description", "xs:string", 0, 1, {"use": "optional", "name": "description"} + ), ] subclass = None superclass = None - def __init__(self, name=None, dimension=None, description=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + name=None, + dimension=None, + description=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.name = _cast(None, name) + self.name_nsprefix_ = None self.dimension = _cast(None, dimension) + self.dimension_nsprefix_ = None self.description = _cast(None, description) + self.description_nsprefix_ = None self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, NamedDimensionalType) + CurrentSubclassModule_, NamedDimensionalType + ) if subclass is not None: return subclass(*args_, **kwargs_) if NamedDimensionalType.subclass: return NamedDimensionalType.subclass(*args_, **kwargs_) else: return NamedDimensionalType(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - ): + def _hasContent(self): + if (): return True else: return False - def 
export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NamedDimensionalType', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('NamedDimensionalType') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="NamedDimensionalType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("NamedDimensionalType") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "NamedDimensionalType": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='NamedDimensionalType') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='NamedDimensionalType', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='NamedDimensionalType'): - if self.name is not None and 'name' not in already_processed: - already_processed.add('name') - outfile.write(' name=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.name), input_name='name')), )) - if self.dimension is not None and 'dimension' not in already_processed: - already_processed.add('dimension') - outfile.write(' dimension=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.dimension), input_name='dimension')), )) - if self.description is not None and 'description' not in 
already_processed: - already_processed.add('description') - outfile.write(' description=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.description), input_name='description')), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="NamedDimensionalType", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="NamedDimensionalType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="NamedDimensionalType", + ): + if self.name is not None and "name" not in already_processed: + already_processed.add("name") + outfile.write( + " name=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.name), input_name="name" + ) + ), + ) + ) + if self.dimension is not None and "dimension" not in already_processed: + already_processed.add("dimension") + outfile.write( + " dimension=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.dimension), input_name="dimension" + ) + ), + ) + ) + if self.description is not None and "description" not in already_processed: + already_processed.add("description") + outfile.write( + " description=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.description), input_name="description" + ) + ), + ) + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - 
outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NamedDimensionalType', fromsubclass_=False, pretty_print=True): + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="NamedDimensionalType", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('name', node) - if value is not None and 'name' not in already_processed: - already_processed.add('name') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("name", node) + if value is not None and "name" not in already_processed: + already_processed.add("name") self.name = value - value = find_attr_value_('dimension', node) - if value is not None and 'dimension' not in already_processed: - already_processed.add('dimension') + value = find_attr_value_("dimension", node) + if value is not None and "dimension" not in already_processed: + already_processed.add("dimension") self.dimension = value - value = 
find_attr_value_('description', node) - if value is not None and 'description' not in already_processed: - already_processed.add('description') + value = find_attr_value_("description", node) + if value is not None and "description" not in already_processed: + already_processed.add("description") self.description = value - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass + + # end class NamedDimensionalType class NamedDimensionalVariable(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('name', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('dimension', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('description', 'xs:string', 0, 1, {'use': u'optional'}), - MemberSpec_('exposure', 'xs:string', 0, 1, {'use': u'optional'}), + MemberSpec_("name", "xs:string", 0, 0, {"use": "required", "name": "name"}), + MemberSpec_( + "dimension", "xs:string", 0, 0, {"use": "required", "name": "dimension"} + ), + MemberSpec_( + "description", "xs:string", 0, 1, {"use": "optional", "name": "description"} + ), + MemberSpec_( + "exposure", "xs:string", 0, 1, {"use": "optional", "name": "exposure"} + ), ] subclass = None superclass = None - def __init__(self, name=None, dimension=None, description=None, exposure=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + name=None, + dimension=None, + description=None, + exposure=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None 
self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.name = _cast(None, name) + self.name_nsprefix_ = None self.dimension = _cast(None, dimension) + self.dimension_nsprefix_ = None self.description = _cast(None, description) + self.description_nsprefix_ = None self.exposure = _cast(None, exposure) + self.exposure_nsprefix_ = None self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, NamedDimensionalVariable) + CurrentSubclassModule_, NamedDimensionalVariable + ) if subclass is not None: return subclass(*args_, **kwargs_) if NamedDimensionalVariable.subclass: return NamedDimensionalVariable.subclass(*args_, **kwargs_) else: return NamedDimensionalVariable(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - ): + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NamedDimensionalVariable', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('NamedDimensionalVariable') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="NamedDimensionalVariable", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("NamedDimensionalVariable") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "NamedDimensionalVariable": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or 
'', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='NamedDimensionalVariable') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='NamedDimensionalVariable', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='NamedDimensionalVariable'): - if self.name is not None and 'name' not in already_processed: - already_processed.add('name') - outfile.write(' name=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.name), input_name='name')), )) - if self.dimension is not None and 'dimension' not in already_processed: - already_processed.add('dimension') - outfile.write(' dimension=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.dimension), input_name='dimension')), )) - if self.description is not None and 'description' not in already_processed: - already_processed.add('description') - outfile.write(' description=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.description), input_name='description')), )) - if self.exposure is not None and 'exposure' not in already_processed: - already_processed.add('exposure') - outfile.write(' exposure=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.exposure), input_name='exposure')), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="NamedDimensionalVariable", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + 
level + 1, + namespaceprefix_, + namespacedef_, + name_="NamedDimensionalVariable", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="NamedDimensionalVariable", + ): + if self.name is not None and "name" not in already_processed: + already_processed.add("name") + outfile.write( + " name=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.name), input_name="name" + ) + ), + ) + ) + if self.dimension is not None and "dimension" not in already_processed: + already_processed.add("dimension") + outfile.write( + " dimension=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.dimension), input_name="dimension" + ) + ), + ) + ) + if self.description is not None and "description" not in already_processed: + already_processed.add("description") + outfile.write( + " description=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.description), input_name="description" + ) + ), + ) + ) + if self.exposure is not None and "exposure" not in already_processed: + already_processed.add("exposure") + outfile.write( + " exposure=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.exposure), input_name="exposure" + ) + ), + ) + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NamedDimensionalVariable', fromsubclass_=False, pretty_print=True): + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % 
(imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="NamedDimensionalVariable", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('name', node) - if value is not None and 'name' not in already_processed: - already_processed.add('name') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("name", node) + if value is not None and "name" not in already_processed: + already_processed.add("name") self.name = value - value = find_attr_value_('dimension', node) - if value is not None and 'dimension' not in already_processed: - already_processed.add('dimension') + value = find_attr_value_("dimension", node) + if value is not None and "dimension" not in already_processed: + already_processed.add("dimension") self.dimension = value - value = find_attr_value_('description', node) - if value is not None and 'description' not in already_processed: - already_processed.add('description') + value = find_attr_value_("description", node) + if value is not None and "description" not in already_processed: + already_processed.add("description") self.description = value - value = find_attr_value_('exposure', node) - if value is not None and 
'exposure' not in already_processed: - already_processed.add('exposure') + value = find_attr_value_("exposure", node) + if value is not None and "exposure" not in already_processed: + already_processed.add("exposure") self.exposure = value - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass + + # end class NamedDimensionalVariable class Parameter(NamedDimensionalType): - member_data_items_ = [ - ] + __hash__ = GeneratedsSuper.__hash__ + member_data_items_ = [] subclass = None superclass = NamedDimensionalType - def __init__(self, name=None, dimension=None, description=None, **kwargs_): + + def __init__( + self, + name=None, + dimension=None, + description=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Parameter, self).__init__(name, dimension, description, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("Parameter"), self).__init__( + name, dimension, description, **kwargs_ + ) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Parameter) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Parameter) if subclass is not None: return subclass(*args_, **kwargs_) if Parameter.subclass: return Parameter.subclass(*args_, **kwargs_) else: return Parameter(*args_, **kwargs_) + factory = staticmethod(factory) - def 
hasContent_(self): - if ( - super(Parameter, self).hasContent_() - ): + + def _hasContent(self): + if super(Parameter, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Parameter', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Parameter') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Parameter", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Parameter") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Parameter": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Parameter') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Parameter', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Parameter'): - super(Parameter, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Parameter') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Parameter', fromsubclass_=False, pretty_print=True): - super(Parameter, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + 
namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Parameter" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Parameter", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Parameter" + ): + super(Parameter, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Parameter" + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Parameter", + fromsubclass_=False, + pretty_print=True, + ): + super(Parameter, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(Parameter, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(Parameter, self).buildChildren(child_, node, nodeName_, True) + + def _buildAttributes(self, node, attrs, already_processed): + super(Parameter, self)._buildAttributes(node, attrs, 
already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(Parameter, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class Parameter class LEMS_Property(NamedDimensionalType): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('default_value', 'xs:double', 0, 1, {'use': u'optional'}), + MemberSpec_( + "default_value", + "xs:double", + 0, + 1, + {"use": "optional", "name": "default_value"}, + ), ] subclass = None superclass = NamedDimensionalType - def __init__(self, name=None, dimension=None, description=None, default_value=None, **kwargs_): + + def __init__( + self, + name=None, + dimension=None, + description=None, + default_value=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(LEMS_Property, self).__init__(name, dimension, description, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("LEMS_Property"), self).__init__( + name, dimension, description, **kwargs_ + ) self.default_value = _cast(float, default_value) + self.default_value_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, LEMS_Property) + subclass = getSubclassFromModule_(CurrentSubclassModule_, LEMS_Property) if subclass is not None: return subclass(*args_, **kwargs_) if LEMS_Property.subclass: return LEMS_Property.subclass(*args_, **kwargs_) else: return LEMS_Property(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(LEMS_Property, self).hasContent_() - ): + + def _hasContent(self): + if super(LEMS_Property, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', 
namespacedef_='', name_='LEMS_Property', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('LEMS_Property') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="LEMS_Property", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("LEMS_Property") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "LEMS_Property": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LEMS_Property') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LEMS_Property', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LEMS_Property'): - super(LEMS_Property, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LEMS_Property') - if self.default_value is not None and 'default_value' not in already_processed: - already_processed.add('default_value') - outfile.write(' defaultValue="%s"' % self.gds_format_double(self.default_value, input_name='defaultValue')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LEMS_Property', fromsubclass_=False, pretty_print=True): - super(LEMS_Property, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write( 
+ "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="LEMS_Property" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="LEMS_Property", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="LEMS_Property", + ): + super(LEMS_Property, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="LEMS_Property" + ) + if self.default_value is not None and "default_value" not in already_processed: + already_processed.add("default_value") + outfile.write( + ' defaultValue="%s"' + % self.gds_format_double(self.default_value, input_name="defaultValue") + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="LEMS_Property", + fromsubclass_=False, + pretty_print=True, + ): + super(LEMS_Property, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = 
find_attr_value_('defaultValue', node) - if value is not None and 'defaultValue' not in already_processed: - already_processed.add('defaultValue') - try: - self.default_value = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (defaultValue): %s' % exp) - super(LEMS_Property, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(LEMS_Property, self).buildChildren(child_, node, nodeName_, True) + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("defaultValue", node) + if value is not None and "defaultValue" not in already_processed: + already_processed.add("defaultValue") + value = self.gds_parse_double(value, node, "defaultValue") + self.default_value = value + super(LEMS_Property, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(LEMS_Property, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class LEMS_Property class Requirement(NamedDimensionalType): - member_data_items_ = [ - ] + __hash__ = GeneratedsSuper.__hash__ + member_data_items_ = [] subclass = None superclass = NamedDimensionalType - def __init__(self, name=None, dimension=None, description=None, **kwargs_): + + def __init__( + self, + name=None, + dimension=None, + description=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Requirement, self).__init__(name, dimension, description, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("Requirement"), self).__init__( + name, dimension, description, **kwargs_ + ) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = 
getSubclassFromModule_( - CurrentSubclassModule_, Requirement) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Requirement) if subclass is not None: return subclass(*args_, **kwargs_) if Requirement.subclass: return Requirement.subclass(*args_, **kwargs_) else: return Requirement(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(Requirement, self).hasContent_() - ): + + def _hasContent(self): + if super(Requirement, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Requirement', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Requirement') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Requirement", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Requirement") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Requirement": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Requirement') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Requirement', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Requirement'): - super(Requirement, self).exportAttributes(outfile, level, 
already_processed, namespaceprefix_, name_='Requirement') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Requirement', fromsubclass_=False, pretty_print=True): - super(Requirement, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Requirement" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Requirement", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="Requirement", + ): + super(Requirement, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Requirement" + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Requirement", + fromsubclass_=False, + pretty_print=True, + ): + super(Requirement, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) 
return self - def buildAttributes(self, node, attrs, already_processed): - super(Requirement, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(Requirement, self).buildChildren(child_, node, nodeName_, True) + + def _buildAttributes(self, node, attrs, already_processed): + super(Requirement, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(Requirement, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class Requirement class InstanceRequirement(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('name', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('type', 'xs:string', 0, 0, {'use': u'required'}), + MemberSpec_("name", "xs:string", 0, 0, {"use": "required", "name": "name"}), + MemberSpec_("type", "xs:string", 0, 0, {"use": "required", "name": "type"}), ] subclass = None superclass = None - def __init__(self, name=None, type=None, **kwargs_): + + def __init__(self, name=None, type=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.name = _cast(None, name) + self.name_nsprefix_ = None self.type = _cast(None, type) + self.type_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, InstanceRequirement) + CurrentSubclassModule_, InstanceRequirement + ) if subclass is not None: return subclass(*args_, **kwargs_) if InstanceRequirement.subclass: return InstanceRequirement.subclass(*args_, **kwargs_) else: return InstanceRequirement(*args_, **kwargs_) + factory = staticmethod(factory) - 
def hasContent_(self): - if ( - ): + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InstanceRequirement', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('InstanceRequirement') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="InstanceRequirement", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("InstanceRequirement") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "InstanceRequirement": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InstanceRequirement') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='InstanceRequirement', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='InstanceRequirement'): - if self.name is not None and 'name' not in already_processed: - already_processed.add('name') - outfile.write(' name=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.name), input_name='name')), )) - if self.type is not None and 'type' not in already_processed: - already_processed.add('type') - outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type), input_name='type')), )) - 
def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InstanceRequirement', fromsubclass_=False, pretty_print=True): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="InstanceRequirement", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="InstanceRequirement", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="InstanceRequirement", + ): + if self.name is not None and "name" not in already_processed: + already_processed.add("name") + outfile.write( + " name=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.name), input_name="name" + ) + ), + ) + ) + if self.type is not None and "type" not in already_processed: + already_processed.add("type") + outfile.write( + " type=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.type), input_name="type" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="InstanceRequirement", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + 
self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('name', node) - if value is not None and 'name' not in already_processed: - already_processed.add('name') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("name", node) + if value is not None and "name" not in already_processed: + already_processed.add("name") self.name = value - value = find_attr_value_('type', node) - if value is not None and 'type' not in already_processed: - already_processed.add('type') + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") self.type = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass + + # end class InstanceRequirement class Dynamics(GeneratedsSuper): - """LEMS ComponentType for Dynamics""" + """Dynamics -- LEMS ComponentType for Dynamics""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('StateVariable', 'StateVariable', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'StateVariable', u'name': u'StateVariable', u'minOccurs': u'0'}, None), - MemberSpec_('DerivedVariable', 'DerivedVariable', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'DerivedVariable', u'name': u'DerivedVariable', u'minOccurs': u'0'}, None), - MemberSpec_('ConditionalDerivedVariable', 'ConditionalDerivedVariable', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ConditionalDerivedVariable', u'name': u'ConditionalDerivedVariable', u'minOccurs': u'0'}, None), - MemberSpec_('TimeDerivative', 'TimeDerivative', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'TimeDerivative', u'name': u'TimeDerivative', u'minOccurs': u'0'}, None), + MemberSpec_( + "StateVariable", + "StateVariable", + 1, + 1, + { + "maxOccurs": 
"unbounded", + "minOccurs": "0", + "name": "StateVariable", + "type": "StateVariable", + }, + None, + ), + MemberSpec_( + "DerivedVariable", + "DerivedVariable", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "DerivedVariable", + "type": "DerivedVariable", + }, + None, + ), + MemberSpec_( + "ConditionalDerivedVariable", + "ConditionalDerivedVariable", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "ConditionalDerivedVariable", + "type": "ConditionalDerivedVariable", + }, + None, + ), + MemberSpec_( + "TimeDerivative", + "TimeDerivative", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "TimeDerivative", + "type": "TimeDerivative", + }, + None, + ), ] subclass = None superclass = None - def __init__(self, StateVariable=None, DerivedVariable=None, ConditionalDerivedVariable=None, TimeDerivative=None, **kwargs_): + + def __init__( + self, + StateVariable=None, + DerivedVariable=None, + ConditionalDerivedVariable=None, + TimeDerivative=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None if StateVariable is None: self.StateVariable = [] else: self.StateVariable = StateVariable + self.StateVariable_nsprefix_ = None if DerivedVariable is None: self.DerivedVariable = [] else: self.DerivedVariable = DerivedVariable + self.DerivedVariable_nsprefix_ = None if ConditionalDerivedVariable is None: self.ConditionalDerivedVariable = [] else: self.ConditionalDerivedVariable = ConditionalDerivedVariable + self.ConditionalDerivedVariable_nsprefix_ = None if TimeDerivative is None: self.TimeDerivative = [] else: self.TimeDerivative = TimeDerivative + self.TimeDerivative_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = 
getSubclassFromModule_( - CurrentSubclassModule_, Dynamics) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Dynamics) if subclass is not None: return subclass(*args_, **kwargs_) if Dynamics.subclass: return Dynamics.subclass(*args_, **kwargs_) else: return Dynamics(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): + + def _hasContent(self): if ( - self.StateVariable or - self.DerivedVariable or - self.ConditionalDerivedVariable or - self.TimeDerivative + self.StateVariable + or self.DerivedVariable + or self.ConditionalDerivedVariable + or self.TimeDerivative ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Dynamics', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Dynamics') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="Dynamics", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Dynamics") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Dynamics": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Dynamics') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Dynamics', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + 
already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Dynamics" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Dynamics", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Dynamics'): + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Dynamics" + ): pass - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Dynamics', fromsubclass_=False, pretty_print=True): + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="Dynamics", + fromsubclass_=False, + pretty_print=True, + ): if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for StateVariable_ in self.StateVariable: - StateVariable_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='StateVariable', pretty_print=pretty_print) + namespaceprefix_ = ( + self.StateVariable_nsprefix_ + ":" + if (UseCapturedNS_ and self.StateVariable_nsprefix_) + else "" + ) + StateVariable_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="StateVariable", + pretty_print=pretty_print, + ) for DerivedVariable_ in self.DerivedVariable: - DerivedVariable_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='DerivedVariable', pretty_print=pretty_print) + namespaceprefix_ = ( + self.DerivedVariable_nsprefix_ + ":" + if (UseCapturedNS_ and self.DerivedVariable_nsprefix_) + else "" + ) + DerivedVariable_.export( + outfile, + 
level, + namespaceprefix_, + namespacedef_="", + name_="DerivedVariable", + pretty_print=pretty_print, + ) for ConditionalDerivedVariable_ in self.ConditionalDerivedVariable: - ConditionalDerivedVariable_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ConditionalDerivedVariable', pretty_print=pretty_print) + namespaceprefix_ = ( + self.ConditionalDerivedVariable_nsprefix_ + ":" + if (UseCapturedNS_ and self.ConditionalDerivedVariable_nsprefix_) + else "" + ) + ConditionalDerivedVariable_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="ConditionalDerivedVariable", + pretty_print=pretty_print, + ) for TimeDerivative_ in self.TimeDerivative: - TimeDerivative_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TimeDerivative', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.TimeDerivative_nsprefix_ + ":" + if (UseCapturedNS_ and self.TimeDerivative_nsprefix_) + else "" + ) + TimeDerivative_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="TimeDerivative", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + + def _buildAttributes(self, node, attrs, already_processed): pass - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'StateVariable': + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + 
): + if nodeName_ == "StateVariable": obj_ = StateVariable.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.StateVariable.append(obj_) - obj_.original_tagname_ = 'StateVariable' - elif nodeName_ == 'DerivedVariable': + obj_.original_tagname_ = "StateVariable" + elif nodeName_ == "DerivedVariable": obj_ = DerivedVariable.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.DerivedVariable.append(obj_) - obj_.original_tagname_ = 'DerivedVariable' - elif nodeName_ == 'ConditionalDerivedVariable': + obj_.original_tagname_ = "DerivedVariable" + elif nodeName_ == "ConditionalDerivedVariable": obj_ = ConditionalDerivedVariable.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.ConditionalDerivedVariable.append(obj_) - obj_.original_tagname_ = 'ConditionalDerivedVariable' - elif nodeName_ == 'TimeDerivative': + obj_.original_tagname_ = "ConditionalDerivedVariable" + elif nodeName_ == "TimeDerivative": obj_ = TimeDerivative.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.TimeDerivative.append(obj_) - obj_.original_tagname_ = 'TimeDerivative' + obj_.original_tagname_ = "TimeDerivative" + + # end class Dynamics class DerivedVariable(NamedDimensionalVariable): - """LEMS ComponentType for DerivedVariable""" + """DerivedVariable -- LEMS ComponentType for DerivedVariable""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('value', 'xs:string', 0, 1, {'use': u'optional'}), - MemberSpec_('select', 'xs:string', 0, 1, {'use': u'optional'}), + MemberSpec_("value", "xs:string", 0, 1, {"use": "optional", "name": "value"}), + MemberSpec_("select", "xs:string", 0, 1, {"use": "optional", "name": "select"}), ] subclass = None superclass = NamedDimensionalVariable - def __init__(self, name=None, dimension=None, description=None, 
exposure=None, value=None, select=None, **kwargs_): + + def __init__( + self, + name=None, + dimension=None, + description=None, + exposure=None, + value=None, + select=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(DerivedVariable, self).__init__(name, dimension, description, exposure, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("DerivedVariable"), self).__init__( + name, dimension, description, exposure, **kwargs_ + ) self.value = _cast(None, value) + self.value_nsprefix_ = None self.select = _cast(None, select) + self.select_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, DerivedVariable) + subclass = getSubclassFromModule_(CurrentSubclassModule_, DerivedVariable) if subclass is not None: return subclass(*args_, **kwargs_) if DerivedVariable.subclass: return DerivedVariable.subclass(*args_, **kwargs_) else: return DerivedVariable(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(DerivedVariable, self).hasContent_() - ): + + def _hasContent(self): + if super(DerivedVariable, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DerivedVariable', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('DerivedVariable') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="DerivedVariable", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("DerivedVariable") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ 
= "" + if self.original_tagname_ is not None and name_ == "DerivedVariable": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DerivedVariable') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='DerivedVariable', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DerivedVariable'): - super(DerivedVariable, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DerivedVariable') - if self.value is not None and 'value' not in already_processed: - already_processed.add('value') - outfile.write(' value=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.value), input_name='value')), )) - if self.select is not None and 'select' not in already_processed: - already_processed.add('select') - outfile.write(' select=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.select), input_name='select')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DerivedVariable', fromsubclass_=False, pretty_print=True): - super(DerivedVariable, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="DerivedVariable" + ) + if self._hasContent(): + 
outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="DerivedVariable", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="DerivedVariable", + ): + super(DerivedVariable, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="DerivedVariable" + ) + if self.value is not None and "value" not in already_processed: + already_processed.add("value") + outfile.write( + " value=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.value), input_name="value" + ) + ), + ) + ) + if self.select is not None and "select" not in already_processed: + already_processed.add("select") + outfile.write( + " select=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.select), input_name="select" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="DerivedVariable", + fromsubclass_=False, + pretty_print=True, + ): + super(DerivedVariable, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = 
find_attr_value_('value', node) - if value is not None and 'value' not in already_processed: - already_processed.add('value') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("value", node) + if value is not None and "value" not in already_processed: + already_processed.add("value") self.value = value - value = find_attr_value_('select', node) - if value is not None and 'select' not in already_processed: - already_processed.add('select') + value = find_attr_value_("select", node) + if value is not None and "select" not in already_processed: + already_processed.add("select") self.select = value - super(DerivedVariable, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(DerivedVariable, self).buildChildren(child_, node, nodeName_, True) + super(DerivedVariable, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(DerivedVariable, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class DerivedVariable class StateVariable(NamedDimensionalVariable): - member_data_items_ = [ - ] + __hash__ = GeneratedsSuper.__hash__ + member_data_items_ = [] subclass = None superclass = NamedDimensionalVariable - def __init__(self, name=None, dimension=None, description=None, exposure=None, **kwargs_): + + def __init__( + self, + name=None, + dimension=None, + description=None, + exposure=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(StateVariable, self).__init__(name, dimension, description, exposure, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("StateVariable"), self).__init__( + name, dimension, 
description, exposure, **kwargs_ + ) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, StateVariable) + subclass = getSubclassFromModule_(CurrentSubclassModule_, StateVariable) if subclass is not None: return subclass(*args_, **kwargs_) if StateVariable.subclass: return StateVariable.subclass(*args_, **kwargs_) else: return StateVariable(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(StateVariable, self).hasContent_() - ): + + def _hasContent(self): + if super(StateVariable, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='StateVariable', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('StateVariable') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="StateVariable", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("StateVariable") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "StateVariable": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='StateVariable') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='StateVariable', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def 
exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='StateVariable'): - super(StateVariable, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='StateVariable') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='StateVariable', fromsubclass_=False, pretty_print=True): - super(StateVariable, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="StateVariable" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="StateVariable", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="StateVariable", + ): + super(StateVariable, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="StateVariable" + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="StateVariable", + fromsubclass_=False, + pretty_print=True, + ): + super(StateVariable, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in 
node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(StateVariable, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(StateVariable, self).buildChildren(child_, node, nodeName_, True) + + def _buildAttributes(self, node, attrs, already_processed): + super(StateVariable, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(StateVariable, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class StateVariable class ConditionalDerivedVariable(NamedDimensionalVariable): - """LEMS ComponentType for ConditionalDerivedVariable""" + """ConditionalDerivedVariable -- LEMS ComponentType for ConditionalDerivedVariable""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('Case', 'Case', 1, 0, {u'maxOccurs': u'unbounded', u'type': u'Case', u'name': u'Case', u'minOccurs': u'1'}, None), + MemberSpec_( + "Case", + "Case", + 1, + 0, + { + "maxOccurs": "unbounded", + "minOccurs": "1", + "name": "Case", + "type": "Case", + }, + None, + ), ] subclass = None superclass = NamedDimensionalVariable - def __init__(self, name=None, dimension=None, description=None, exposure=None, Case=None, **kwargs_): + + def __init__( + self, + name=None, + dimension=None, + description=None, + exposure=None, + Case=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ConditionalDerivedVariable, self).__init__(name, dimension, description, exposure, **kwargs_) + self.parent_object_ = 
kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ConditionalDerivedVariable"), self).__init__( + name, dimension, description, exposure, **kwargs_ + ) if Case is None: self.Case = [] else: self.Case = Case + self.Case_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ConditionalDerivedVariable) + CurrentSubclassModule_, ConditionalDerivedVariable + ) if subclass is not None: return subclass(*args_, **kwargs_) if ConditionalDerivedVariable.subclass: return ConditionalDerivedVariable.subclass(*args_, **kwargs_) else: return ConditionalDerivedVariable(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - self.Case or - super(ConditionalDerivedVariable, self).hasContent_() - ): + + def _hasContent(self): + if self.Case or super(ConditionalDerivedVariable, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ConditionalDerivedVariable', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ConditionalDerivedVariable') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="ConditionalDerivedVariable", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ConditionalDerivedVariable") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ConditionalDerivedVariable": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - 
already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ConditionalDerivedVariable') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ConditionalDerivedVariable', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ConditionalDerivedVariable", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ConditionalDerivedVariable", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ConditionalDerivedVariable'): - super(ConditionalDerivedVariable, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ConditionalDerivedVariable') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ConditionalDerivedVariable', fromsubclass_=False, pretty_print=True): - super(ConditionalDerivedVariable, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ConditionalDerivedVariable", + ): + super(ConditionalDerivedVariable, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ConditionalDerivedVariable", + ) + + def _exportChildren( + self, + outfile, + level, + 
namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="ConditionalDerivedVariable", + fromsubclass_=False, + pretty_print=True, + ): + super(ConditionalDerivedVariable, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for Case_ in self.Case: - Case_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Case', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.Case_nsprefix_ + ":" + if (UseCapturedNS_ and self.Case_nsprefix_) + else "" + ) + Case_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="Case", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(ConditionalDerivedVariable, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'Case': + + def _buildAttributes(self, node, attrs, already_processed): + super(ConditionalDerivedVariable, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "Case": obj_ = Case.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, 
gds_collector_=gds_collector_) self.Case.append(obj_) - obj_.original_tagname_ = 'Case' - super(ConditionalDerivedVariable, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "Case" + super(ConditionalDerivedVariable, self)._buildChildren( + child_, node, nodeName_, True + ) + + # end class ConditionalDerivedVariable class Case(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('condition', 'xs:string', 0, 1, {'use': u'optional'}), - MemberSpec_('value', 'xs:string', 0, 0, {'use': u'required'}), + MemberSpec_( + "condition", "xs:string", 0, 1, {"use": "optional", "name": "condition"} + ), + MemberSpec_("value", "xs:string", 0, 0, {"use": "required", "name": "value"}), ] subclass = None superclass = None - def __init__(self, condition=None, value=None, **kwargs_): + + def __init__(self, condition=None, value=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.condition = _cast(None, condition) + self.condition_nsprefix_ = None self.value = _cast(None, value) + self.value_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Case) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Case) if subclass is not None: return subclass(*args_, **kwargs_) if Case.subclass: return Case.subclass(*args_, **kwargs_) else: return Case(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - ): + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Case', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Case') + + def export( + self, + 
outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Case", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Case") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Case": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Case') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Case', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Case'): - if self.condition is not None and 'condition' not in already_processed: - already_processed.add('condition') - outfile.write(' condition=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.condition), input_name='condition')), )) - if self.value is not None and 'value' not in already_processed: - already_processed.add('value') - outfile.write(' value=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.value), input_name='value')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Case', fromsubclass_=False, pretty_print=True): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Case" + 
) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Case", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Case" + ): + if self.condition is not None and "condition" not in already_processed: + already_processed.add("condition") + outfile.write( + " condition=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.condition), input_name="condition" + ) + ), + ) + ) + if self.value is not None and "value" not in already_processed: + already_processed.add("value") + outfile.write( + " value=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.value), input_name="value" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Case", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('condition', node) - if value is not None and 'condition' not in already_processed: - already_processed.add('condition') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("condition", node) + if value is not None and 
"condition" not in already_processed: + already_processed.add("condition") self.condition = value - value = find_attr_value_('value', node) - if value is not None and 'value' not in already_processed: - already_processed.add('value') + value = find_attr_value_("value", node) + if value is not None and "value" not in already_processed: + already_processed.add("value") self.value = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass + + # end class Case class TimeDerivative(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('variable', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('value', 'xs:string', 0, 0, {'use': u'required'}), + MemberSpec_( + "variable", "xs:string", 0, 0, {"use": "required", "name": "variable"} + ), + MemberSpec_("value", "xs:string", 0, 0, {"use": "required", "name": "value"}), ] subclass = None superclass = None - def __init__(self, variable=None, value=None, **kwargs_): + + def __init__(self, variable=None, value=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.variable = _cast(None, variable) + self.variable_nsprefix_ = None self.value = _cast(None, value) + self.value_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, TimeDerivative) + subclass = getSubclassFromModule_(CurrentSubclassModule_, TimeDerivative) if subclass is not None: return subclass(*args_, **kwargs_) if TimeDerivative.subclass: return TimeDerivative.subclass(*args_, **kwargs_) else: return TimeDerivative(*args_, **kwargs_) + factory = staticmethod(factory) - 
def hasContent_(self): - if ( - ): + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TimeDerivative', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('TimeDerivative') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="TimeDerivative", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("TimeDerivative") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "TimeDerivative": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TimeDerivative') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TimeDerivative', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TimeDerivative'): - if self.variable is not None and 'variable' not in already_processed: - already_processed.add('variable') - outfile.write(' variable=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.variable), input_name='variable')), )) - if self.value is not None and 'value' not in already_processed: - already_processed.add('value') - outfile.write(' value=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.value), input_name='value')), )) - def 
exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TimeDerivative', fromsubclass_=False, pretty_print=True): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="TimeDerivative" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="TimeDerivative", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="TimeDerivative", + ): + if self.variable is not None and "variable" not in already_processed: + already_processed.add("variable") + outfile.write( + " variable=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.variable), input_name="variable" + ) + ), + ) + ) + if self.value is not None and "value" not in already_processed: + already_processed.add("value") + outfile.write( + " value=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.value), input_name="value" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="TimeDerivative", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + 
self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('variable', node) - if value is not None and 'variable' not in already_processed: - already_processed.add('variable') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("variable", node) + if value is not None and "variable" not in already_processed: + already_processed.add("variable") self.variable = value - value = find_attr_value_('value', node) - if value is not None and 'value' not in already_processed: - already_processed.add('value') + value = find_attr_value_("value", node) + if value is not None and "value" not in already_processed: + already_processed.add("value") self.value = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass + + # end class TimeDerivative class IncludeType(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('href', 'xs:anyURI', 0, 0, {'use': u'required'}), + MemberSpec_("href", "xs:anyURI", 0, 0, {"use": "required", "name": "href"}), ] subclass = None superclass = None - def __init__(self, href=None, **kwargs_): + + def __init__(self, href=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.href = _cast(None, href) + self.href_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, IncludeType) + subclass = getSubclassFromModule_(CurrentSubclassModule_, IncludeType) if subclass is not None: return subclass(*args_, **kwargs_) if 
IncludeType.subclass: return IncludeType.subclass(*args_, **kwargs_) else: return IncludeType(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - ): + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IncludeType', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IncludeType') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IncludeType", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IncludeType") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "IncludeType": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IncludeType') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IncludeType', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IncludeType'): - if self.href is not None and 'href' not in already_processed: - already_processed.add('href') - outfile.write(' href=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.href), input_name='href')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IncludeType', fromsubclass_=False, 
pretty_print=True): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IncludeType" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IncludeType", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="IncludeType", + ): + if self.href is not None and "href" not in already_processed: + already_processed.add("href") + outfile.write( + " href=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.href), input_name="href" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IncludeType", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('href', node) - if value is not None and 'href' not in already_processed: - already_processed.add('href') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("href", node) + if value is not None and 
"href" not in already_processed: + already_processed.add("href") self.href = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass + + # end class IncludeType class Q10ConductanceScaling(GeneratedsSuper): + """Q10ConductanceScaling -- A value for the conductance scaling which varies as a standard function of the difference between the current temperature, **temperature,** and the temperature at which the conductance was originally determined, **experimentalTemp** + \n + :param q10Factor: + :type q10Factor: none + :param experimentalTemp: + :type experimentalTemp: temperature + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('q10_factor', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('experimental_temp', 'Nml2Quantity_temperature', 0, 0, {'use': u'required'}), + MemberSpec_( + "q10_factor", + "Nml2Quantity_none", + 0, + 0, + {"use": "required", "name": "q10_factor"}, + ), + MemberSpec_( + "experimental_temp", + "Nml2Quantity_temperature", + 0, + 0, + {"use": "required", "name": "experimental_temp"}, + ), ] subclass = None superclass = None - def __init__(self, q10_factor=None, experimental_temp=None, **kwargs_): + + def __init__( + self, q10_factor=None, experimental_temp=None, gds_collector_=None, **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.q10_factor = _cast(None, q10_factor) + self.q10_factor_nsprefix_ = None self.experimental_temp = _cast(None, experimental_temp) + self.experimental_temp_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, Q10ConductanceScaling) + 
CurrentSubclassModule_, Q10ConductanceScaling + ) if subclass is not None: return subclass(*args_, **kwargs_) if Q10ConductanceScaling.subclass: return Q10ConductanceScaling.subclass(*args_, **kwargs_) else: return Q10ConductanceScaling(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] + self.validate_Nml2Quantity_none_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_none_patterns_, + ) + ) + + validate_Nml2Quantity_none_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?)$"] + ] + def validate_Nml2Quantity_temperature(self, value): # Validate type Nml2Quantity_temperature, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_temperature_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_temperature_patterns_, )) - validate_Nml2Quantity_temperature_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(degC)$']] - def hasContent_(self): if ( - + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_temperature_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_temperature_patterns_, + ) + ) + + validate_Nml2Quantity_temperature_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(degC))$"] + ] + + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Q10ConductanceScaling', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Q10ConductanceScaling') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Q10ConductanceScaling", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Q10ConductanceScaling") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Q10ConductanceScaling": name_ = self.original_tagname_ + if UseCapturedNS_ and 
self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Q10ConductanceScaling') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Q10ConductanceScaling', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Q10ConductanceScaling'): - if self.q10_factor is not None and 'q10_factor' not in already_processed: - already_processed.add('q10_factor') - outfile.write(' q10Factor=%s' % (quote_attrib(self.q10_factor), )) - if self.experimental_temp is not None and 'experimental_temp' not in already_processed: - already_processed.add('experimental_temp') - outfile.write(' experimentalTemp=%s' % (quote_attrib(self.experimental_temp), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Q10ConductanceScaling', fromsubclass_=False, pretty_print=True): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="Q10ConductanceScaling", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Q10ConductanceScaling", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + 
name_="Q10ConductanceScaling", + ): + if self.q10_factor is not None and "q10_factor" not in already_processed: + already_processed.add("q10_factor") + outfile.write( + " q10Factor=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.q10_factor), input_name="q10Factor" + ) + ), + ) + ) + if ( + self.experimental_temp is not None + and "experimental_temp" not in already_processed + ): + already_processed.add("experimental_temp") + outfile.write( + " experimentalTemp=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.experimental_temp), + input_name="experimentalTemp", + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Q10ConductanceScaling", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('q10Factor', node) - if value is not None and 'q10Factor' not in already_processed: - already_processed.add('q10Factor') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("q10Factor", node) + if value is not None and "q10Factor" not in already_processed: + already_processed.add("q10Factor") self.q10_factor = value - self.validate_Nml2Quantity_none(self.q10_factor) # validate type Nml2Quantity_none - value = find_attr_value_('experimentalTemp', node) - if value is not None and 
'experimentalTemp' not in already_processed: - already_processed.add('experimentalTemp') + self.validate_Nml2Quantity_none( + self.q10_factor + ) # validate type Nml2Quantity_none + value = find_attr_value_("experimentalTemp", node) + if value is not None and "experimentalTemp" not in already_processed: + already_processed.add("experimentalTemp") self.experimental_temp = value - self.validate_Nml2Quantity_temperature(self.experimental_temp) # validate type Nml2Quantity_temperature - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + self.validate_Nml2Quantity_temperature( + self.experimental_temp + ) # validate type Nml2Quantity_temperature + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass + + # end class Q10ConductanceScaling class Q10Settings(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('type', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('fixed_q10', 'Nml2Quantity_none', 0, 1, {'use': u'optional'}), - MemberSpec_('q10_factor', 'Nml2Quantity_none', 0, 1, {'use': u'optional'}), - MemberSpec_('experimental_temp', 'Nml2Quantity_temperature', 0, 1, {'use': u'optional'}), + MemberSpec_("type", "NmlId", 0, 0, {"use": "required", "name": "type"}), + MemberSpec_( + "fixed_q10", + "Nml2Quantity_none", + 0, + 1, + {"use": "optional", "name": "fixed_q10"}, + ), + MemberSpec_( + "q10_factor", + "Nml2Quantity_none", + 0, + 1, + {"use": "optional", "name": "q10_factor"}, + ), + MemberSpec_( + "experimental_temp", + "Nml2Quantity_temperature", + 0, + 1, + {"use": "optional", "name": "experimental_temp"}, + ), ] subclass = None superclass = None - def __init__(self, type=None, fixed_q10=None, q10_factor=None, experimental_temp=None, **kwargs_): + + def __init__( + self, + type=None, + fixed_q10=None, + q10_factor=None, + experimental_temp=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + 
self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.type = _cast(None, type) + self.type_nsprefix_ = None self.fixed_q10 = _cast(None, fixed_q10) + self.fixed_q10_nsprefix_ = None self.q10_factor = _cast(None, q10_factor) + self.q10_factor_nsprefix_ = None self.experimental_temp = _cast(None, experimental_temp) + self.experimental_temp_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Q10Settings) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Q10Settings) if subclass is not None: return subclass(*args_, **kwargs_) if Q10Settings.subclass: return Q10Settings.subclass(*args_, **kwargs_) else: return Q10Settings(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] + self.validate_Nml2Quantity_none_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_none_patterns_, + ) + ) + + validate_Nml2Quantity_none_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?)$"] + ] + def validate_Nml2Quantity_temperature(self, value): # Validate type Nml2Quantity_temperature, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_temperature_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_temperature_patterns_, )) - validate_Nml2Quantity_temperature_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(degC)$']] - def hasContent_(self): if ( - + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_temperature_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_temperature_patterns_, + ) + ) + + validate_Nml2Quantity_temperature_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(degC))$"] + ] + + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Q10Settings', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Q10Settings') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Q10Settings", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Q10Settings") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Q10Settings": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = 
self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Q10Settings') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Q10Settings', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Q10Settings'): - if self.type is not None and 'type' not in already_processed: - already_processed.add('type') - outfile.write(' type=%s' % (quote_attrib(self.type), )) - if self.fixed_q10 is not None and 'fixed_q10' not in already_processed: - already_processed.add('fixed_q10') - outfile.write(' fixedQ10=%s' % (quote_attrib(self.fixed_q10), )) - if self.q10_factor is not None and 'q10_factor' not in already_processed: - already_processed.add('q10_factor') - outfile.write(' q10Factor=%s' % (quote_attrib(self.q10_factor), )) - if self.experimental_temp is not None and 'experimental_temp' not in already_processed: - already_processed.add('experimental_temp') - outfile.write(' experimentalTemp=%s' % (quote_attrib(self.experimental_temp), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Q10Settings', fromsubclass_=False, pretty_print=True): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Q10Settings" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Q10Settings", + 
pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="Q10Settings", + ): + if self.type is not None and "type" not in already_processed: + already_processed.add("type") + outfile.write( + " type=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.type), input_name="type" + ) + ), + ) + ) + if self.fixed_q10 is not None and "fixed_q10" not in already_processed: + already_processed.add("fixed_q10") + outfile.write( + " fixedQ10=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.fixed_q10), input_name="fixedQ10" + ) + ), + ) + ) + if self.q10_factor is not None and "q10_factor" not in already_processed: + already_processed.add("q10_factor") + outfile.write( + " q10Factor=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.q10_factor), input_name="q10Factor" + ) + ), + ) + ) + if ( + self.experimental_temp is not None + and "experimental_temp" not in already_processed + ): + already_processed.add("experimental_temp") + outfile.write( + " experimentalTemp=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.experimental_temp), + input_name="experimentalTemp", + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Q10Settings", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, 
nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('type', node) - if value is not None and 'type' not in already_processed: - already_processed.add('type') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") self.type = value - self.validate_NmlId(self.type) # validate type NmlId - value = find_attr_value_('fixedQ10', node) - if value is not None and 'fixedQ10' not in already_processed: - already_processed.add('fixedQ10') + self.validate_NmlId(self.type) # validate type NmlId + value = find_attr_value_("fixedQ10", node) + if value is not None and "fixedQ10" not in already_processed: + already_processed.add("fixedQ10") self.fixed_q10 = value - self.validate_Nml2Quantity_none(self.fixed_q10) # validate type Nml2Quantity_none - value = find_attr_value_('q10Factor', node) - if value is not None and 'q10Factor' not in already_processed: - already_processed.add('q10Factor') + self.validate_Nml2Quantity_none( + self.fixed_q10 + ) # validate type Nml2Quantity_none + value = find_attr_value_("q10Factor", node) + if value is not None and "q10Factor" not in already_processed: + already_processed.add("q10Factor") self.q10_factor = value - self.validate_Nml2Quantity_none(self.q10_factor) # validate type Nml2Quantity_none - value = find_attr_value_('experimentalTemp', node) - if value is not None and 'experimentalTemp' not in already_processed: - already_processed.add('experimentalTemp') + self.validate_Nml2Quantity_none( + self.q10_factor + ) # validate type Nml2Quantity_none + value = find_attr_value_("experimentalTemp", node) + if value is not None and "experimentalTemp" not in already_processed: + already_processed.add("experimentalTemp") self.experimental_temp = value - 
self.validate_Nml2Quantity_temperature(self.experimental_temp) # validate type Nml2Quantity_temperature - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + self.validate_Nml2Quantity_temperature( + self.experimental_temp + ) # validate type Nml2Quantity_temperature + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass + + # end class Q10Settings class HHRate(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('type', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('rate', 'Nml2Quantity_pertime', 0, 1, {'use': u'optional'}), - MemberSpec_('midpoint', 'Nml2Quantity_voltage', 0, 1, {'use': u'optional'}), - MemberSpec_('scale', 'Nml2Quantity_voltage', 0, 1, {'use': u'optional'}), + MemberSpec_("type", "NmlId", 0, 0, {"use": "required", "name": "type"}), + MemberSpec_( + "rate", "Nml2Quantity_pertime", 0, 1, {"use": "optional", "name": "rate"} + ), + MemberSpec_( + "midpoint", + "Nml2Quantity_voltage", + 0, + 1, + {"use": "optional", "name": "midpoint"}, + ), + MemberSpec_( + "scale", "Nml2Quantity_voltage", 0, 1, {"use": "optional", "name": "scale"} + ), ] subclass = None superclass = None - def __init__(self, type=None, rate=None, midpoint=None, scale=None, **kwargs_): + + def __init__( + self, + type=None, + rate=None, + midpoint=None, + scale=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.type = _cast(None, type) + self.type_nsprefix_ = None self.rate = _cast(None, rate) + self.rate_nsprefix_ = None self.midpoint = _cast(None, midpoint) + self.midpoint_nsprefix_ = None self.scale = _cast(None, scale) + self.scale_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - 
subclass = getSubclassFromModule_( - CurrentSubclassModule_, HHRate) + subclass = getSubclassFromModule_(CurrentSubclassModule_, HHRate) if subclass is not None: return subclass(*args_, **kwargs_) if HHRate.subclass: return HHRate.subclass(*args_, **kwargs_) else: return HHRate(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + def validate_Nml2Quantity_pertime(self, value): # Validate type Nml2Quantity_pertime, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_pertime_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_pertime_patterns_, )) - validate_Nml2Quantity_pertime_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz)$']] + self.validate_Nml2Quantity_pertime_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_pertime_patterns_, + ) + ) + + validate_Nml2Quantity_pertime_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz))$"] + ] + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] - def hasContent_(self): if ( - + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_voltage_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$"] + ] + + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HHRate', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('HHRate') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="HHRate", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("HHRate") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "HHRate": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) 
- outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='HHRate') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='HHRate', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='HHRate'): - if self.type is not None and 'type' not in already_processed: - already_processed.add('type') - outfile.write(' type=%s' % (quote_attrib(self.type), )) - if self.rate is not None and 'rate' not in already_processed: - already_processed.add('rate') - outfile.write(' rate=%s' % (quote_attrib(self.rate), )) - if self.midpoint is not None and 'midpoint' not in already_processed: - already_processed.add('midpoint') - outfile.write(' midpoint=%s' % (quote_attrib(self.midpoint), )) - if self.scale is not None and 'scale' not in already_processed: - already_processed.add('scale') - outfile.write(' scale=%s' % (quote_attrib(self.scale), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HHRate', fromsubclass_=False, pretty_print=True): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="HHRate" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="HHRate", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, 
namespaceprefix_="", name_="HHRate" + ): + if self.type is not None and "type" not in already_processed: + already_processed.add("type") + outfile.write( + " type=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.type), input_name="type" + ) + ), + ) + ) + if self.rate is not None and "rate" not in already_processed: + already_processed.add("rate") + outfile.write( + " rate=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.rate), input_name="rate" + ) + ), + ) + ) + if self.midpoint is not None and "midpoint" not in already_processed: + already_processed.add("midpoint") + outfile.write( + " midpoint=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.midpoint), input_name="midpoint" + ) + ), + ) + ) + if self.scale is not None and "scale" not in already_processed: + already_processed.add("scale") + outfile.write( + " scale=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.scale), input_name="scale" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="HHRate", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('type', node) - if value is not None and 'type' not in already_processed: - already_processed.add('type') + + def _buildAttributes(self, node, attrs, 
already_processed): + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") self.type = value - self.validate_NmlId(self.type) # validate type NmlId - value = find_attr_value_('rate', node) - if value is not None and 'rate' not in already_processed: - already_processed.add('rate') + self.validate_NmlId(self.type) # validate type NmlId + value = find_attr_value_("rate", node) + if value is not None and "rate" not in already_processed: + already_processed.add("rate") self.rate = value - self.validate_Nml2Quantity_pertime(self.rate) # validate type Nml2Quantity_pertime - value = find_attr_value_('midpoint', node) - if value is not None and 'midpoint' not in already_processed: - already_processed.add('midpoint') + self.validate_Nml2Quantity_pertime( + self.rate + ) # validate type Nml2Quantity_pertime + value = find_attr_value_("midpoint", node) + if value is not None and "midpoint" not in already_processed: + already_processed.add("midpoint") self.midpoint = value - self.validate_Nml2Quantity_voltage(self.midpoint) # validate type Nml2Quantity_voltage - value = find_attr_value_('scale', node) - if value is not None and 'scale' not in already_processed: - already_processed.add('scale') + self.validate_Nml2Quantity_voltage( + self.midpoint + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("scale", node) + if value is not None and "scale" not in already_processed: + already_processed.add("scale") self.scale = value - self.validate_Nml2Quantity_voltage(self.scale) # validate type Nml2Quantity_voltage - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + self.validate_Nml2Quantity_voltage( + self.scale + ) # validate type Nml2Quantity_voltage + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass + + # end class HHRate class HHVariable(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ 
member_data_items_ = [ - MemberSpec_('type', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('rate', 'xs:float', 0, 1, {'use': u'optional'}), - MemberSpec_('midpoint', 'Nml2Quantity_voltage', 0, 1, {'use': u'optional'}), - MemberSpec_('scale', 'Nml2Quantity_voltage', 0, 1, {'use': u'optional'}), + MemberSpec_("type", "NmlId", 0, 0, {"use": "required", "name": "type"}), + MemberSpec_("rate", "xs:float", 0, 1, {"use": "optional", "name": "rate"}), + MemberSpec_( + "midpoint", + "Nml2Quantity_voltage", + 0, + 1, + {"use": "optional", "name": "midpoint"}, + ), + MemberSpec_( + "scale", "Nml2Quantity_voltage", 0, 1, {"use": "optional", "name": "scale"} + ), ] subclass = None superclass = None - def __init__(self, type=None, rate=None, midpoint=None, scale=None, **kwargs_): + + def __init__( + self, + type=None, + rate=None, + midpoint=None, + scale=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.type = _cast(None, type) + self.type_nsprefix_ = None self.rate = _cast(float, rate) + self.rate_nsprefix_ = None self.midpoint = _cast(None, midpoint) + self.midpoint_nsprefix_ = None self.scale = _cast(None, scale) + self.scale_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, HHVariable) + subclass = getSubclassFromModule_(CurrentSubclassModule_, HHVariable) if subclass is not None: return subclass(*args_, **kwargs_) if HHVariable.subclass: return HHVariable.subclass(*args_, **kwargs_) else: return HHVariable(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] - def hasContent_(self): if ( - + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): - return True - else: + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_voltage_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$"] + ] + + def _hasContent(self): + if (): + return True + else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HHVariable', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('HHVariable') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="HHVariable", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("HHVariable") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "HHVariable": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + 
":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='HHVariable') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='HHVariable', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='HHVariable'): - if self.type is not None and 'type' not in already_processed: - already_processed.add('type') - outfile.write(' type=%s' % (quote_attrib(self.type), )) - if self.rate is not None and 'rate' not in already_processed: - already_processed.add('rate') - outfile.write(' rate="%s"' % self.gds_format_float(self.rate, input_name='rate')) - if self.midpoint is not None and 'midpoint' not in already_processed: - already_processed.add('midpoint') - outfile.write(' midpoint=%s' % (quote_attrib(self.midpoint), )) - if self.scale is not None and 'scale' not in already_processed: - already_processed.add('scale') - outfile.write(' scale=%s' % (quote_attrib(self.scale), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HHVariable', fromsubclass_=False, pretty_print=True): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="HHVariable" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="HHVariable", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + 
outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="HHVariable" + ): + if self.type is not None and "type" not in already_processed: + already_processed.add("type") + outfile.write( + " type=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.type), input_name="type" + ) + ), + ) + ) + if self.rate is not None and "rate" not in already_processed: + already_processed.add("rate") + outfile.write( + ' rate="%s"' % self.gds_format_float(self.rate, input_name="rate") + ) + if self.midpoint is not None and "midpoint" not in already_processed: + already_processed.add("midpoint") + outfile.write( + " midpoint=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.midpoint), input_name="midpoint" + ) + ), + ) + ) + if self.scale is not None and "scale" not in already_processed: + already_processed.add("scale") + outfile.write( + " scale=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.scale), input_name="scale" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="HHVariable", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('type', node) - if value is not None and 'type' not in already_processed: - 
already_processed.add('type') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") self.type = value - self.validate_NmlId(self.type) # validate type NmlId - value = find_attr_value_('rate', node) - if value is not None and 'rate' not in already_processed: - already_processed.add('rate') - try: - self.rate = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (rate): %s' % exp) - value = find_attr_value_('midpoint', node) - if value is not None and 'midpoint' not in already_processed: - already_processed.add('midpoint') + self.validate_NmlId(self.type) # validate type NmlId + value = find_attr_value_("rate", node) + if value is not None and "rate" not in already_processed: + already_processed.add("rate") + value = self.gds_parse_float(value, node, "rate") + self.rate = value + value = find_attr_value_("midpoint", node) + if value is not None and "midpoint" not in already_processed: + already_processed.add("midpoint") self.midpoint = value - self.validate_Nml2Quantity_voltage(self.midpoint) # validate type Nml2Quantity_voltage - value = find_attr_value_('scale', node) - if value is not None and 'scale' not in already_processed: - already_processed.add('scale') + self.validate_Nml2Quantity_voltage( + self.midpoint + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("scale", node) + if value is not None and "scale" not in already_processed: + already_processed.add("scale") self.scale = value - self.validate_Nml2Quantity_voltage(self.scale) # validate type Nml2Quantity_voltage - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + self.validate_Nml2Quantity_voltage( + self.scale + ) # validate type Nml2Quantity_voltage + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass + + # end class HHVariable class 
HHTime(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('type', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('rate', 'Nml2Quantity_time', 0, 1, {'use': u'optional'}), - MemberSpec_('midpoint', 'Nml2Quantity_voltage', 0, 1, {'use': u'optional'}), - MemberSpec_('scale', 'Nml2Quantity_voltage', 0, 1, {'use': u'optional'}), - MemberSpec_('tau', 'Nml2Quantity_time', 0, 1, {'use': u'optional'}), + MemberSpec_("type", "NmlId", 0, 0, {"use": "required", "name": "type"}), + MemberSpec_( + "rate", "Nml2Quantity_time", 0, 1, {"use": "optional", "name": "rate"} + ), + MemberSpec_( + "midpoint", + "Nml2Quantity_voltage", + 0, + 1, + {"use": "optional", "name": "midpoint"}, + ), + MemberSpec_( + "scale", "Nml2Quantity_voltage", 0, 1, {"use": "optional", "name": "scale"} + ), + MemberSpec_( + "tau", "Nml2Quantity_time", 0, 1, {"use": "optional", "name": "tau"} + ), ] subclass = None superclass = None - def __init__(self, type=None, rate=None, midpoint=None, scale=None, tau=None, **kwargs_): + + def __init__( + self, + type=None, + rate=None, + midpoint=None, + scale=None, + tau=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.type = _cast(None, type) + self.type_nsprefix_ = None self.rate = _cast(None, rate) + self.rate_nsprefix_ = None self.midpoint = _cast(None, midpoint) + self.midpoint_nsprefix_ = None self.scale = _cast(None, scale) + self.scale_nsprefix_ = None self.tau = _cast(None, tau) + self.tau_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, HHTime) + subclass = getSubclassFromModule_(CurrentSubclassModule_, HHTime) if subclass is not None: return subclass(*args_, **kwargs_) if 
HHTime.subclass: return HHTime.subclass(*args_, **kwargs_) else: return HHTime(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$"] + ] + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] - def hasContent_(self): if ( - + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_voltage_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$"] + ] + + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HHTime', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('HHTime') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="HHTime", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("HHTime") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "HHTime": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) 
- outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='HHTime') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='HHTime', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='HHTime'): - if self.type is not None and 'type' not in already_processed: - already_processed.add('type') - outfile.write(' type=%s' % (quote_attrib(self.type), )) - if self.rate is not None and 'rate' not in already_processed: - already_processed.add('rate') - outfile.write(' rate=%s' % (quote_attrib(self.rate), )) - if self.midpoint is not None and 'midpoint' not in already_processed: - already_processed.add('midpoint') - outfile.write(' midpoint=%s' % (quote_attrib(self.midpoint), )) - if self.scale is not None and 'scale' not in already_processed: - already_processed.add('scale') - outfile.write(' scale=%s' % (quote_attrib(self.scale), )) - if self.tau is not None and 'tau' not in already_processed: - already_processed.add('tau') - outfile.write(' tau=%s' % (quote_attrib(self.tau), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HHTime', fromsubclass_=False, pretty_print=True): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="HHTime" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="HHTime", + pretty_print=pretty_print, + ) + outfile.write("%s" % 
(namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="HHTime" + ): + if self.type is not None and "type" not in already_processed: + already_processed.add("type") + outfile.write( + " type=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.type), input_name="type" + ) + ), + ) + ) + if self.rate is not None and "rate" not in already_processed: + already_processed.add("rate") + outfile.write( + " rate=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.rate), input_name="rate" + ) + ), + ) + ) + if self.midpoint is not None and "midpoint" not in already_processed: + already_processed.add("midpoint") + outfile.write( + " midpoint=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.midpoint), input_name="midpoint" + ) + ), + ) + ) + if self.scale is not None and "scale" not in already_processed: + already_processed.add("scale") + outfile.write( + " scale=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.scale), input_name="scale" + ) + ), + ) + ) + if self.tau is not None and "tau" not in already_processed: + already_processed.add("tau") + outfile.write( + " tau=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.tau), input_name="tau") + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="HHTime", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - 
self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('type', node) - if value is not None and 'type' not in already_processed: - already_processed.add('type') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") self.type = value - self.validate_NmlId(self.type) # validate type NmlId - value = find_attr_value_('rate', node) - if value is not None and 'rate' not in already_processed: - already_processed.add('rate') + self.validate_NmlId(self.type) # validate type NmlId + value = find_attr_value_("rate", node) + if value is not None and "rate" not in already_processed: + already_processed.add("rate") self.rate = value - self.validate_Nml2Quantity_time(self.rate) # validate type Nml2Quantity_time - value = find_attr_value_('midpoint', node) - if value is not None and 'midpoint' not in already_processed: - already_processed.add('midpoint') + self.validate_Nml2Quantity_time( + self.rate + ) # validate type Nml2Quantity_time + value = find_attr_value_("midpoint", node) + if value is not None and "midpoint" not in already_processed: + already_processed.add("midpoint") self.midpoint = value - self.validate_Nml2Quantity_voltage(self.midpoint) # validate type Nml2Quantity_voltage - value = find_attr_value_('scale', node) - if value is not None and 'scale' not in already_processed: - already_processed.add('scale') + self.validate_Nml2Quantity_voltage( + self.midpoint + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("scale", node) + if value is not None and "scale" not in already_processed: + already_processed.add("scale") self.scale = value - self.validate_Nml2Quantity_voltage(self.scale) # validate type Nml2Quantity_voltage - value = 
find_attr_value_('tau', node) - if value is not None and 'tau' not in already_processed: - already_processed.add('tau') + self.validate_Nml2Quantity_voltage( + self.scale + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("tau", node) + if value is not None and "tau" not in already_processed: + already_processed.add("tau") self.tau = value - self.validate_Nml2Quantity_time(self.tau) # validate type Nml2Quantity_time - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + self.validate_Nml2Quantity_time(self.tau) # validate type Nml2Quantity_time + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass + + # end class HHTime class BlockMechanism(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('type', 'BlockTypes', 0, 0, {'use': u'required'}), - MemberSpec_('species', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('block_concentration', 'Nml2Quantity_concentration', 0, 0, {'use': u'required'}), - MemberSpec_('scaling_conc', 'Nml2Quantity_concentration', 0, 0, {'use': u'required'}), - MemberSpec_('scaling_volt', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), + MemberSpec_("type", "BlockTypes", 0, 0, {"use": "required", "name": "type"}), + MemberSpec_("species", "NmlId", 0, 0, {"use": "required", "name": "species"}), + MemberSpec_( + "block_concentration", + "Nml2Quantity_concentration", + 0, + 0, + {"use": "required", "name": "block_concentration"}, + ), + MemberSpec_( + "scaling_conc", + "Nml2Quantity_concentration", + 0, + 0, + {"use": "required", "name": "scaling_conc"}, + ), + MemberSpec_( + "scaling_volt", + "Nml2Quantity_voltage", + 0, + 0, + {"use": "required", "name": "scaling_volt"}, + ), ] subclass = None superclass = None - def __init__(self, type=None, species=None, block_concentration=None, scaling_conc=None, scaling_volt=None, **kwargs_): + + def __init__( + self, + type=None, + species=None, + 
block_concentration=None, + scaling_conc=None, + scaling_volt=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.type = _cast(None, type) + self.type_nsprefix_ = None self.species = _cast(None, species) + self.species_nsprefix_ = None self.block_concentration = _cast(None, block_concentration) + self.block_concentration_nsprefix_ = None self.scaling_conc = _cast(None, scaling_conc) + self.scaling_conc_nsprefix_ = None self.scaling_volt = _cast(None, scaling_volt) + self.scaling_volt_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, BlockMechanism) + subclass = getSubclassFromModule_(CurrentSubclassModule_, BlockMechanism) if subclass is not None: return subclass(*args_, **kwargs_) if BlockMechanism.subclass: return BlockMechanism.subclass(*args_, **kwargs_) else: return BlockMechanism(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_BlockTypes(self, value): # Validate type BlockTypes, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - value = str(value) - enumerations = ['voltageConcDepBlockMechanism'] - enumeration_respectee = False - for enum in enumerations: - if value == enum: - enumeration_respectee = True - break - if not enumeration_respectee: - warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on BlockTypes' % {"value" : value.encode("utf-8")} ) + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + value = value + enumerations = ["voltageConcDepBlockMechanism"] + if value not in enumerations: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on BlockTypes' + % {"value": encode_str_2_3(value), "lineno": lineno} + ) + result = False + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + def validate_Nml2Quantity_concentration(self, value): # Validate type Nml2Quantity_concentration, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_concentration_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_concentration_patterns_, )) - validate_Nml2Quantity_concentration_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m3|mol_per_cm3|M|mM)$']] + self.validate_Nml2Quantity_concentration_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_concentration_patterns_, + ) + ) + + validate_Nml2Quantity_concentration_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m3|mol_per_cm3|M|mM))$"] + ] + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] - def hasContent_(self): if ( - + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_voltage_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$"] + ] + + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BlockMechanism', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BlockMechanism') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BlockMechanism", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BlockMechanism") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "BlockMechanism": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" 
showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BlockMechanism') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BlockMechanism', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BlockMechanism'): - if self.type is not None and 'type' not in already_processed: - already_processed.add('type') - outfile.write(' type=%s' % (quote_attrib(self.type), )) - if self.species is not None and 'species' not in already_processed: - already_processed.add('species') - outfile.write(' species=%s' % (quote_attrib(self.species), )) - if self.block_concentration is not None and 'block_concentration' not in already_processed: - already_processed.add('block_concentration') - outfile.write(' blockConcentration=%s' % (quote_attrib(self.block_concentration), )) - if self.scaling_conc is not None and 'scaling_conc' not in already_processed: - already_processed.add('scaling_conc') - outfile.write(' scalingConc=%s' % (quote_attrib(self.scaling_conc), )) - if self.scaling_volt is not None and 'scaling_volt' not in already_processed: - already_processed.add('scaling_volt') - outfile.write(' scalingVolt=%s' % (quote_attrib(self.scaling_volt), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BlockMechanism', fromsubclass_=False, pretty_print=True): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, 
name_="BlockMechanism" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BlockMechanism", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BlockMechanism", + ): + if self.type is not None and "type" not in already_processed: + already_processed.add("type") + outfile.write( + " type=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.type), input_name="type" + ) + ), + ) + ) + if self.species is not None and "species" not in already_processed: + already_processed.add("species") + outfile.write( + " species=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.species), input_name="species" + ) + ), + ) + ) + if ( + self.block_concentration is not None + and "block_concentration" not in already_processed + ): + already_processed.add("block_concentration") + outfile.write( + " blockConcentration=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.block_concentration), + input_name="blockConcentration", + ) + ), + ) + ) + if self.scaling_conc is not None and "scaling_conc" not in already_processed: + already_processed.add("scaling_conc") + outfile.write( + " scalingConc=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.scaling_conc), input_name="scalingConc" + ) + ), + ) + ) + if self.scaling_volt is not None and "scaling_volt" not in already_processed: + already_processed.add("scaling_volt") + outfile.write( + " scalingVolt=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.scaling_volt), input_name="scalingVolt" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BlockMechanism", + 
fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('type', node) - if value is not None and 'type' not in already_processed: - already_processed.add('type') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") self.type = value - self.validate_BlockTypes(self.type) # validate type BlockTypes - value = find_attr_value_('species', node) - if value is not None and 'species' not in already_processed: - already_processed.add('species') + self.validate_BlockTypes(self.type) # validate type BlockTypes + value = find_attr_value_("species", node) + if value is not None and "species" not in already_processed: + already_processed.add("species") self.species = value - self.validate_NmlId(self.species) # validate type NmlId - value = find_attr_value_('blockConcentration', node) - if value is not None and 'blockConcentration' not in already_processed: - already_processed.add('blockConcentration') + self.validate_NmlId(self.species) # validate type NmlId + value = find_attr_value_("blockConcentration", node) + if value is not None and "blockConcentration" not in already_processed: + already_processed.add("blockConcentration") self.block_concentration = value - 
self.validate_Nml2Quantity_concentration(self.block_concentration) # validate type Nml2Quantity_concentration - value = find_attr_value_('scalingConc', node) - if value is not None and 'scalingConc' not in already_processed: - already_processed.add('scalingConc') + self.validate_Nml2Quantity_concentration( + self.block_concentration + ) # validate type Nml2Quantity_concentration + value = find_attr_value_("scalingConc", node) + if value is not None and "scalingConc" not in already_processed: + already_processed.add("scalingConc") self.scaling_conc = value - self.validate_Nml2Quantity_concentration(self.scaling_conc) # validate type Nml2Quantity_concentration - value = find_attr_value_('scalingVolt', node) - if value is not None and 'scalingVolt' not in already_processed: - already_processed.add('scalingVolt') + self.validate_Nml2Quantity_concentration( + self.scaling_conc + ) # validate type Nml2Quantity_concentration + value = find_attr_value_("scalingVolt", node) + if value is not None and "scalingVolt" not in already_processed: + already_processed.add("scalingVolt") self.scaling_volt = value - self.validate_Nml2Quantity_voltage(self.scaling_volt) # validate type Nml2Quantity_voltage - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + self.validate_Nml2Quantity_voltage( + self.scaling_volt + ) # validate type Nml2Quantity_voltage + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass + + # end class BlockMechanism class PlasticityMechanism(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('type', 'PlasticityTypes', 0, 0, {'use': u'required'}), - MemberSpec_('init_release_prob', 'ZeroToOne', 0, 0, {'use': u'required'}), - MemberSpec_('tau_rec', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('tau_fac', 'Nml2Quantity_time', 0, 1, {'use': u'optional'}), + MemberSpec_( + "type", "PlasticityTypes", 0, 0, {"use": "required", "name": 
"type"} + ), + MemberSpec_( + "init_release_prob", + "ZeroToOne", + 0, + 0, + {"use": "required", "name": "init_release_prob"}, + ), + MemberSpec_( + "tau_rec", "Nml2Quantity_time", 0, 0, {"use": "required", "name": "tau_rec"} + ), + MemberSpec_( + "tau_fac", "Nml2Quantity_time", 0, 1, {"use": "optional", "name": "tau_fac"} + ), ] subclass = None superclass = None - def __init__(self, type=None, init_release_prob=None, tau_rec=None, tau_fac=None, **kwargs_): + + def __init__( + self, + type=None, + init_release_prob=None, + tau_rec=None, + tau_fac=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.type = _cast(None, type) + self.type_nsprefix_ = None self.init_release_prob = _cast(float, init_release_prob) + self.init_release_prob_nsprefix_ = None self.tau_rec = _cast(None, tau_rec) + self.tau_rec_nsprefix_ = None self.tau_fac = _cast(None, tau_fac) + self.tau_fac_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, PlasticityMechanism) + CurrentSubclassModule_, PlasticityMechanism + ) if subclass is not None: return subclass(*args_, **kwargs_) if PlasticityMechanism.subclass: return PlasticityMechanism.subclass(*args_, **kwargs_) else: return PlasticityMechanism(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_PlasticityTypes(self, value): # Validate type PlasticityTypes, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - value = str(value) - enumerations = ['tsodyksMarkramDepMechanism', 'tsodyksMarkramDepFacMechanism'] - enumeration_respectee = False - for enum in enumerations: - if value == enum: - enumeration_respectee = True - break - if not enumeration_respectee: - warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on PlasticityTypes' % {"value" : value.encode("utf-8")} ) + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + value = value + enumerations = [ + "tsodyksMarkramDepMechanism", + "tsodyksMarkramDepFacMechanism", + ] + if value not in enumerations: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on PlasticityTypes' + % {"value": encode_str_2_3(value), "lineno": lineno} + ) + result = False + def validate_ZeroToOne(self, value): # Validate type ZeroToOne, a restriction on xs:float. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, float): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (float)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if value < 0: - warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on ZeroToOne' % {"value" : value} ) + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on ZeroToOne' + % {"value": value, "lineno": lineno} + ) + result = False if value > 1: - warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on ZeroToOne' % {"value" : value} ) + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s does not match xsd maxInclusive restriction on ZeroToOne' + % {"value": value, "lineno": lineno} + ) + result = False + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] - def hasContent_(self): if ( - + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_time_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$"] + ] + + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PlasticityMechanism', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('PlasticityMechanism') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PlasticityMechanism", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("PlasticityMechanism") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "PlasticityMechanism": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ 
+ ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PlasticityMechanism') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PlasticityMechanism', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PlasticityMechanism'): - if self.type is not None and 'type' not in already_processed: - already_processed.add('type') - outfile.write(' type=%s' % (quote_attrib(self.type), )) - if self.init_release_prob is not None and 'init_release_prob' not in already_processed: - already_processed.add('init_release_prob') - outfile.write(' initReleaseProb=%s' % (quote_attrib(self.init_release_prob), )) - if self.tau_rec is not None and 'tau_rec' not in already_processed: - already_processed.add('tau_rec') - outfile.write(' tauRec=%s' % (quote_attrib(self.tau_rec), )) - if self.tau_fac is not None and 'tau_fac' not in already_processed: - already_processed.add('tau_fac') - outfile.write(' tauFac=%s' % (quote_attrib(self.tau_fac), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PlasticityMechanism', fromsubclass_=False, pretty_print=True): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="PlasticityMechanism", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="PlasticityMechanism", + 
pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="PlasticityMechanism", + ): + if self.type is not None and "type" not in already_processed: + already_processed.add("type") + outfile.write( + " type=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.type), input_name="type" + ) + ), + ) + ) + if ( + self.init_release_prob is not None + and "init_release_prob" not in already_processed + ): + already_processed.add("init_release_prob") + outfile.write( + ' initReleaseProb="%s"' + % self.gds_format_float( + self.init_release_prob, input_name="initReleaseProb" + ) + ) + if self.tau_rec is not None and "tau_rec" not in already_processed: + already_processed.add("tau_rec") + outfile.write( + " tauRec=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.tau_rec), input_name="tauRec" + ) + ), + ) + ) + if self.tau_fac is not None and "tau_fac" not in already_processed: + already_processed.add("tau_fac") + outfile.write( + " tauFac=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.tau_fac), input_name="tauFac" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PlasticityMechanism", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, 
gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('type', node) - if value is not None and 'type' not in already_processed: - already_processed.add('type') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") self.type = value - self.validate_PlasticityTypes(self.type) # validate type PlasticityTypes - value = find_attr_value_('initReleaseProb', node) - if value is not None and 'initReleaseProb' not in already_processed: - already_processed.add('initReleaseProb') - try: - self.init_release_prob = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (initReleaseProb): %s' % exp) - self.validate_ZeroToOne(self.init_release_prob) # validate type ZeroToOne - value = find_attr_value_('tauRec', node) - if value is not None and 'tauRec' not in already_processed: - already_processed.add('tauRec') + self.validate_PlasticityTypes(self.type) # validate type PlasticityTypes + value = find_attr_value_("initReleaseProb", node) + if value is not None and "initReleaseProb" not in already_processed: + already_processed.add("initReleaseProb") + value = self.gds_parse_float(value, node, "initReleaseProb") + self.init_release_prob = value + self.validate_ZeroToOne(self.init_release_prob) # validate type ZeroToOne + value = find_attr_value_("tauRec", node) + if value is not None and "tauRec" not in already_processed: + already_processed.add("tauRec") self.tau_rec = value - self.validate_Nml2Quantity_time(self.tau_rec) # validate type Nml2Quantity_time - value = find_attr_value_('tauFac', node) - if value is not None and 'tauFac' not in already_processed: - already_processed.add('tauFac') + self.validate_Nml2Quantity_time( + self.tau_rec + ) # validate type Nml2Quantity_time + value = find_attr_value_("tauFac", node) + if value is 
not None and "tauFac" not in already_processed: + already_processed.add("tauFac") self.tau_fac = value - self.validate_Nml2Quantity_time(self.tau_fac) # validate type Nml2Quantity_time - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + self.validate_Nml2Quantity_time( + self.tau_fac + ) # validate type Nml2Quantity_time + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass + + # end class PlasticityMechanism class SegmentParent(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('segments', 'NonNegativeInteger', 0, 0, {'use': u'required'}), - MemberSpec_('fraction_along', 'ZeroToOne', 0, 1, {'use': u'optional'}), + MemberSpec_( + "segments", + "NonNegativeInteger", + 0, + 0, + {"use": "required", "name": "segments"}, + ), + MemberSpec_( + "fraction_along", + "ZeroToOne", + 0, + 1, + {"use": "optional", "name": "fraction_along"}, + ), ] subclass = None superclass = None - def __init__(self, segments=None, fraction_along='1', **kwargs_): + + def __init__( + self, segments=None, fraction_along="1", gds_collector_=None, **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.segments = _cast(int, segments) + self.segments_nsprefix_ = None self.fraction_along = _cast(float, fraction_along) + self.fraction_along_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, SegmentParent) + subclass = getSubclassFromModule_(CurrentSubclassModule_, SegmentParent) if subclass is not None: return subclass(*args_, **kwargs_) if SegmentParent.subclass: return SegmentParent.subclass(*args_, **kwargs_) else: return SegmentParent(*args_, **kwargs_) + factory = 
staticmethod(factory) + def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' + % { + "value": value, + "lineno": lineno, + } + ) + return False pass + def validate_ZeroToOne(self, value): # Validate type ZeroToOne, a restriction on xs:float. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, float): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (float)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if value < 0: - warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on ZeroToOne' % {"value" : value} ) + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on ZeroToOne' + % {"value": value, "lineno": lineno} + ) + result = False if value > 1: - warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on ZeroToOne' % {"value" : value} ) - def hasContent_(self): - if ( + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s does not match xsd maxInclusive restriction on ZeroToOne' + % {"value": value, "lineno": lineno} + ) + result = False - ): + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SegmentParent', pretty_print=True): - imported_ns_def_ = 
GenerateDSNamespaceDefs_.get('SegmentParent') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SegmentParent", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SegmentParent") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "SegmentParent": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SegmentParent') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SegmentParent', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SegmentParent'): - if self.segments is not None and 'segments' not in already_processed: - already_processed.add('segments') - outfile.write(' segment=%s' % (quote_attrib(self.segments), )) - if self.fraction_along != 1 and 'fraction_along' not in already_processed: - already_processed.add('fraction_along') - outfile.write(' fractionAlong=%s' % (quote_attrib(self.fraction_along), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SegmentParent', fromsubclass_=False, pretty_print=True): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, 
already_processed, namespaceprefix_, name_="SegmentParent" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SegmentParent", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="SegmentParent", + ): + if self.segments is not None and "segments" not in already_processed: + already_processed.add("segments") + outfile.write( + ' segment="%s"' + % self.gds_format_integer(self.segments, input_name="segment") + ) + if self.fraction_along != 1 and "fraction_along" not in already_processed: + already_processed.add("fraction_along") + outfile.write( + ' fractionAlong="%s"' + % self.gds_format_float(self.fraction_along, input_name="fractionAlong") + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SegmentParent", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('segment', node) - if value is not None and 'segment' not in already_processed: - already_processed.add('segment') - try: - self.segments = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: 
%s' % exp) + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("segment", node) + if value is not None and "segment" not in already_processed: + already_processed.add("segment") + self.segments = self.gds_parse_integer(value, node, "segment") if self.segments < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - self.validate_NonNegativeInteger(self.segments) # validate type NonNegativeInteger - value = find_attr_value_('fractionAlong', node) - if value is not None and 'fractionAlong' not in already_processed: - already_processed.add('fractionAlong') - try: - self.fraction_along = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (fractionAlong): %s' % exp) - self.validate_ZeroToOne(self.fraction_along) # validate type ZeroToOne - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + raise_parse_error(node, "Invalid NonNegativeInteger") + self.validate_NonNegativeInteger( + self.segments + ) # validate type NonNegativeInteger + value = find_attr_value_("fractionAlong", node) + if value is not None and "fractionAlong" not in already_processed: + already_processed.add("fractionAlong") + value = self.gds_parse_float(value, node, "fractionAlong") + self.fraction_along = value + self.validate_ZeroToOne(self.fraction_along) # validate type ZeroToOne + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass + + # end class SegmentParent class Point3DWithDiam(GeneratedsSuper): - """A 3D point with diameter.""" + """Point3DWithDiam -- Base type for ComponentTypes which specify an ( **x,** **y,** **z** ) coordinate along with a **diameter.** Note: no dimension used in the attributes for these coordinates! These are assumed to have dimension micrometer ( 10^-6 m ). 
This is due to micrometers being the default option for the majority of neuronal morphology formats, and dimensions are omitted here to facilitate reading and writing of morphologies in NeuroML. + \n + :param x: x coordinate of the point. Note: no dimension used, see description of **point3DWithDiam** for details. + :type x: none + :param y: y coordinate of the ppoint. Note: no dimension used, see description of **point3DWithDiam** for details. + :type y: none + :param z: z coordinate of the ppoint. Note: no dimension used, see description of **point3DWithDiam** for details. + :type z: none + :param diameter: Diameter of the ppoint. Note: no dimension used, see description of **point3DWithDiam** for details. + :type diameter: none + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('x', 'xs:double', 0, 0, {'use': u'required'}), - MemberSpec_('y', 'xs:double', 0, 0, {'use': u'required'}), - MemberSpec_('z', 'xs:double', 0, 0, {'use': u'required'}), - MemberSpec_('diameter', 'DoubleGreaterThanZero', 0, 0, {'use': u'required'}), + MemberSpec_("x", "xs:double", 0, 0, {"use": "required", "name": "x"}), + MemberSpec_("y", "xs:double", 0, 0, {"use": "required", "name": "y"}), + MemberSpec_("z", "xs:double", 0, 0, {"use": "required", "name": "z"}), + MemberSpec_( + "diameter", + "DoubleGreaterThanZero", + 0, + 0, + {"use": "required", "name": "diameter"}, + ), ] subclass = None superclass = None - def __init__(self, x=None, y=None, z=None, diameter=None, **kwargs_): + + def __init__( + self, x=None, y=None, z=None, diameter=None, gds_collector_=None, **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.x = _cast(float, x) + self.x_nsprefix_ = None self.y = _cast(float, y) + self.y_nsprefix_ = None self.z = _cast(float, z) + 
self.z_nsprefix_ = None self.diameter = _cast(float, diameter) + self.diameter_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Point3DWithDiam) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Point3DWithDiam) if subclass is not None: return subclass(*args_, **kwargs_) if Point3DWithDiam.subclass: return Point3DWithDiam.subclass(*args_, **kwargs_) else: return Point3DWithDiam(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_DoubleGreaterThanZero(self, value): # Validate type DoubleGreaterThanZero, a restriction on xs:double. - if value is not None and Validate_simpletypes_: - if value <= 0: - warnings_.warn('Value "%(value)s" does not match xsd minExclusive restriction on DoubleGreaterThanZero' % {"value" : value} ) - def hasContent_(self): if ( - + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, float): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (float)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if value <= 0: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s does not match xsd minExclusive restriction on DoubleGreaterThanZero' + % {"value": value, "lineno": lineno} + ) + result = False + + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Point3DWithDiam', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Point3DWithDiam') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Point3DWithDiam", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Point3DWithDiam") if imported_ns_def_ is not None: namespacedef_ 
= imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Point3DWithDiam": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Point3DWithDiam') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Point3DWithDiam', pretty_print=pretty_print) - outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Point3DWithDiam'): - if self.x is not None and 'x' not in already_processed: - already_processed.add('x') - outfile.write(' x="%s"' % self.gds_format_double(self.x, input_name='x')) - if self.y is not None and 'y' not in already_processed: - already_processed.add('y') - outfile.write(' y="%s"' % self.gds_format_double(self.y, input_name='y')) - if self.z is not None and 'z' not in already_processed: - already_processed.add('z') - outfile.write(' z="%s"' % self.gds_format_double(self.z, input_name='z')) - if self.diameter is not None and 'diameter' not in already_processed: - already_processed.add('diameter') - outfile.write(' diameter=%s' % (quote_attrib(self.diameter), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Point3DWithDiam', fromsubclass_=False, pretty_print=True): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, 
already_processed, namespaceprefix_, name_="Point3DWithDiam" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Point3DWithDiam", + pretty_print=pretty_print, + ) + outfile.write("</%s%s>%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="Point3DWithDiam", + ): + if self.x is not None and "x" not in already_processed: + already_processed.add("x") + outfile.write(' x="%s"' % self.gds_format_double(self.x, input_name="x")) + if self.y is not None and "y" not in already_processed: + already_processed.add("y") + outfile.write(' y="%s"' % self.gds_format_double(self.y, input_name="y")) + if self.z is not None and "z" not in already_processed: + already_processed.add("z") + outfile.write(' z="%s"' % self.gds_format_double(self.z, input_name="z")) + if self.diameter is not None and "diameter" not in already_processed: + already_processed.add("diameter") + outfile.write( + ' diameter="%s"' + % self.gds_format_double(self.diameter, input_name="diameter") + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Point3DWithDiam", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): 
- value = find_attr_value_('x', node) - if value is not None and 'x' not in already_processed: - already_processed.add('x') - try: - self.x = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (x): %s' % exp) - value = find_attr_value_('y', node) - if value is not None and 'y' not in already_processed: - already_processed.add('y') - try: - self.y = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (y): %s' % exp) - value = find_attr_value_('z', node) - if value is not None and 'z' not in already_processed: - already_processed.add('z') - try: - self.z = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (z): %s' % exp) - value = find_attr_value_('diameter', node) - if value is not None and 'diameter' not in already_processed: - already_processed.add('diameter') - try: - self.diameter = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (diameter): %s' % exp) - self.validate_DoubleGreaterThanZero(self.diameter) # validate type DoubleGreaterThanZero - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("x", node) + if value is not None and "x" not in already_processed: + already_processed.add("x") + value = self.gds_parse_double(value, node, "x") + self.x = value + value = find_attr_value_("y", node) + if value is not None and "y" not in already_processed: + already_processed.add("y") + value = self.gds_parse_double(value, node, "y") + self.y = value + value = find_attr_value_("z", node) + if value is not None and "z" not in already_processed: + already_processed.add("z") + value = self.gds_parse_double(value, node, "z") + self.z = value + value = find_attr_value_("diameter", node) + if value is not None and "diameter" not in already_processed: + already_processed.add("diameter") + value = 
self.gds_parse_double(value, node, "diameter") + self.diameter = value + self.validate_DoubleGreaterThanZero( + self.diameter + ) # validate type DoubleGreaterThanZero + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass def __str__(self): - return "("+str(self.x)+", "+str(self.y)+", "+str(self.z)+"), diam "+str(self.diameter)+"um" + return ( + "(" + + str(self.x) + + ", " + + str(self.y) + + ", " + + str(self.z) + + "), diam " + + str(self.diameter) + + "um" + ) def __repr__(self): return str(self) def distance_to(self, other_3d_point): + """Find the distance between this point and another. + + :param other_3d_point: other 3D point to calculate distance to + :type other_3d_point: Point3DWithDiam + :returns: distance between the two points + :rtype: float + """ a_x = self.x a_y = self.y a_z = self.z @@ -3504,3522 +7473,8646 @@ def distance_to(self, other_3d_point): b_y = other_3d_point.y b_z = other_3d_point.z - distance = ((a_x-b_x)**2 + (a_y-b_y)**2 + (a_z-b_z)**2)**(0.5) + distance = ((a_x - b_x) ** 2 + (a_y - b_y) ** 2 + (a_z - b_z) ** 2) ** (0.5) return distance + # end class Point3DWithDiam class ProximalDetails(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('translation_start', 'xs:double', 0, 0, {'use': u'required'}), + MemberSpec_( + "translation_start", + "xs:double", + 0, + 0, + {"use": "required", "name": "translation_start"}, + ), ] subclass = None superclass = None - def __init__(self, translation_start=None, **kwargs_): + + def __init__(self, translation_start=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.translation_start = _cast(float, translation_start) + self.translation_start_nsprefix_ = None + def factory(*args_, 
**kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ProximalDetails) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ProximalDetails) if subclass is not None: return subclass(*args_, **kwargs_) if ProximalDetails.subclass: return ProximalDetails.subclass(*args_, **kwargs_) else: return ProximalDetails(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - ): + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ProximalDetails', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ProximalDetails') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ProximalDetails", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ProximalDetails") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ProximalDetails": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ProximalDetails') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ProximalDetails', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ProximalDetails'): - if self.translation_start is 
not None and 'translation_start' not in already_processed: - already_processed.add('translation_start') - outfile.write(' translationStart="%s"' % self.gds_format_double(self.translation_start, input_name='translationStart')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ProximalDetails', fromsubclass_=False, pretty_print=True): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ProximalDetails" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ProximalDetails", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ProximalDetails", + ): + if ( + self.translation_start is not None + and "translation_start" not in already_processed + ): + already_processed.add("translation_start") + outfile.write( + ' translationStart="%s"' + % self.gds_format_double( + self.translation_start, input_name="translationStart" + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ProximalDetails", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + 
self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('translationStart', node) - if value is not None and 'translationStart' not in already_processed: - already_processed.add('translationStart') - try: - self.translation_start = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (translationStart): %s' % exp) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("translationStart", node) + if value is not None and "translationStart" not in already_processed: + already_processed.add("translationStart") + value = self.gds_parse_double(value, node, "translationStart") + self.translation_start = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass + + # end class ProximalDetails class DistalDetails(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('normalization_end', 'xs:double', 0, 0, {'use': u'required'}), + MemberSpec_( + "normalization_end", + "xs:double", + 0, + 0, + {"use": "required", "name": "normalization_end"}, + ), ] subclass = None superclass = None - def __init__(self, normalization_end=None, **kwargs_): + + def __init__(self, normalization_end=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.normalization_end = _cast(float, normalization_end) + self.normalization_end_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, DistalDetails) + subclass = 
getSubclassFromModule_(CurrentSubclassModule_, DistalDetails) if subclass is not None: return subclass(*args_, **kwargs_) if DistalDetails.subclass: return DistalDetails.subclass(*args_, **kwargs_) else: return DistalDetails(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - ): + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DistalDetails', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('DistalDetails') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="DistalDetails", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("DistalDetails") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "DistalDetails": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DistalDetails') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='DistalDetails', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DistalDetails'): - if self.normalization_end is not None and 'normalization_end' not in already_processed: - already_processed.add('normalization_end') - outfile.write(' normalizationEnd="%s"' % 
self.gds_format_double(self.normalization_end, input_name='normalizationEnd')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DistalDetails', fromsubclass_=False, pretty_print=True): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="DistalDetails" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="DistalDetails", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="DistalDetails", + ): + if ( + self.normalization_end is not None + and "normalization_end" not in already_processed + ): + already_processed.add("normalization_end") + outfile.write( + ' normalizationEnd="%s"' + % self.gds_format_double( + self.normalization_end, input_name="normalizationEnd" + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="DistalDetails", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = 
find_attr_value_('normalizationEnd', node) - if value is not None and 'normalizationEnd' not in already_processed: - already_processed.add('normalizationEnd') - try: - self.normalization_end = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (normalizationEnd): %s' % exp) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("normalizationEnd", node) + if value is not None and "normalizationEnd" not in already_processed: + already_processed.add("normalizationEnd") + value = self.gds_parse_double(value, node, "normalizationEnd") + self.normalization_end = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass + + # end class DistalDetails class Member(GeneratedsSuper): + """Member -- A single identified **segment** which is part of the **segmentGroup**""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('segments', 'NonNegativeInteger', 0, 0, {'use': u'required'}), + MemberSpec_( + "segments", + "NonNegativeInteger", + 0, + 0, + {"use": "required", "name": "segments"}, + ), ] subclass = None superclass = None - def __init__(self, segments=None, **kwargs_): + + def __init__(self, segments=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.segments = _cast(int, segments) + self.segments_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Member) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Member) if subclass is not None: return subclass(*args_, **kwargs_) if Member.subclass: return 
Member.subclass(*args_, **kwargs_) else: return Member(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. - if value is not None and Validate_simpletypes_: - pass - def hasContent_(self): if ( - + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + pass + + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Member', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Member') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Member", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Member") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Member": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Member') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Member', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - 
def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Member'): - if self.segments is not None and 'segments' not in already_processed: - already_processed.add('segments') - outfile.write(' segment=%s' % (quote_attrib(self.segments), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Member', fromsubclass_=False, pretty_print=True): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Member" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Member", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Member" + ): + if self.segments is not None and "segments" not in already_processed: + already_processed.add("segments") + outfile.write( + ' segment="%s"' + % self.gds_format_integer(self.segments, input_name="segment") + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Member", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return 
self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('segment', node) - if value is not None and 'segment' not in already_processed: - already_processed.add('segment') - try: - self.segments = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("segment", node) + if value is not None and "segment" not in already_processed: + already_processed.add("segment") + self.segments = self.gds_parse_integer(value, node, "segment") if self.segments < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - self.validate_NonNegativeInteger(self.segments) # validate type NonNegativeInteger - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + raise_parse_error(node, "Invalid NonNegativeInteger") + self.validate_NonNegativeInteger( + self.segments + ) # validate type NonNegativeInteger + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass + + # end class Member class Include(GeneratedsSuper): + """Include -- Include all members of another **segmentGroup** in this group""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('segment_groups', 'NmlId', 0, 0, {'use': u'required'}), + MemberSpec_( + "segment_groups", + "NmlId", + 0, + 0, + {"use": "required", "name": "segment_groups"}, + ), ] subclass = None superclass = None - def __init__(self, segment_groups=None, **kwargs_): + + def __init__(self, segment_groups=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.segment_groups = _cast(None, segment_groups) + self.segment_groups_nsprefix_ = None + def factory(*args_, 
**kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Include) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Include) if subclass is not None: return subclass(*args_, **kwargs_) if Include.subclass: return Include.subclass(*args_, **kwargs_) else: return Include(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): if ( - + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Include', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Include') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Include", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Include") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if 
pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Include": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Include') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Include', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Include'): - if self.segment_groups is not None and 'segment_groups' not in already_processed: - already_processed.add('segment_groups') - outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Include', fromsubclass_=False, pretty_print=True): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Include" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Include", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Include" + ): + if ( + self.segment_groups is not None + and "segment_groups" not 
in already_processed + ): + already_processed.add("segment_groups") + outfile.write( + " segmentGroup=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.segment_groups), input_name="segmentGroup" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Include", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('segmentGroup', node) - if value is not None and 'segmentGroup' not in already_processed: - already_processed.add('segmentGroup') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("segmentGroup", node) + if value is not None and "segmentGroup" not in already_processed: + already_processed.add("segmentGroup") self.segment_groups = value - self.validate_NmlId(self.segment_groups) # validate type NmlId - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + self.validate_NmlId(self.segment_groups) # validate type NmlId + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass + + # end class Include class Path(GeneratedsSuper): + """Path -- Include all the **segment** s between those specified by **from** and **to** , inclusive""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('from_', 
'SegmentEndPoint', 0, 1, {u'type': u'SegmentEndPoint', u'name': u'from', u'minOccurs': u'0'}, None), - MemberSpec_('to', 'SegmentEndPoint', 0, 1, {u'type': u'SegmentEndPoint', u'name': u'to', u'minOccurs': u'0'}, None), + MemberSpec_( + "from_", + "SegmentEndPoint", + 0, + 1, + {"minOccurs": "0", "name": "from", "type": "SegmentEndPoint"}, + None, + ), + MemberSpec_( + "to", + "SegmentEndPoint", + 0, + 1, + {"minOccurs": "0", "name": "to", "type": "SegmentEndPoint"}, + None, + ), ] subclass = None superclass = None - def __init__(self, from_=None, to=None, **kwargs_): + + def __init__(self, from_=None, to=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.from_ = from_ + self.from__nsprefix_ = None self.to = to + self.to_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Path) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Path) if subclass is not None: return subclass(*args_, **kwargs_) if Path.subclass: return Path.subclass(*args_, **kwargs_) else: return Path(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - self.from_ is not None or - self.to is not None - ): + + def _hasContent(self): + if self.from_ is not None or self.to is not None: return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Path', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Path') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="Path", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Path") if imported_ns_def_ 
is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Path": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Path') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Path', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Path" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Path", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Path'): + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Path" + ): pass - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Path', fromsubclass_=False, pretty_print=True): + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="Path", + fromsubclass_=False, + pretty_print=True, 
+ ): if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.from_ is not None: - self.from_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='from', pretty_print=pretty_print) + namespaceprefix_ = ( + self.from__nsprefix_ + ":" + if (UseCapturedNS_ and self.from__nsprefix_) + else "" + ) + self.from_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="from", + pretty_print=pretty_print, + ) if self.to is not None: - self.to.export(outfile, level, namespaceprefix_, namespacedef_='', name_='to', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.to_nsprefix_ + ":" + if (UseCapturedNS_ and self.to_nsprefix_) + else "" + ) + self.to.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="to", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + + def _buildAttributes(self, node, attrs, already_processed): pass - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'from': + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "from": obj_ = SegmentEndPoint.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.from_ = obj_ - obj_.original_tagname_ = 'from' - elif nodeName_ == 'to': + obj_.original_tagname_ = "from" + elif 
nodeName_ == "to": obj_ = SegmentEndPoint.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.to = obj_ - obj_.original_tagname_ = 'to' + obj_.original_tagname_ = "to" + + # end class Path class SubTree(GeneratedsSuper): + """SubTree -- Include all the **segment** s distal to that specified by **from** in the **segmentGroup**""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('from_', 'SegmentEndPoint', 0, 1, {u'type': u'SegmentEndPoint', u'name': u'from', u'minOccurs': u'0'}, 3), - MemberSpec_('to', 'SegmentEndPoint', 0, 1, {u'type': u'SegmentEndPoint', u'name': u'to', u'minOccurs': u'0'}, 3), + MemberSpec_( + "from_", + "SegmentEndPoint", + 0, + 1, + {"minOccurs": "0", "name": "from", "type": "SegmentEndPoint"}, + 3, + ), + MemberSpec_( + "to", + "SegmentEndPoint", + 0, + 1, + {"minOccurs": "0", "name": "to", "type": "SegmentEndPoint"}, + 3, + ), ] subclass = None superclass = None - def __init__(self, from_=None, to=None, **kwargs_): + + def __init__(self, from_=None, to=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.from_ = from_ + self.from__nsprefix_ = None self.to = to + self.to_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, SubTree) + subclass = getSubclassFromModule_(CurrentSubclassModule_, SubTree) if subclass is not None: return subclass(*args_, **kwargs_) if SubTree.subclass: return SubTree.subclass(*args_, **kwargs_) else: return SubTree(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - self.from_ is not None or - self.to is not None - ): + + def _hasContent(self): + if self.from_ is not None or 
self.to is not None: return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SubTree', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SubTree') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="SubTree", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SubTree") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "SubTree": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SubTree') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SubTree', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SubTree" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SubTree", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', 
name_='SubTree'): + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="SubTree" + ): pass - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SubTree', fromsubclass_=False, pretty_print=True): + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="SubTree", + fromsubclass_=False, + pretty_print=True, + ): if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.from_ is not None: - self.from_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='from', pretty_print=pretty_print) + namespaceprefix_ = ( + self.from__nsprefix_ + ":" + if (UseCapturedNS_ and self.from__nsprefix_) + else "" + ) + self.from_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="from", + pretty_print=pretty_print, + ) if self.to is not None: - self.to.export(outfile, level, namespaceprefix_, namespacedef_='', name_='to', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.to_nsprefix_ + ":" + if (UseCapturedNS_ and self.to_nsprefix_) + else "" + ) + self.to.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="to", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + + def 
_buildAttributes(self, node, attrs, already_processed): pass - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'from': + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "from": obj_ = SegmentEndPoint.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.from_ = obj_ - obj_.original_tagname_ = 'from' - elif nodeName_ == 'to': + obj_.original_tagname_ = "from" + elif nodeName_ == "to": obj_ = SegmentEndPoint.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.to = obj_ - obj_.original_tagname_ = 'to' + obj_.original_tagname_ = "to" + + # end class SubTree class SegmentEndPoint(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('segments', 'NonNegativeInteger', 0, 0, {'use': u'required'}), + MemberSpec_( + "segments", + "NonNegativeInteger", + 0, + 0, + {"use": "required", "name": "segments"}, + ), ] subclass = None superclass = None - def __init__(self, segments=None, **kwargs_): + + def __init__(self, segments=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.segments = _cast(int, segments) + self.segments_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, SegmentEndPoint) + subclass = getSubclassFromModule_(CurrentSubclassModule_, SegmentEndPoint) if subclass is not None: return subclass(*args_, **kwargs_) if SegmentEndPoint.subclass: return SegmentEndPoint.subclass(*args_, **kwargs_) else: return SegmentEndPoint(*args_, **kwargs_) + factory = staticmethod(factory) + def 
validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. - if value is not None and Validate_simpletypes_: - pass - def hasContent_(self): if ( - + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + pass + + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SegmentEndPoint', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SegmentEndPoint') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SegmentEndPoint", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SegmentEndPoint") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "SegmentEndPoint": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SegmentEndPoint') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SegmentEndPoint', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, 
already_processed, namespaceprefix_='', name_='SegmentEndPoint'): - if self.segments is not None and 'segments' not in already_processed: - already_processed.add('segments') - outfile.write(' segment=%s' % (quote_attrib(self.segments), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SegmentEndPoint', fromsubclass_=False, pretty_print=True): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SegmentEndPoint" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SegmentEndPoint", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="SegmentEndPoint", + ): + if self.segments is not None and "segments" not in already_processed: + already_processed.add("segments") + outfile.write( + ' segment="%s"' + % self.gds_format_integer(self.segments, input_name="segment") + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SegmentEndPoint", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, 
gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('segment', node) - if value is not None and 'segment' not in already_processed: - already_processed.add('segment') - try: - self.segments = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("segment", node) + if value is not None and "segment" not in already_processed: + already_processed.add("segment") + self.segments = self.gds_parse_integer(value, node, "segment") if self.segments < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - self.validate_NonNegativeInteger(self.segments) # validate type NonNegativeInteger - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + raise_parse_error(node, "Invalid NonNegativeInteger") + self.validate_NonNegativeInteger( + self.segments + ) # validate type NonNegativeInteger + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass + + # end class SegmentEndPoint class MembraneProperties(GeneratedsSuper): + """MembraneProperties -- Properties specific to the membrane, such as the **populations** of channels, **channelDensities,** **specificCapacitance,** etc.""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('channel_populations', 'ChannelPopulation', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ChannelPopulation', u'name': u'channelPopulation', u'minOccurs': u'0'}, None), - MemberSpec_('channel_densities', 'ChannelDensity', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ChannelDensity', u'name': u'channelDensity', u'minOccurs': u'0'}, None), - MemberSpec_('channel_density_v_shifts', 'ChannelDensityVShift', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ChannelDensityVShift', u'name': u'channelDensityVShift', u'minOccurs': u'0'}, None), - 
MemberSpec_('channel_density_nernsts', 'ChannelDensityNernst', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ChannelDensityNernst', u'name': u'channelDensityNernst', u'minOccurs': u'0'}, None), - MemberSpec_('channel_density_ghks', 'ChannelDensityGHK', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ChannelDensityGHK', u'name': u'channelDensityGHK', u'minOccurs': u'0'}, None), - MemberSpec_('channel_density_ghk2s', 'ChannelDensityGHK2', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ChannelDensityGHK2', u'name': u'channelDensityGHK2', u'minOccurs': u'0'}, None), - MemberSpec_('channel_density_non_uniforms', 'ChannelDensityNonUniform', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ChannelDensityNonUniform', u'name': u'channelDensityNonUniform', u'minOccurs': u'0'}, None), - MemberSpec_('channel_density_non_uniform_nernsts', 'ChannelDensityNonUniformNernst', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ChannelDensityNonUniformNernst', u'name': u'channelDensityNonUniformNernst', u'minOccurs': u'0'}, None), - MemberSpec_('channel_density_non_uniform_ghks', 'ChannelDensityNonUniformGHK', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ChannelDensityNonUniformGHK', u'name': u'channelDensityNonUniformGHK', u'minOccurs': u'0'}, None), - MemberSpec_('spike_threshes', 'SpikeThresh', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SpikeThresh', u'name': u'spikeThresh', u'minOccurs': u'0'}, None), - MemberSpec_('specific_capacitances', 'SpecificCapacitance', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SpecificCapacitance', u'name': u'specificCapacitance', u'minOccurs': u'0'}, None), - MemberSpec_('init_memb_potentials', 'InitMembPotential', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'InitMembPotential', u'name': u'initMembPotential', u'minOccurs': u'0'}, None), + MemberSpec_( + "channel_populations", + "ChannelPopulation", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "channelPopulation", + "type": "ChannelPopulation", + }, + None, + ), + MemberSpec_( + 
"channel_densities", + "ChannelDensity", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "channelDensity", + "type": "ChannelDensity", + }, + None, + ), + MemberSpec_( + "channel_density_v_shifts", + "ChannelDensityVShift", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "channelDensityVShift", + "type": "ChannelDensityVShift", + }, + None, + ), + MemberSpec_( + "channel_density_nernsts", + "ChannelDensityNernst", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "channelDensityNernst", + "type": "ChannelDensityNernst", + }, + None, + ), + MemberSpec_( + "channel_density_ghks", + "ChannelDensityGHK", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "channelDensityGHK", + "type": "ChannelDensityGHK", + }, + None, + ), + MemberSpec_( + "channel_density_ghk2s", + "ChannelDensityGHK2", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "channelDensityGHK2", + "type": "ChannelDensityGHK2", + }, + None, + ), + MemberSpec_( + "channel_density_non_uniforms", + "ChannelDensityNonUniform", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "channelDensityNonUniform", + "type": "ChannelDensityNonUniform", + }, + None, + ), + MemberSpec_( + "channel_density_non_uniform_nernsts", + "ChannelDensityNonUniformNernst", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "channelDensityNonUniformNernst", + "type": "ChannelDensityNonUniformNernst", + }, + None, + ), + MemberSpec_( + "channel_density_non_uniform_ghks", + "ChannelDensityNonUniformGHK", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "channelDensityNonUniformGHK", + "type": "ChannelDensityNonUniformGHK", + }, + None, + ), + MemberSpec_( + "spike_threshes", + "SpikeThresh", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "spikeThresh", + "type": "SpikeThresh", + }, + None, + ), + MemberSpec_( + "specific_capacitances", + 
"SpecificCapacitance", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "specificCapacitance", + "type": "SpecificCapacitance", + }, + None, + ), + MemberSpec_( + "init_memb_potentials", + "InitMembPotential", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "initMembPotential", + "type": "InitMembPotential", + }, + None, + ), ] subclass = None superclass = None - def __init__(self, channel_populations=None, channel_densities=None, channel_density_v_shifts=None, channel_density_nernsts=None, channel_density_ghks=None, channel_density_ghk2s=None, channel_density_non_uniforms=None, channel_density_non_uniform_nernsts=None, channel_density_non_uniform_ghks=None, spike_threshes=None, specific_capacitances=None, init_memb_potentials=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + channel_populations=None, + channel_densities=None, + channel_density_v_shifts=None, + channel_density_nernsts=None, + channel_density_ghks=None, + channel_density_ghk2s=None, + channel_density_non_uniforms=None, + channel_density_non_uniform_nernsts=None, + channel_density_non_uniform_ghks=None, + spike_threshes=None, + specific_capacitances=None, + init_memb_potentials=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None if channel_populations is None: self.channel_populations = [] else: self.channel_populations = channel_populations + self.channel_populations_nsprefix_ = None if channel_densities is None: self.channel_densities = [] else: self.channel_densities = channel_densities + self.channel_densities_nsprefix_ = None if channel_density_v_shifts is None: self.channel_density_v_shifts = [] else: self.channel_density_v_shifts = channel_density_v_shifts + 
self.channel_density_v_shifts_nsprefix_ = None if channel_density_nernsts is None: self.channel_density_nernsts = [] else: self.channel_density_nernsts = channel_density_nernsts + self.channel_density_nernsts_nsprefix_ = None if channel_density_ghks is None: self.channel_density_ghks = [] else: self.channel_density_ghks = channel_density_ghks + self.channel_density_ghks_nsprefix_ = None if channel_density_ghk2s is None: self.channel_density_ghk2s = [] else: self.channel_density_ghk2s = channel_density_ghk2s + self.channel_density_ghk2s_nsprefix_ = None if channel_density_non_uniforms is None: self.channel_density_non_uniforms = [] else: self.channel_density_non_uniforms = channel_density_non_uniforms + self.channel_density_non_uniforms_nsprefix_ = None if channel_density_non_uniform_nernsts is None: self.channel_density_non_uniform_nernsts = [] else: - self.channel_density_non_uniform_nernsts = channel_density_non_uniform_nernsts + self.channel_density_non_uniform_nernsts = ( + channel_density_non_uniform_nernsts + ) + self.channel_density_non_uniform_nernsts_nsprefix_ = None if channel_density_non_uniform_ghks is None: self.channel_density_non_uniform_ghks = [] else: self.channel_density_non_uniform_ghks = channel_density_non_uniform_ghks + self.channel_density_non_uniform_ghks_nsprefix_ = None if spike_threshes is None: self.spike_threshes = [] else: self.spike_threshes = spike_threshes + self.spike_threshes_nsprefix_ = None if specific_capacitances is None: self.specific_capacitances = [] else: self.specific_capacitances = specific_capacitances + self.specific_capacitances_nsprefix_ = None if init_memb_potentials is None: self.init_memb_potentials = [] else: self.init_memb_potentials = init_memb_potentials + self.init_memb_potentials_nsprefix_ = None self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, MembraneProperties) + 
CurrentSubclassModule_, MembraneProperties + ) if subclass is not None: return subclass(*args_, **kwargs_) if MembraneProperties.subclass: return MembraneProperties.subclass(*args_, **kwargs_) else: return MembraneProperties(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - self.channel_populations or - self.channel_densities or - self.channel_density_v_shifts or - self.channel_density_nernsts or - self.channel_density_ghks or - self.channel_density_ghk2s or - self.channel_density_non_uniforms or - self.channel_density_non_uniform_nernsts or - self.channel_density_non_uniform_ghks or - self.spike_threshes or - self.specific_capacitances or - self.init_memb_potentials + + def _hasContent(self): + if ( + self.channel_populations + or self.channel_densities + or self.channel_density_v_shifts + or self.channel_density_nernsts + or self.channel_density_ghks + or self.channel_density_ghk2s + or self.channel_density_non_uniforms + or self.channel_density_non_uniform_nernsts + or self.channel_density_non_uniform_ghks + or self.spike_threshes + or self.specific_capacitances + or self.init_memb_potentials ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='MembraneProperties', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('MembraneProperties') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="MembraneProperties", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("MembraneProperties") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "MembraneProperties": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = 
self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MembraneProperties') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='MembraneProperties', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="MembraneProperties", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="MembraneProperties", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='MembraneProperties'): - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="MembraneProperties", + ): + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % 
(imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) pass - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='MembraneProperties', fromsubclass_=False, pretty_print=True): + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="MembraneProperties", + fromsubclass_=False, + pretty_print=True, + ): if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for channelPopulation_ in self.channel_populations: - channelPopulation_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='channelPopulation', pretty_print=pretty_print) + namespaceprefix_ = ( + self.channel_populations_nsprefix_ + ":" + if (UseCapturedNS_ and self.channel_populations_nsprefix_) + else "" + ) + channelPopulation_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="channelPopulation", + pretty_print=pretty_print, + ) for channelDensity_ in self.channel_densities: - channelDensity_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='channelDensity', pretty_print=pretty_print) + namespaceprefix_ = ( + self.channel_densities_nsprefix_ + ":" + if (UseCapturedNS_ and self.channel_densities_nsprefix_) + else "" + ) + channelDensity_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="channelDensity", + pretty_print=pretty_print, + ) for channelDensityVShift_ in self.channel_density_v_shifts: - channelDensityVShift_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='channelDensityVShift', pretty_print=pretty_print) + namespaceprefix_ = ( + self.channel_density_v_shifts_nsprefix_ + ":" + if (UseCapturedNS_ and self.channel_density_v_shifts_nsprefix_) + else "" + ) + channelDensityVShift_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="channelDensityVShift", + 
pretty_print=pretty_print, + ) for channelDensityNernst_ in self.channel_density_nernsts: - channelDensityNernst_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='channelDensityNernst', pretty_print=pretty_print) + namespaceprefix_ = ( + self.channel_density_nernsts_nsprefix_ + ":" + if (UseCapturedNS_ and self.channel_density_nernsts_nsprefix_) + else "" + ) + channelDensityNernst_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="channelDensityNernst", + pretty_print=pretty_print, + ) for channelDensityGHK_ in self.channel_density_ghks: - channelDensityGHK_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='channelDensityGHK', pretty_print=pretty_print) + namespaceprefix_ = ( + self.channel_density_ghks_nsprefix_ + ":" + if (UseCapturedNS_ and self.channel_density_ghks_nsprefix_) + else "" + ) + channelDensityGHK_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="channelDensityGHK", + pretty_print=pretty_print, + ) for channelDensityGHK2_ in self.channel_density_ghk2s: - channelDensityGHK2_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='channelDensityGHK2', pretty_print=pretty_print) + namespaceprefix_ = ( + self.channel_density_ghk2s_nsprefix_ + ":" + if (UseCapturedNS_ and self.channel_density_ghk2s_nsprefix_) + else "" + ) + channelDensityGHK2_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="channelDensityGHK2", + pretty_print=pretty_print, + ) for channelDensityNonUniform_ in self.channel_density_non_uniforms: - channelDensityNonUniform_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='channelDensityNonUniform', pretty_print=pretty_print) + namespaceprefix_ = ( + self.channel_density_non_uniforms_nsprefix_ + ":" + if (UseCapturedNS_ and self.channel_density_non_uniforms_nsprefix_) + else "" + ) + channelDensityNonUniform_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + 
name_="channelDensityNonUniform", + pretty_print=pretty_print, + ) for channelDensityNonUniformNernst_ in self.channel_density_non_uniform_nernsts: - channelDensityNonUniformNernst_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='channelDensityNonUniformNernst', pretty_print=pretty_print) + namespaceprefix_ = ( + self.channel_density_non_uniform_nernsts_nsprefix_ + ":" + if ( + UseCapturedNS_ + and self.channel_density_non_uniform_nernsts_nsprefix_ + ) + else "" + ) + channelDensityNonUniformNernst_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="channelDensityNonUniformNernst", + pretty_print=pretty_print, + ) for channelDensityNonUniformGHK_ in self.channel_density_non_uniform_ghks: - channelDensityNonUniformGHK_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='channelDensityNonUniformGHK', pretty_print=pretty_print) + namespaceprefix_ = ( + self.channel_density_non_uniform_ghks_nsprefix_ + ":" + if (UseCapturedNS_ and self.channel_density_non_uniform_ghks_nsprefix_) + else "" + ) + channelDensityNonUniformGHK_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="channelDensityNonUniformGHK", + pretty_print=pretty_print, + ) for spikeThresh_ in self.spike_threshes: - spikeThresh_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='spikeThresh', pretty_print=pretty_print) + namespaceprefix_ = ( + self.spike_threshes_nsprefix_ + ":" + if (UseCapturedNS_ and self.spike_threshes_nsprefix_) + else "" + ) + spikeThresh_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="spikeThresh", + pretty_print=pretty_print, + ) for specificCapacitance_ in self.specific_capacitances: - specificCapacitance_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='specificCapacitance', pretty_print=pretty_print) + namespaceprefix_ = ( + self.specific_capacitances_nsprefix_ + ":" + if (UseCapturedNS_ and self.specific_capacitances_nsprefix_) + else 
"" + ) + specificCapacitance_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="specificCapacitance", + pretty_print=pretty_print, + ) for initMembPotential_ in self.init_memb_potentials: - initMembPotential_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='initMembPotential', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.init_memb_potentials_nsprefix_ + ":" + if (UseCapturedNS_ and self.init_memb_potentials_nsprefix_) + else "" + ) + initMembPotential_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="initMembPotential", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'channelPopulation': + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "channelPopulation": obj_ = ChannelPopulation.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, 
gds_collector_=gds_collector_) self.channel_populations.append(obj_) - obj_.original_tagname_ = 'channelPopulation' - elif nodeName_ == 'channelDensity': + obj_.original_tagname_ = "channelPopulation" + elif nodeName_ == "channelDensity": class_obj_ = self.get_class_obj_(child_, ChannelDensity) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.channel_densities.append(obj_) - obj_.original_tagname_ = 'channelDensity' - elif nodeName_ == 'channelDensityVShift': + obj_.original_tagname_ = "channelDensity" + elif nodeName_ == "channelDensityVShift": obj_ = ChannelDensityVShift.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.channel_density_v_shifts.append(obj_) - obj_.original_tagname_ = 'channelDensityVShift' - elif nodeName_ == 'channelDensityNernst': + obj_.original_tagname_ = "channelDensityVShift" + elif nodeName_ == "channelDensityNernst": class_obj_ = self.get_class_obj_(child_, ChannelDensityNernst) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.channel_density_nernsts.append(obj_) - obj_.original_tagname_ = 'channelDensityNernst' - elif nodeName_ == 'channelDensityGHK': + obj_.original_tagname_ = "channelDensityNernst" + elif nodeName_ == "channelDensityGHK": obj_ = ChannelDensityGHK.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.channel_density_ghks.append(obj_) - obj_.original_tagname_ = 'channelDensityGHK' - elif nodeName_ == 'channelDensityGHK2': + obj_.original_tagname_ = "channelDensityGHK" + elif nodeName_ == "channelDensityGHK2": obj_ = ChannelDensityGHK2.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.channel_density_ghk2s.append(obj_) - obj_.original_tagname_ = 'channelDensityGHK2' - elif nodeName_ == 
'channelDensityNonUniform': + obj_.original_tagname_ = "channelDensityGHK2" + elif nodeName_ == "channelDensityNonUniform": obj_ = ChannelDensityNonUniform.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.channel_density_non_uniforms.append(obj_) - obj_.original_tagname_ = 'channelDensityNonUniform' - elif nodeName_ == 'channelDensityNonUniformNernst': + obj_.original_tagname_ = "channelDensityNonUniform" + elif nodeName_ == "channelDensityNonUniformNernst": obj_ = ChannelDensityNonUniformNernst.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.channel_density_non_uniform_nernsts.append(obj_) - obj_.original_tagname_ = 'channelDensityNonUniformNernst' - elif nodeName_ == 'channelDensityNonUniformGHK': + obj_.original_tagname_ = "channelDensityNonUniformNernst" + elif nodeName_ == "channelDensityNonUniformGHK": obj_ = ChannelDensityNonUniformGHK.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.channel_density_non_uniform_ghks.append(obj_) - obj_.original_tagname_ = 'channelDensityNonUniformGHK' - elif nodeName_ == 'spikeThresh': + obj_.original_tagname_ = "channelDensityNonUniformGHK" + elif nodeName_ == "spikeThresh": obj_ = SpikeThresh.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.spike_threshes.append(obj_) - obj_.original_tagname_ = 'spikeThresh' - elif nodeName_ == 'specificCapacitance': + obj_.original_tagname_ = "spikeThresh" + elif nodeName_ == "specificCapacitance": obj_ = SpecificCapacitance.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.specific_capacitances.append(obj_) - obj_.original_tagname_ = 'specificCapacitance' - elif nodeName_ == 'initMembPotential': + obj_.original_tagname_ = "specificCapacitance" + elif nodeName_ == "initMembPotential": obj_ = 
InitMembPotential.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.init_memb_potentials.append(obj_) - obj_.original_tagname_ = 'initMembPotential' + obj_.original_tagname_ = "initMembPotential" + + # end class MembraneProperties class MembraneProperties2CaPools(MembraneProperties): + """MembraneProperties2CaPools -- Variant of membraneProperties with 2 independent Ca pools""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('channel_density_nernst_ca2s', 'ChannelDensityNernstCa2', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ChannelDensityNernstCa2', u'name': u'channelDensityNernstCa2', u'minOccurs': u'0'}, None), + MemberSpec_( + "channel_density_nernst_ca2s", + "ChannelDensityNernstCa2", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "channelDensityNernstCa2", + "type": "ChannelDensityNernstCa2", + }, + None, + ), ] subclass = None superclass = MembraneProperties - def __init__(self, channel_populations=None, channel_densities=None, channel_density_v_shifts=None, channel_density_nernsts=None, channel_density_ghks=None, channel_density_ghk2s=None, channel_density_non_uniforms=None, channel_density_non_uniform_nernsts=None, channel_density_non_uniform_ghks=None, spike_threshes=None, specific_capacitances=None, init_memb_potentials=None, channel_density_nernst_ca2s=None, **kwargs_): + + def __init__( + self, + channel_populations=None, + channel_densities=None, + channel_density_v_shifts=None, + channel_density_nernsts=None, + channel_density_ghks=None, + channel_density_ghk2s=None, + channel_density_non_uniforms=None, + channel_density_non_uniform_nernsts=None, + channel_density_non_uniform_ghks=None, + spike_threshes=None, + specific_capacitances=None, + init_memb_potentials=None, + channel_density_nernst_ca2s=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = 
None - self.parent_object_ = kwargs_.get('parent_object_') - super(MembraneProperties2CaPools, self).__init__(channel_populations, channel_densities, channel_density_v_shifts, channel_density_nernsts, channel_density_ghks, channel_density_ghk2s, channel_density_non_uniforms, channel_density_non_uniform_nernsts, channel_density_non_uniform_ghks, spike_threshes, specific_capacitances, init_memb_potentials, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("MembraneProperties2CaPools"), self).__init__( + channel_populations, + channel_densities, + channel_density_v_shifts, + channel_density_nernsts, + channel_density_ghks, + channel_density_ghk2s, + channel_density_non_uniforms, + channel_density_non_uniform_nernsts, + channel_density_non_uniform_ghks, + spike_threshes, + specific_capacitances, + init_memb_potentials, + **kwargs_ + ) if channel_density_nernst_ca2s is None: self.channel_density_nernst_ca2s = [] else: self.channel_density_nernst_ca2s = channel_density_nernst_ca2s + self.channel_density_nernst_ca2s_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, MembraneProperties2CaPools) + CurrentSubclassModule_, MembraneProperties2CaPools + ) if subclass is not None: return subclass(*args_, **kwargs_) if MembraneProperties2CaPools.subclass: return MembraneProperties2CaPools.subclass(*args_, **kwargs_) else: return MembraneProperties2CaPools(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): + + def _hasContent(self): if ( - self.channel_density_nernst_ca2s or - super(MembraneProperties2CaPools, self).hasContent_() + self.channel_density_nernst_ca2s + or super(MembraneProperties2CaPools, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='MembraneProperties2CaPools', pretty_print=True): - 
imported_ns_def_ = GenerateDSNamespaceDefs_.get('MembraneProperties2CaPools') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="MembraneProperties2CaPools", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("MembraneProperties2CaPools") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "MembraneProperties2CaPools": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MembraneProperties2CaPools') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='MembraneProperties2CaPools', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="MembraneProperties2CaPools", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="MembraneProperties2CaPools", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='MembraneProperties2CaPools'): - 
super(MembraneProperties2CaPools, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MembraneProperties2CaPools') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='MembraneProperties2CaPools', fromsubclass_=False, pretty_print=True): - super(MembraneProperties2CaPools, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="MembraneProperties2CaPools", + ): + super(MembraneProperties2CaPools, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="MembraneProperties2CaPools", + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="MembraneProperties2CaPools", + fromsubclass_=False, + pretty_print=True, + ): + super(MembraneProperties2CaPools, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for channelDensityNernstCa2_ in self.channel_density_nernst_ca2s: - channelDensityNernstCa2_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='channelDensityNernstCa2', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.channel_density_nernst_ca2s_nsprefix_ + ":" + if (UseCapturedNS_ and self.channel_density_nernst_ca2s_nsprefix_) + else "" + ) + channelDensityNernstCa2_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="channelDensityNernstCa2", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + 
self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(MembraneProperties2CaPools, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'channelDensityNernstCa2': + + def _buildAttributes(self, node, attrs, already_processed): + super(MembraneProperties2CaPools, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "channelDensityNernstCa2": obj_ = ChannelDensityNernstCa2.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.channel_density_nernst_ca2s.append(obj_) - obj_.original_tagname_ = 'channelDensityNernstCa2' - super(MembraneProperties2CaPools, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "channelDensityNernstCa2" + super(MembraneProperties2CaPools, self)._buildChildren( + child_, node, nodeName_, True + ) + + # end class MembraneProperties2CaPools -class ValueAcrossSegOrSegGroup(GeneratedsSuper): +class SpikeThresh(GeneratedsSuper): + """SpikeThresh -- Membrane potential at which to emit a spiking event. 
Note, usually the spiking event will not be emitted again until the membrane potential has fallen below this value and rises again to cross it in a positive direction + \n + :param value: + :type value: voltage + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('value', 'Nml2Quantity', 0, 1, {'use': u'optional'}), - MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('segments', 'NmlId', 0, 1, {'use': u'optional'}), + MemberSpec_( + "value", "Nml2Quantity_voltage", 0, 0, {"use": "required", "name": "value"} + ), + MemberSpec_( + "segment_groups", + "NmlId", + 0, + 1, + {"use": "optional", "name": "segment_groups"}, + ), ] subclass = None superclass = None - def __init__(self, value=None, segment_groups='all', segments=None, extensiontype_=None, **kwargs_): + + def __init__( + self, value=None, segment_groups="all", gds_collector_=None, **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.value = _cast(None, value) + self.value_nsprefix_ = None self.segment_groups = _cast(None, segment_groups) - self.segments = _cast(None, segments) - self.extensiontype_ = extensiontype_ + self.segment_groups_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ValueAcrossSegOrSegGroup) + subclass = getSubclassFromModule_(CurrentSubclassModule_, SpikeThresh) if subclass is not None: return subclass(*args_, **kwargs_) - if ValueAcrossSegOrSegGroup.subclass: - return ValueAcrossSegOrSegGroup.subclass(*args_, **kwargs_) + if SpikeThresh.subclass: + return SpikeThresh.subclass(*args_, **kwargs_) else: - return ValueAcrossSegOrSegGroup(*args_, **kwargs_) + return SpikeThresh(*args_, **kwargs_) + factory = 
staticmethod(factory) - def validate_Nml2Quantity(self, value): - # Validate type Nml2Quantity, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_patterns_, )) - validate_Nml2Quantity_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*([_a-zA-Z0-9])*$']] - def validate_NmlId(self, value): - # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): - if ( + def validate_Nml2Quantity_voltage(self, value): + # Validate type Nml2Quantity_voltage, a restriction on xs:string. + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_voltage_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$"] + ] + + def validate_NmlId(self, value): + # Validate type NmlId, a restriction on xs:string. 
+ if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ValueAcrossSegOrSegGroup', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ValueAcrossSegOrSegGroup') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpikeThresh", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SpikeThresh") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "SpikeThresh": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ValueAcrossSegOrSegGroup') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ValueAcrossSegOrSegGroup', pretty_print=pretty_print) - 
outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ValueAcrossSegOrSegGroup'): - if self.value is not None and 'value' not in already_processed: - already_processed.add('value') - outfile.write(' value=%s' % (quote_attrib(self.value), )) - if self.segment_groups != "all" and 'segment_groups' not in already_processed: - already_processed.add('segment_groups') - outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) - if self.segments is not None and 'segments' not in already_processed: - already_processed.add('segments') - outfile.write(' segment=%s' % (quote_attrib(self.segments), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') - outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ValueAcrossSegOrSegGroup', fromsubclass_=False, pretty_print=True): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SpikeThresh" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SpikeThresh", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="SpikeThresh", + ): + if self.value is not None and "value" not in already_processed: + already_processed.add("value") + outfile.write( + " value=%s" + % ( + self.gds_encode( + 
self.gds_format_string( + quote_attrib(self.value), input_name="value" + ) + ), + ) + ) + if self.segment_groups != "all" and "segment_groups" not in already_processed: + already_processed.add("segment_groups") + outfile.write( + " segmentGroup=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.segment_groups), input_name="segmentGroup" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpikeThresh", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('value', node) - if value is not None and 'value' not in already_processed: - already_processed.add('value') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("value", node) + if value is not None and "value" not in already_processed: + already_processed.add("value") self.value = value - self.validate_Nml2Quantity(self.value) # validate type Nml2Quantity - value = find_attr_value_('segmentGroup', node) - if value is not None and 'segmentGroup' not in already_processed: - already_processed.add('segmentGroup') + self.validate_Nml2Quantity_voltage( + self.value + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("segmentGroup", node) + if value is not None and "segmentGroup" not in already_processed: + 
already_processed.add("segmentGroup") self.segment_groups = value - self.validate_NmlId(self.segment_groups) # validate type NmlId - value = find_attr_value_('segment', node) - if value is not None and 'segment' not in already_processed: - already_processed.add('segment') - self.segments = value - self.validate_NmlId(self.segments) # validate type NmlId - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') - self.extensiontype_ = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + self.validate_NmlId(self.segment_groups) # validate type NmlId + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class SpikeThresh + + +class SpecificCapacitance(GeneratedsSuper): + """SpecificCapacitance -- Capacitance per unit area + \n + :param value: + :type value: specificCapacitance + + """ + + __hash__ = GeneratedsSuper.__hash__ + member_data_items_ = [ + MemberSpec_( + "value", + "Nml2Quantity_specificCapacitance", + 0, + 0, + {"use": "required", "name": "value"}, + ), + MemberSpec_( + "segment_groups", + "NmlId", + 0, + 1, + {"use": "optional", "name": "segment_groups"}, + ), + ] + subclass = None + superclass = None + + def __init__( + self, value=None, segment_groups="all", gds_collector_=None, **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.value = _cast(None, value) + self.value_nsprefix_ = None + self.segment_groups = _cast(None, segment_groups) + self.segment_groups_nsprefix_ = None + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, SpecificCapacitance + ) + if subclass is not None: + return subclass(*args_, **kwargs_) + if 
SpecificCapacitance.subclass: + return SpecificCapacitance.subclass(*args_, **kwargs_) + else: + return SpecificCapacitance(*args_, **kwargs_) + + factory = staticmethod(factory) + + def validate_Nml2Quantity_specificCapacitance(self, value): + # Validate type Nml2Quantity_specificCapacitance, a restriction on xs:string. + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_specificCapacitance_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_specificCapacitance_patterns_, + ) + ) + + validate_Nml2Quantity_specificCapacitance_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(F_per_m2|uF_per_cm2))$"] + ] + + def validate_NmlId(self, value): + # Validate type NmlId, a restriction on xs:string. 
+ if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + + def _hasContent(self): + if (): + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpecificCapacitance", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SpecificCapacitance") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "SpecificCapacitance": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="SpecificCapacitance", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SpecificCapacitance", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + 
name_="SpecificCapacitance", + ): + if self.value is not None and "value" not in already_processed: + already_processed.add("value") + outfile.write( + " value=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.value), input_name="value" + ) + ), + ) + ) + if self.segment_groups != "all" and "segment_groups" not in already_processed: + already_processed.add("segment_groups") + outfile.write( + " segmentGroup=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.segment_groups), input_name="segmentGroup" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpecificCapacitance", + fromsubclass_=False, + pretty_print=True, + ): pass -# end class ValueAcrossSegOrSegGroup + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("value", node) + if value is not None and "value" not in already_processed: + already_processed.add("value") + self.value = value + self.validate_Nml2Quantity_specificCapacitance( + self.value + ) # validate type Nml2Quantity_specificCapacitance + value = find_attr_value_("segmentGroup", node) + if value is not None and "segmentGroup" not in already_processed: + already_processed.add("segmentGroup") + self.segment_groups = value + self.validate_NmlId(self.segment_groups) # validate type NmlId + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class SpecificCapacitance + + +class 
InitMembPotential(GeneratedsSuper): + """InitMembPotential -- Explicitly set initial membrane potential for the cell + \n + :param value: + :type value: voltage + + """ + + __hash__ = GeneratedsSuper.__hash__ + member_data_items_ = [ + MemberSpec_( + "value", "Nml2Quantity_voltage", 0, 0, {"use": "required", "name": "value"} + ), + MemberSpec_( + "segment_groups", + "NmlId", + 0, + 1, + {"use": "optional", "name": "segment_groups"}, + ), + ] + subclass = None + superclass = None + + def __init__( + self, value=None, segment_groups="all", gds_collector_=None, **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.value = _cast(None, value) + self.value_nsprefix_ = None + self.segment_groups = _cast(None, segment_groups) + self.segment_groups_nsprefix_ = None + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, InitMembPotential) + if subclass is not None: + return subclass(*args_, **kwargs_) + if InitMembPotential.subclass: + return InitMembPotential.subclass(*args_, **kwargs_) + else: + return InitMembPotential(*args_, **kwargs_) + + factory = staticmethod(factory) + + def validate_Nml2Quantity_voltage(self, value): + # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
+ if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_voltage_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$"] + ] + + def validate_NmlId(self, value): + # Validate type NmlId, a restriction on xs:string. + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + + def _hasContent(self): + if (): + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="InitMembPotential", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("InitMembPotential") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "InitMembPotential": + name_ = 
self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="InitMembPotential", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="InitMembPotential", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="InitMembPotential", + ): + if self.value is not None and "value" not in already_processed: + already_processed.add("value") + outfile.write( + " value=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.value), input_name="value" + ) + ), + ) + ) + if self.segment_groups != "all" and "segment_groups" not in already_processed: + already_processed.add("segment_groups") + outfile.write( + " segmentGroup=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.segment_groups), input_name="segmentGroup" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="InitMembPotential", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + 
return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("value", node) + if value is not None and "value" not in already_processed: + already_processed.add("value") + self.value = value + self.validate_Nml2Quantity_voltage( + self.value + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("segmentGroup", node) + if value is not None and "segmentGroup" not in already_processed: + already_processed.add("segmentGroup") + self.segment_groups = value + self.validate_NmlId(self.segment_groups) # validate type NmlId + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class InitMembPotential + + +class Resistivity(GeneratedsSuper): + """Resistivity -- The resistivity, or specific axial resistance, of the cytoplasm + \n + :param value: + :type value: resistivity + + """ + + __hash__ = GeneratedsSuper.__hash__ + member_data_items_ = [ + MemberSpec_( + "value", + "Nml2Quantity_resistivity", + 0, + 0, + {"use": "required", "name": "value"}, + ), + MemberSpec_( + "segment_groups", + "NmlId", + 0, + 1, + {"use": "optional", "name": "segment_groups"}, + ), + ] + subclass = None + superclass = None + + def __init__( + self, value=None, segment_groups="all", gds_collector_=None, **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None + self.original_tagname_ = None + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + self.value = _cast(None, value) + self.value_nsprefix_ = None + self.segment_groups = _cast(None, segment_groups) + self.segment_groups_nsprefix_ = None + + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_(CurrentSubclassModule_, Resistivity) + if subclass is not None: + return subclass(*args_, **kwargs_) + if Resistivity.subclass: + return Resistivity.subclass(*args_, **kwargs_) + else: + return Resistivity(*args_, 
**kwargs_) + + factory = staticmethod(factory) + + def validate_Nml2Quantity_resistivity(self, value): + # Validate type Nml2Quantity_resistivity, a restriction on xs:string. + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_resistivity_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_resistivity_patterns_, + ) + ) + + validate_Nml2Quantity_resistivity_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(ohm_cm|kohm_cm|ohm_m))$"] + ] + + def validate_NmlId(self, value): + # Validate type NmlId, a restriction on xs:string. 
+ if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + + def _hasContent(self): + if (): + return True + else: + return False + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Resistivity", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Resistivity") + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Resistivity": + name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" + showIndent(outfile, level, pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Resistivity" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Resistivity", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="Resistivity", + ): + if self.value is not None and 
"value" not in already_processed: + already_processed.add("value") + outfile.write( + " value=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.value), input_name="value" + ) + ), + ) + ) + if self.segment_groups != "all" and "segment_groups" not in already_processed: + already_processed.add("segment_groups") + outfile.write( + " segmentGroup=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.segment_groups), input_name="segmentGroup" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Resistivity", + fromsubclass_=False, + pretty_print=True, + ): + pass + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) + return self + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("value", node) + if value is not None and "value" not in already_processed: + already_processed.add("value") + self.value = value + self.validate_Nml2Quantity_resistivity( + self.value + ) # validate type Nml2Quantity_resistivity + value = find_attr_value_("segmentGroup", node) + if value is not None and "segmentGroup" not in already_processed: + already_processed.add("segmentGroup") + self.segment_groups = value + self.validate_NmlId(self.segment_groups) # validate type NmlId + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + pass + + +# end class Resistivity class VariableParameter(GeneratedsSuper): + """VariableParameter -- Specifies a **parameter** ( e. g. 
condDensity ) which can vary its value across a **segmentGroup.** The value is calculated from **value** attribute of the **inhomogeneousValue** subelement. This element is normally a child of **channelDensityNonUniform** , **channelDensityNonUniformNernst** or **channelDensityNonUniformGHK** and is used to calculate the value of the conductance, etc. which will vary on different parts of the cell. The **segmentGroup** specified here needs to define an **inhomogeneousParameter** ( referenced from **inhomogeneousParameter** in the **inhomogeneousValue** ), which calculates a **variable** ( e. g. p ) varying across the cell ( e. g. based on the path length from soma ), which is then used in the **value** attribute of the **inhomogeneousValue** ( so for example condDensity = f( p ) )""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('parameter', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('segment_groups', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('inhomogeneous_value', 'InhomogeneousValue', 0, 1, {u'type': u'InhomogeneousValue', u'name': u'inhomogeneousValue', u'minOccurs': u'0'}, None), + MemberSpec_( + "parameter", "xs:string", 0, 0, {"use": "required", "name": "parameter"} + ), + MemberSpec_( + "segment_groups", + "xs:string", + 0, + 0, + {"use": "required", "name": "segment_groups"}, + ), + MemberSpec_( + "inhomogeneous_value", + "InhomogeneousValue", + 0, + 1, + { + "minOccurs": "0", + "name": "inhomogeneousValue", + "type": "InhomogeneousValue", + }, + None, + ), ] subclass = None superclass = None - def __init__(self, parameter=None, segment_groups=None, inhomogeneous_value=None, **kwargs_): + + def __init__( + self, + parameter=None, + segment_groups=None, + inhomogeneous_value=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = 
kwargs_.get("parent_object_") + self.ns_prefix_ = None self.parameter = _cast(None, parameter) + self.parameter_nsprefix_ = None self.segment_groups = _cast(None, segment_groups) + self.segment_groups_nsprefix_ = None self.inhomogeneous_value = inhomogeneous_value + self.inhomogeneous_value_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, VariableParameter) + subclass = getSubclassFromModule_(CurrentSubclassModule_, VariableParameter) if subclass is not None: return subclass(*args_, **kwargs_) if VariableParameter.subclass: return VariableParameter.subclass(*args_, **kwargs_) else: return VariableParameter(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - self.inhomogeneous_value is not None - ): + + def _hasContent(self): + if self.inhomogeneous_value is not None: return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='VariableParameter', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('VariableParameter') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="VariableParameter", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("VariableParameter") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "VariableParameter": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, 
already_processed, namespaceprefix_, name_='VariableParameter') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='VariableParameter', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="VariableParameter", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="VariableParameter", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='VariableParameter'): - if self.parameter is not None and 'parameter' not in already_processed: - already_processed.add('parameter') - outfile.write(' parameter=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.parameter), input_name='parameter')), )) - if self.segment_groups is not None and 'segment_groups' not in already_processed: - already_processed.add('segment_groups') - outfile.write(' segmentGroup=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.segment_groups), input_name='segmentGroup')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='VariableParameter', fromsubclass_=False, pretty_print=True): + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="VariableParameter", + ): + if self.parameter is not None and "parameter" not in already_processed: + already_processed.add("parameter") + outfile.write( 
+ " parameter=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.parameter), input_name="parameter" + ) + ), + ) + ) + if ( + self.segment_groups is not None + and "segment_groups" not in already_processed + ): + already_processed.add("segment_groups") + outfile.write( + " segmentGroup=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.segment_groups), input_name="segmentGroup" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="VariableParameter", + fromsubclass_=False, + pretty_print=True, + ): if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.inhomogeneous_value is not None: - self.inhomogeneous_value.export(outfile, level, namespaceprefix_, namespacedef_='', name_='inhomogeneousValue', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.inhomogeneous_value_nsprefix_ + ":" + if (UseCapturedNS_ and self.inhomogeneous_value_nsprefix_) + else "" + ) + self.inhomogeneous_value.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="inhomogeneousValue", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('parameter', node) - if value is not None and 'parameter' not in already_processed: - 
already_processed.add('parameter') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("parameter", node) + if value is not None and "parameter" not in already_processed: + already_processed.add("parameter") self.parameter = value - value = find_attr_value_('segmentGroup', node) - if value is not None and 'segmentGroup' not in already_processed: - already_processed.add('segmentGroup') + value = find_attr_value_("segmentGroup", node) + if value is not None and "segmentGroup" not in already_processed: + already_processed.add("segmentGroup") self.segment_groups = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'inhomogeneousValue': + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "inhomogeneousValue": obj_ = InhomogeneousValue.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.inhomogeneous_value = obj_ - obj_.original_tagname_ = 'inhomogeneousValue' + obj_.original_tagname_ = "inhomogeneousValue" + + # end class VariableParameter class InhomogeneousValue(GeneratedsSuper): + """InhomogeneousValue -- Specifies the **value** of an **inhomogeneousParameter.** For usage see **variableParameter**""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('inhomogeneous_parameters', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('value', 'xs:string', 0, 0, {'use': u'required'}), + MemberSpec_( + "inhomogeneous_parameters", + "xs:string", + 0, + 0, + {"use": "required", "name": "inhomogeneous_parameters"}, + ), + MemberSpec_("value", "xs:string", 0, 0, {"use": "required", "name": "value"}), ] subclass = None superclass = None - def __init__(self, inhomogeneous_parameters=None, value=None, **kwargs_): + + def __init__( + self, inhomogeneous_parameters=None, value=None, gds_collector_=None, **kwargs_ + ): + self.gds_collector_ = 
gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.inhomogeneous_parameters = _cast(None, inhomogeneous_parameters) + self.inhomogeneous_parameters_nsprefix_ = None self.value = _cast(None, value) + self.value_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, InhomogeneousValue) + CurrentSubclassModule_, InhomogeneousValue + ) if subclass is not None: return subclass(*args_, **kwargs_) if InhomogeneousValue.subclass: return InhomogeneousValue.subclass(*args_, **kwargs_) else: return InhomogeneousValue(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - ): + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InhomogeneousValue', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('InhomogeneousValue') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="InhomogeneousValue", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("InhomogeneousValue") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "InhomogeneousValue": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InhomogeneousValue') - if 
self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='InhomogeneousValue', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='InhomogeneousValue'): - if self.inhomogeneous_parameters is not None and 'inhomogeneous_parameters' not in already_processed: - already_processed.add('inhomogeneous_parameters') - outfile.write(' inhomogeneousParameter=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.inhomogeneous_parameters), input_name='inhomogeneousParameter')), )) - if self.value is not None and 'value' not in already_processed: - already_processed.add('value') - outfile.write(' value=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.value), input_name='value')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InhomogeneousValue', fromsubclass_=False, pretty_print=True): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="InhomogeneousValue", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="InhomogeneousValue", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="InhomogeneousValue", + ): + if ( + self.inhomogeneous_parameters is not None + and "inhomogeneous_parameters" not in already_processed + ): + already_processed.add("inhomogeneous_parameters") + outfile.write( + " 
inhomogeneousParameter=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.inhomogeneous_parameters), + input_name="inhomogeneousParameter", + ) + ), + ) + ) + if self.value is not None and "value" not in already_processed: + already_processed.add("value") + outfile.write( + " value=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.value), input_name="value" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="InhomogeneousValue", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('inhomogeneousParameter', node) - if value is not None and 'inhomogeneousParameter' not in already_processed: - already_processed.add('inhomogeneousParameter') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("inhomogeneousParameter", node) + if value is not None and "inhomogeneousParameter" not in already_processed: + already_processed.add("inhomogeneousParameter") self.inhomogeneous_parameters = value - value = find_attr_value_('value', node) - if value is not None and 'value' not in already_processed: - already_processed.add('value') + value = find_attr_value_("value", node) + if value is not None and "value" not in already_processed: + already_processed.add("value") self.value = value - 
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass + + # end class InhomogeneousValue -class Species(ValueAcrossSegOrSegGroup): - """Specifying the ion here again is redundant, the ion name should be - the same as id. Kept for now until LEMS implementation can - select by id. TODO: remove.""" +class Species(GeneratedsSuper): + """Species -- Description of a chemical species identified by **ion,** which has internal, **concentration,** and external, **extConcentration** values for its concentration + \n + :param initialConcentration + : + :type initialConcentration: concentration + :param initialExtConcentration: + :type initialExtConcentration: concentration + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('id', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('concentration_model', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('ion', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('initial_concentration', 'Nml2Quantity_concentration', 0, 0, {'use': u'required'}), - MemberSpec_('initial_ext_concentration', 'Nml2Quantity_concentration', 0, 0, {'use': u'required'}), + MemberSpec_("id", "NmlId", 0, 0, {"use": "required", "name": "id"}), + MemberSpec_( + "concentration_model", + "NmlId", + 0, + 0, + {"use": "required", "name": "concentration_model"}, + ), + MemberSpec_("ion", "NmlId", 0, 1, {"use": "optional", "name": "ion"}), + MemberSpec_( + "initial_concentration", + "Nml2Quantity_concentration", + 0, + 0, + {"use": "required", "name": "initial_concentration"}, + ), + MemberSpec_( + "initial_ext_concentration", + "Nml2Quantity_concentration", + 0, + 0, + {"use": "required", "name": "initial_ext_concentration"}, + ), + MemberSpec_( + "segment_groups", + "NmlId", + 0, + 1, + {"use": "optional", "name": "segment_groups"}, + ), ] subclass = None - superclass = ValueAcrossSegOrSegGroup - def 
__init__(self, value=None, segment_groups='all', segments=None, id=None, concentration_model=None, ion=None, initial_concentration=None, initial_ext_concentration=None, **kwargs_): + superclass = None + + def __init__( + self, + id=None, + concentration_model=None, + ion=None, + initial_concentration=None, + initial_ext_concentration=None, + segment_groups="all", + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Species, self).__init__(value, segment_groups, segments, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.id = _cast(None, id) + self.id_nsprefix_ = None self.concentration_model = _cast(None, concentration_model) + self.concentration_model_nsprefix_ = None self.ion = _cast(None, ion) + self.ion_nsprefix_ = None self.initial_concentration = _cast(None, initial_concentration) + self.initial_concentration_nsprefix_ = None self.initial_ext_concentration = _cast(None, initial_ext_concentration) + self.initial_ext_concentration_nsprefix_ = None + self.segment_groups = _cast(None, segment_groups) + self.segment_groups_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Species) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Species) if subclass is not None: return subclass(*args_, **kwargs_) if Species.subclass: return Species.subclass(*args_, **kwargs_) else: return Species(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + def validate_Nml2Quantity_concentration(self, value): # Validate type Nml2Quantity_concentration, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_concentration_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_concentration_patterns_, )) - validate_Nml2Quantity_concentration_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m3|mol_per_cm3|M|mM)$']] - def hasContent_(self): if ( - super(Species, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_concentration_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_concentration_patterns_, + ) + ) + + validate_Nml2Quantity_concentration_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m3|mol_per_cm3|M|mM))$"] + ] + + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Species', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Species') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Species", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Species") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Species": name_ = self.original_tagname_ + 
if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Species') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Species', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Species'): - super(Species, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Species') - if self.id is not None and 'id' not in already_processed: - already_processed.add('id') - outfile.write(' id=%s' % (quote_attrib(self.id), )) - if self.concentration_model is not None and 'concentration_model' not in already_processed: - already_processed.add('concentration_model') - outfile.write(' concentrationModel=%s' % (quote_attrib(self.concentration_model), )) - if self.ion is not None and 'ion' not in already_processed: - already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) - if self.initial_concentration is not None and 'initial_concentration' not in already_processed: - already_processed.add('initial_concentration') - outfile.write(' initialConcentration=%s' % (quote_attrib(self.initial_concentration), )) - if self.initial_ext_concentration is not None and 'initial_ext_concentration' not in already_processed: - already_processed.add('initial_ext_concentration') - outfile.write(' initialExtConcentration=%s' % (quote_attrib(self.initial_ext_concentration), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Species', fromsubclass_=False, pretty_print=True): - 
super(Species, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Species" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Species", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Species" + ): + if self.id is not None and "id" not in already_processed: + already_processed.add("id") + outfile.write( + " id=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.id), input_name="id") + ), + ) + ) + if ( + self.concentration_model is not None + and "concentration_model" not in already_processed + ): + already_processed.add("concentration_model") + outfile.write( + " concentrationModel=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.concentration_model), + input_name="concentrationModel", + ) + ), + ) + ) + if self.ion is not None and "ion" not in already_processed: + already_processed.add("ion") + outfile.write( + " ion=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.ion), input_name="ion") + ), + ) + ) + if ( + self.initial_concentration is not None + and "initial_concentration" not in already_processed + ): + already_processed.add("initial_concentration") + outfile.write( + " initialConcentration=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.initial_concentration), + input_name="initialConcentration", + ) + ), + ) + ) + if ( + self.initial_ext_concentration is not None + and "initial_ext_concentration" not in 
already_processed + ): + already_processed.add("initial_ext_concentration") + outfile.write( + " initialExtConcentration=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.initial_ext_concentration), + input_name="initialExtConcentration", + ) + ), + ) + ) + if self.segment_groups != "all" and "segment_groups" not in already_processed: + already_processed.add("segment_groups") + outfile.write( + " segmentGroup=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.segment_groups), input_name="segmentGroup" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Species", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('id', node) - if value is not None and 'id' not in already_processed: - already_processed.add('id') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("id", node) + if value is not None and "id" not in already_processed: + already_processed.add("id") self.id = value - self.validate_NmlId(self.id) # validate type NmlId - value = find_attr_value_('concentrationModel', node) - if value is not None and 'concentrationModel' not in already_processed: - already_processed.add('concentrationModel') + self.validate_NmlId(self.id) # validate type NmlId + value = 
find_attr_value_("concentrationModel", node) + if value is not None and "concentrationModel" not in already_processed: + already_processed.add("concentrationModel") self.concentration_model = value - self.validate_NmlId(self.concentration_model) # validate type NmlId - value = find_attr_value_('ion', node) - if value is not None and 'ion' not in already_processed: - already_processed.add('ion') + self.validate_NmlId(self.concentration_model) # validate type NmlId + value = find_attr_value_("ion", node) + if value is not None and "ion" not in already_processed: + already_processed.add("ion") self.ion = value - self.validate_NmlId(self.ion) # validate type NmlId - value = find_attr_value_('initialConcentration', node) - if value is not None and 'initialConcentration' not in already_processed: - already_processed.add('initialConcentration') + self.validate_NmlId(self.ion) # validate type NmlId + value = find_attr_value_("initialConcentration", node) + if value is not None and "initialConcentration" not in already_processed: + already_processed.add("initialConcentration") self.initial_concentration = value - self.validate_Nml2Quantity_concentration(self.initial_concentration) # validate type Nml2Quantity_concentration - value = find_attr_value_('initialExtConcentration', node) - if value is not None and 'initialExtConcentration' not in already_processed: - already_processed.add('initialExtConcentration') + self.validate_Nml2Quantity_concentration( + self.initial_concentration + ) # validate type Nml2Quantity_concentration + value = find_attr_value_("initialExtConcentration", node) + if value is not None and "initialExtConcentration" not in already_processed: + already_processed.add("initialExtConcentration") self.initial_ext_concentration = value - self.validate_Nml2Quantity_concentration(self.initial_ext_concentration) # validate type Nml2Quantity_concentration - super(Species, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, 
node, nodeName_, fromsubclass_=False): - super(Species, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_concentration( + self.initial_ext_concentration + ) # validate type Nml2Quantity_concentration + value = find_attr_value_("segmentGroup", node) + if value is not None and "segmentGroup" not in already_processed: + already_processed.add("segmentGroup") + self.segment_groups = value + self.validate_NmlId(self.segment_groups) # validate type NmlId + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass + + # end class Species class IntracellularProperties(GeneratedsSuper): + """IntracellularProperties -- Biophysical properties related to the intracellular space within the **cell** , such as the **resistivity** and the list of ionic **species** present. **caConc** and **caConcExt** are explicitly exposed here to facilitate accessing these values from other Components, even though **caConcExt** is clearly not an intracellular property""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('species', 'Species', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Species', u'name': u'species', u'minOccurs': u'0'}, None), - MemberSpec_('resistivities', 'Resistivity', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Resistivity', u'name': u'resistivity', u'minOccurs': u'0'}, None), + MemberSpec_( + "species", + "Species", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "species", + "type": "Species", + }, + None, + ), + MemberSpec_( + "resistivities", + "Resistivity", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "resistivity", + "type": "Resistivity", + }, + None, + ), ] subclass = None superclass = None - def __init__(self, species=None, resistivities=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + species=None, + resistivities=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + 
self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None if species is None: self.species = [] else: self.species = species + self.species_nsprefix_ = None if resistivities is None: self.resistivities = [] else: self.resistivities = resistivities + self.resistivities_nsprefix_ = None self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, IntracellularProperties) + CurrentSubclassModule_, IntracellularProperties + ) if subclass is not None: return subclass(*args_, **kwargs_) if IntracellularProperties.subclass: return IntracellularProperties.subclass(*args_, **kwargs_) else: return IntracellularProperties(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - self.species or - self.resistivities - ): + + def _hasContent(self): + if self.species or self.resistivities: return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IntracellularProperties', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IntracellularProperties') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="IntracellularProperties", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IntracellularProperties") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "IntracellularProperties": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" 
showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IntracellularProperties') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IntracellularProperties', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="IntracellularProperties", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IntracellularProperties", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IntracellularProperties'): - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="IntracellularProperties", + ): + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % 
(imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) pass - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IntracellularProperties', fromsubclass_=False, pretty_print=True): + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="IntracellularProperties", + fromsubclass_=False, + pretty_print=True, + ): if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for species_ in self.species: - species_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='species', pretty_print=pretty_print) + namespaceprefix_ = ( + self.species_nsprefix_ + ":" + if (UseCapturedNS_ and self.species_nsprefix_) + else "" + ) + species_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="species", + pretty_print=pretty_print, + ) for resistivity_ in self.resistivities: - resistivity_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='resistivity', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.resistivities_nsprefix_ + ":" + if (UseCapturedNS_ and self.resistivities_nsprefix_) + else "" + ) + resistivity_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="resistivity", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, 
node, attrs, already_processed): - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'species': + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "species": obj_ = Species.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.species.append(obj_) - obj_.original_tagname_ = 'species' - elif nodeName_ == 'resistivity': + obj_.original_tagname_ = "species" + elif nodeName_ == "resistivity": obj_ = Resistivity.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.resistivities.append(obj_) - obj_.original_tagname_ = 'resistivity' + obj_.original_tagname_ = "resistivity" + + # end class IntracellularProperties class IntracellularProperties2CaPools(IntracellularProperties): - member_data_items_ = [ - ] + """IntracellularProperties2CaPools -- Variant of intracellularProperties with 2 independent Ca pools""" + + __hash__ = GeneratedsSuper.__hash__ + member_data_items_ = [] subclass = None superclass = IntracellularProperties - def __init__(self, species=None, resistivities=None, **kwargs_): + + def __init__( + self, species=None, resistivities=None, gds_collector_=None, **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(IntracellularProperties2CaPools, self).__init__(species, resistivities, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + 
self.ns_prefix_ = None + super(globals().get("IntracellularProperties2CaPools"), self).__init__( + species, resistivities, **kwargs_ + ) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, IntracellularProperties2CaPools) + CurrentSubclassModule_, IntracellularProperties2CaPools + ) if subclass is not None: return subclass(*args_, **kwargs_) if IntracellularProperties2CaPools.subclass: return IntracellularProperties2CaPools.subclass(*args_, **kwargs_) else: return IntracellularProperties2CaPools(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(IntracellularProperties2CaPools, self).hasContent_() - ): + + def _hasContent(self): + if super(IntracellularProperties2CaPools, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IntracellularProperties2CaPools', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IntracellularProperties2CaPools') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IntracellularProperties2CaPools", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get( + "IntracellularProperties2CaPools" + ) if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if ( + self.original_tagname_ is not None + and name_ == "IntracellularProperties2CaPools" + ): name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, 
name_='IntracellularProperties2CaPools') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IntracellularProperties2CaPools', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="IntracellularProperties2CaPools", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IntracellularProperties2CaPools", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IntracellularProperties2CaPools'): - super(IntracellularProperties2CaPools, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IntracellularProperties2CaPools') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IntracellularProperties2CaPools', fromsubclass_=False, pretty_print=True): - super(IntracellularProperties2CaPools, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="IntracellularProperties2CaPools", + ): + super(IntracellularProperties2CaPools, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="IntracellularProperties2CaPools", + 
) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IntracellularProperties2CaPools", + fromsubclass_=False, + pretty_print=True, + ): + super(IntracellularProperties2CaPools, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(IntracellularProperties2CaPools, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(IntracellularProperties2CaPools, self).buildChildren(child_, node, nodeName_, True) + + def _buildAttributes(self, node, attrs, already_processed): + super(IntracellularProperties2CaPools, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(IntracellularProperties2CaPools, self)._buildChildren( + child_, node, nodeName_, True + ) pass + + # end class IntracellularProperties2CaPools class ExtracellularPropertiesLocal(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('species', 'Species', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Species', u'name': u'species', u'minOccurs': u'0'}, None), + MemberSpec_( + "species", + "Species", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "species", + "type": "Species", + }, + None, + ), 
] subclass = None superclass = None - def __init__(self, species=None, **kwargs_): + + def __init__(self, species=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None if species is None: self.species = [] else: self.species = species + self.species_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ExtracellularPropertiesLocal) + CurrentSubclassModule_, ExtracellularPropertiesLocal + ) if subclass is not None: return subclass(*args_, **kwargs_) if ExtracellularPropertiesLocal.subclass: return ExtracellularPropertiesLocal.subclass(*args_, **kwargs_) else: return ExtracellularPropertiesLocal(*args_, **kwargs_) - factory = staticmethod(factory) - def hasContent_(self): - if ( - self.species - ): + + factory = staticmethod(factory) + + def _hasContent(self): + if self.species: return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtracellularPropertiesLocal', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExtracellularPropertiesLocal') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="ExtracellularPropertiesLocal", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ExtracellularPropertiesLocal") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if ( + self.original_tagname_ is not None + and name_ == "ExtracellularPropertiesLocal" + ): name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + 
namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtracellularPropertiesLocal') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtracellularPropertiesLocal', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ExtracellularPropertiesLocal", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ExtracellularPropertiesLocal", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtracellularPropertiesLocal'): + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ExtracellularPropertiesLocal", + ): pass - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtracellularPropertiesLocal', fromsubclass_=False, pretty_print=True): + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="ExtracellularPropertiesLocal", + fromsubclass_=False, + pretty_print=True, + ): if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ 
= "" for species_ in self.species: - species_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='species', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.species_nsprefix_ + ":" + if (UseCapturedNS_ and self.species_nsprefix_) + else "" + ) + species_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="species", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): + + def _buildAttributes(self, node, attrs, already_processed): pass - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'species': + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "species": obj_ = Species.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.species.append(obj_) - obj_.original_tagname_ = 'species' + obj_.original_tagname_ = "species" + + # end class ExtracellularPropertiesLocal class SpaceStructure(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('x_spacing', 'xs:float', 0, 1, {'use': 'optional'}), - MemberSpec_('y_spacing', 'xs:float', 0, 1, {'use': u'optional'}), - MemberSpec_('z_spacing', 'xs:float', 0, 1, {'use': u'optional'}), - MemberSpec_('x_start', 'xs:float', 0, 1, {'use': u'optional'}), - MemberSpec_('y_start', 'xs:float', 0, 1, 
{'use': u'optional'}), - MemberSpec_('z_start', 'xs:float', 0, 1, {'use': u'optional'}), + MemberSpec_( + "x_spacing", "xs:float", 0, 1, {"use": "optional", "name": "x_spacing"} + ), + MemberSpec_( + "y_spacing", "xs:float", 0, 1, {"use": "optional", "name": "y_spacing"} + ), + MemberSpec_( + "z_spacing", "xs:float", 0, 1, {"use": "optional", "name": "z_spacing"} + ), + MemberSpec_( + "x_start", "xs:float", 0, 1, {"use": "optional", "name": "x_start"} + ), + MemberSpec_( + "y_start", "xs:float", 0, 1, {"use": "optional", "name": "y_start"} + ), + MemberSpec_( + "z_start", "xs:float", 0, 1, {"use": "optional", "name": "z_start"} + ), ] subclass = None superclass = None - def __init__(self, x_spacing=None, y_spacing=None, z_spacing=None, x_start=0, y_start=0, z_start=0, **kwargs_): + + def __init__( + self, + x_spacing=None, + y_spacing=None, + z_spacing=None, + x_start=0, + y_start=0, + z_start=0, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.x_spacing = _cast(float, x_spacing) + self.x_spacing_nsprefix_ = None self.y_spacing = _cast(float, y_spacing) + self.y_spacing_nsprefix_ = None self.z_spacing = _cast(float, z_spacing) + self.z_spacing_nsprefix_ = None self.x_start = _cast(float, x_start) + self.x_start_nsprefix_ = None self.y_start = _cast(float, y_start) + self.y_start_nsprefix_ = None self.z_start = _cast(float, z_start) + self.z_start_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, SpaceStructure) + subclass = getSubclassFromModule_(CurrentSubclassModule_, SpaceStructure) if subclass is not None: return subclass(*args_, **kwargs_) if SpaceStructure.subclass: return SpaceStructure.subclass(*args_, **kwargs_) else: return 
SpaceStructure(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - ): + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpaceStructure', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SpaceStructure') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpaceStructure", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SpaceStructure") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "SpaceStructure": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpaceStructure') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpaceStructure', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpaceStructure'): - if self.x_spacing is not None and 'x_spacing' not in already_processed: - already_processed.add('x_spacing') - outfile.write(' xSpacing="%s"' % self.gds_format_float(self.x_spacing, input_name='xSpacing')) - if self.y_spacing is not None and 'y_spacing' not in already_processed: - already_processed.add('y_spacing') - outfile.write(' ySpacing="%s"' % self.gds_format_float(self.y_spacing, 
input_name='ySpacing')) - if self.z_spacing is not None and 'z_spacing' not in already_processed: - already_processed.add('z_spacing') - outfile.write(' zSpacing="%s"' % self.gds_format_float(self.z_spacing, input_name='zSpacing')) - if self.x_start != 0 and 'x_start' not in already_processed: - already_processed.add('x_start') - outfile.write(' xStart="%s"' % self.gds_format_float(self.x_start, input_name='xStart')) - if self.y_start != 0 and 'y_start' not in already_processed: - already_processed.add('y_start') - outfile.write(' yStart="%s"' % self.gds_format_float(self.y_start, input_name='yStart')) - if self.z_start != 0 and 'z_start' not in already_processed: - already_processed.add('z_start') - outfile.write(' zStart="%s"' % self.gds_format_float(self.z_start, input_name='zStart')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpaceStructure', fromsubclass_=False, pretty_print=True): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SpaceStructure" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SpaceStructure", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="SpaceStructure", + ): + if self.x_spacing is not None and "x_spacing" not in already_processed: + already_processed.add("x_spacing") + outfile.write( + ' xSpacing="%s"' + % self.gds_format_float(self.x_spacing, input_name="xSpacing") + ) + if self.y_spacing is not None and "y_spacing" not in already_processed: + already_processed.add("y_spacing") + outfile.write( + ' ySpacing="%s"' + % 
self.gds_format_float(self.y_spacing, input_name="ySpacing") + ) + if self.z_spacing is not None and "z_spacing" not in already_processed: + already_processed.add("z_spacing") + outfile.write( + ' zSpacing="%s"' + % self.gds_format_float(self.z_spacing, input_name="zSpacing") + ) + if self.x_start != 0 and "x_start" not in already_processed: + already_processed.add("x_start") + outfile.write( + ' xStart="%s"' + % self.gds_format_float(self.x_start, input_name="xStart") + ) + if self.y_start != 0 and "y_start" not in already_processed: + already_processed.add("y_start") + outfile.write( + ' yStart="%s"' + % self.gds_format_float(self.y_start, input_name="yStart") + ) + if self.z_start != 0 and "z_start" not in already_processed: + already_processed.add("z_start") + outfile.write( + ' zStart="%s"' + % self.gds_format_float(self.z_start, input_name="zStart") + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpaceStructure", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('xSpacing', node) - if value is not None and 'xSpacing' not in already_processed: - already_processed.add('xSpacing') - try: - self.x_spacing = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (xSpacing): %s' % exp) - value = find_attr_value_('ySpacing', node) 
- if value is not None and 'ySpacing' not in already_processed: - already_processed.add('ySpacing') - try: - self.y_spacing = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (ySpacing): %s' % exp) - value = find_attr_value_('zSpacing', node) - if value is not None and 'zSpacing' not in already_processed: - already_processed.add('zSpacing') - try: - self.z_spacing = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (zSpacing): %s' % exp) - value = find_attr_value_('xStart', node) - if value is not None and 'xStart' not in already_processed: - already_processed.add('xStart') - try: - self.x_start = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (xStart): %s' % exp) - value = find_attr_value_('yStart', node) - if value is not None and 'yStart' not in already_processed: - already_processed.add('yStart') - try: - self.y_start = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (yStart): %s' % exp) - value = find_attr_value_('zStart', node) - if value is not None and 'zStart' not in already_processed: - already_processed.add('zStart') - try: - self.z_start = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (zStart): %s' % exp) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("xSpacing", node) + if value is not None and "xSpacing" not in already_processed: + already_processed.add("xSpacing") + value = self.gds_parse_float(value, node, "xSpacing") + self.x_spacing = value + value = find_attr_value_("ySpacing", node) + if value is not None and "ySpacing" not in already_processed: + already_processed.add("ySpacing") + value = self.gds_parse_float(value, node, "ySpacing") + self.y_spacing = value + value = find_attr_value_("zSpacing", node) + if value is not None and 
"zSpacing" not in already_processed: + already_processed.add("zSpacing") + value = self.gds_parse_float(value, node, "zSpacing") + self.z_spacing = value + value = find_attr_value_("xStart", node) + if value is not None and "xStart" not in already_processed: + already_processed.add("xStart") + value = self.gds_parse_float(value, node, "xStart") + self.x_start = value + value = find_attr_value_("yStart", node) + if value is not None and "yStart" not in already_processed: + already_processed.add("yStart") + value = self.gds_parse_float(value, node, "yStart") + self.y_start = value + value = find_attr_value_("zStart", node) + if value is not None and "zStart" not in already_processed: + already_processed.add("zStart") + value = self.gds_parse_float(value, node, "zStart") + self.z_start = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass + + # end class SpaceStructure class Layout(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('spaces', 'NmlId', 0, 1, {'use': 'optional'}), - MemberSpec_('random', 'RandomLayout', 0, 0, {u'type': u'RandomLayout', u'name': u'random'}, 5), - MemberSpec_('grid', 'GridLayout', 0, 0, {u'type': u'GridLayout', u'name': u'grid'}, 5), - MemberSpec_('unstructured', 'UnstructuredLayout', 0, 0, {u'type': u'UnstructuredLayout', u'name': u'unstructured'}, 5), + MemberSpec_("spaces", "NmlId", 0, 1, {"use": "optional", "name": "spaces"}), + MemberSpec_( + "random", + "RandomLayout", + 0, + 0, + {"name": "random", "type": "RandomLayout"}, + 5, + ), + MemberSpec_( + "grid", "GridLayout", 0, 0, {"name": "grid", "type": "GridLayout"}, 5 + ), + MemberSpec_( + "unstructured", + "UnstructuredLayout", + 0, + 0, + {"name": "unstructured", "type": "UnstructuredLayout"}, + 5, + ), ] subclass = None superclass = None - def __init__(self, spaces=None, random=None, grid=None, unstructured=None, **kwargs_): + + def __init__( + self, + spaces=None, + 
random=None, + grid=None, + unstructured=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.spaces = _cast(None, spaces) + self.spaces_nsprefix_ = None self.random = random + self.random_nsprefix_ = None self.grid = grid + self.grid_nsprefix_ = None self.unstructured = unstructured + self.unstructured_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Layout) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Layout) if subclass is not None: return subclass(*args_, **kwargs_) if Layout.subclass: return Layout.subclass(*args_, **kwargs_) else: return Layout(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + + def _hasContent(self): if ( - self.random is not None or - self.grid is not None or - self.unstructured is not None + self.random is not None + or self.grid is not None + or self.unstructured is not None ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Layout', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Layout') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="Layout", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Layout") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Layout": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" 
showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Layout') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Layout', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Layout" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Layout", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Layout'): - if self.spaces is not None and 'spaces' not in already_processed: - already_processed.add('spaces') - outfile.write(' space=%s' % (quote_attrib(self.spaces), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Layout', fromsubclass_=False, pretty_print=True): + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Layout" + ): + if self.spaces is not None and "spaces" not in already_processed: + already_processed.add("spaces") + outfile.write( + " space=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.spaces), input_name="space" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' 
xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="Layout", + fromsubclass_=False, + pretty_print=True, + ): if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.random is not None: - self.random.export(outfile, level, namespaceprefix_, namespacedef_='', name_='random', pretty_print=pretty_print) + namespaceprefix_ = ( + self.random_nsprefix_ + ":" + if (UseCapturedNS_ and self.random_nsprefix_) + else "" + ) + self.random.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="random", + pretty_print=pretty_print, + ) if self.grid is not None: - self.grid.export(outfile, level, namespaceprefix_, namespacedef_='', name_='grid', pretty_print=pretty_print) + namespaceprefix_ = ( + self.grid_nsprefix_ + ":" + if (UseCapturedNS_ and self.grid_nsprefix_) + else "" + ) + self.grid.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="grid", + pretty_print=pretty_print, + ) if self.unstructured is not None: - self.unstructured.export(outfile, level, namespaceprefix_, namespacedef_='', name_='unstructured', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.unstructured_nsprefix_ + ":" + if (UseCapturedNS_ and self.unstructured_nsprefix_) + else "" + ) + self.unstructured.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="unstructured", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, 
attrs, already_processed): - value = find_attr_value_('space', node) - if value is not None and 'space' not in already_processed: - already_processed.add('space') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("space", node) + if value is not None and "space" not in already_processed: + already_processed.add("space") self.spaces = value - self.validate_NmlId(self.spaces) # validate type NmlId - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'random': + self.validate_NmlId(self.spaces) # validate type NmlId + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "random": obj_ = RandomLayout.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.random = obj_ - obj_.original_tagname_ = 'random' - elif nodeName_ == 'grid': + obj_.original_tagname_ = "random" + elif nodeName_ == "grid": obj_ = GridLayout.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.grid = obj_ - obj_.original_tagname_ = 'grid' - elif nodeName_ == 'unstructured': + obj_.original_tagname_ = "grid" + elif nodeName_ == "unstructured": obj_ = UnstructuredLayout.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.unstructured = obj_ - obj_.original_tagname_ = 'unstructured' + obj_.original_tagname_ = "unstructured" + + # end class Layout class UnstructuredLayout(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('number', 'xs:nonNegativeInteger', 0, 1, {'use': 'optional'}), + MemberSpec_( + "number", + "xs:nonNegativeInteger", + 0, + 1, + {"use": "optional", "name": "number"}, + ), ] subclass = None superclass = None - def __init__(self, number=None, **kwargs_): + + def __init__(self, number=None, gds_collector_=None, **kwargs_): + self.gds_collector_ 
= gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.number = _cast(int, number) + self.number_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, UnstructuredLayout) + CurrentSubclassModule_, UnstructuredLayout + ) if subclass is not None: return subclass(*args_, **kwargs_) if UnstructuredLayout.subclass: return UnstructuredLayout.subclass(*args_, **kwargs_) else: return UnstructuredLayout(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - ): + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='UnstructuredLayout', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('UnstructuredLayout') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="UnstructuredLayout", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("UnstructuredLayout") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "UnstructuredLayout": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='UnstructuredLayout') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, 
namespacedef_, name_='UnstructuredLayout', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='UnstructuredLayout'): - if self.number is not None and 'number' not in already_processed: - already_processed.add('number') - outfile.write(' number="%s"' % self.gds_format_integer(self.number, input_name='number')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='UnstructuredLayout', fromsubclass_=False, pretty_print=True): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="UnstructuredLayout", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="UnstructuredLayout", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="UnstructuredLayout", + ): + if self.number is not None and "number" not in already_processed: + already_processed.add("number") + outfile.write( + ' number="%s"' + % self.gds_format_integer(self.number, input_name="number") + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="UnstructuredLayout", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = 
node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('number', node) - if value is not None and 'number' not in already_processed: - already_processed.add('number') - try: - self.number = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("number", node) + if value is not None and "number" not in already_processed: + already_processed.add("number") + self.number = self.gds_parse_integer(value, node, "number") if self.number < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + raise_parse_error(node, "Invalid NonNegativeInteger") + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass + + # end class UnstructuredLayout class RandomLayout(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('number', 'xs:nonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('regions', 'NmlId', 0, 1, {'use': 'optional'}), + MemberSpec_( + "number", + "xs:nonNegativeInteger", + 0, + 1, + {"use": "optional", "name": "number"}, + ), + MemberSpec_("regions", "NmlId", 0, 1, {"use": "optional", "name": "regions"}), ] subclass = None superclass = None - def __init__(self, number=None, regions=None, **kwargs_): + + def __init__(self, number=None, regions=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + 
self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.number = _cast(int, number) + self.number_nsprefix_ = None self.regions = _cast(None, regions) + self.regions_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, RandomLayout) + subclass = getSubclassFromModule_(CurrentSubclassModule_, RandomLayout) if subclass is not None: return subclass(*args_, **kwargs_) if RandomLayout.subclass: return RandomLayout.subclass(*args_, **kwargs_) else: return RandomLayout(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): if ( - + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RandomLayout', pretty_print=True): - imported_ns_def_ = 
GenerateDSNamespaceDefs_.get('RandomLayout') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="RandomLayout", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("RandomLayout") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "RandomLayout": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RandomLayout') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RandomLayout', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RandomLayout'): - if self.number is not None and 'number' not in already_processed: - already_processed.add('number') - outfile.write(' number="%s"' % self.gds_format_integer(self.number, input_name='number')) - if self.regions is not None and 'regions' not in already_processed: - already_processed.add('regions') - outfile.write(' region=%s' % (quote_attrib(self.regions), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RandomLayout', fromsubclass_=False, pretty_print=True): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, 
namespaceprefix_, name_="RandomLayout" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="RandomLayout", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="RandomLayout", + ): + if self.number is not None and "number" not in already_processed: + already_processed.add("number") + outfile.write( + ' number="%s"' + % self.gds_format_integer(self.number, input_name="number") + ) + if self.regions is not None and "regions" not in already_processed: + already_processed.add("regions") + outfile.write( + " region=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.regions), input_name="region" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="RandomLayout", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('number', node) - if value is not None and 'number' not in already_processed: - already_processed.add('number') - try: - self.number = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + + def 
_buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("number", node) + if value is not None and "number" not in already_processed: + already_processed.add("number") + self.number = self.gds_parse_integer(value, node, "number") if self.number < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - value = find_attr_value_('region', node) - if value is not None and 'region' not in already_processed: - already_processed.add('region') + raise_parse_error(node, "Invalid NonNegativeInteger") + value = find_attr_value_("region", node) + if value is not None and "region" not in already_processed: + already_processed.add("region") self.regions = value - self.validate_NmlId(self.regions) # validate type NmlId - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + self.validate_NmlId(self.regions) # validate type NmlId + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass + + # end class RandomLayout class GridLayout(GeneratedsSuper): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('x_size', 'xs:nonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('y_size', 'xs:nonNegativeInteger', 0, 1, {'use': u'optional'}), - MemberSpec_('z_size', 'xs:nonNegativeInteger', 0, 1, {'use': u'optional'}), + MemberSpec_( + "x_size", + "xs:nonNegativeInteger", + 0, + 1, + {"use": "optional", "name": "x_size"}, + ), + MemberSpec_( + "y_size", + "xs:nonNegativeInteger", + 0, + 1, + {"use": "optional", "name": "y_size"}, + ), + MemberSpec_( + "z_size", + "xs:nonNegativeInteger", + 0, + 1, + {"use": "optional", "name": "z_size"}, + ), ] subclass = None superclass = None - def __init__(self, x_size=None, y_size=None, z_size=None, **kwargs_): + + def __init__( + self, x_size=None, y_size=None, z_size=None, gds_collector_=None, **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - 
self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.x_size = _cast(int, x_size) + self.x_size_nsprefix_ = None self.y_size = _cast(int, y_size) + self.y_size_nsprefix_ = None self.z_size = _cast(int, z_size) + self.z_size_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, GridLayout) + subclass = getSubclassFromModule_(CurrentSubclassModule_, GridLayout) if subclass is not None: return subclass(*args_, **kwargs_) if GridLayout.subclass: return GridLayout.subclass(*args_, **kwargs_) else: return GridLayout(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - ): + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GridLayout', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('GridLayout') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GridLayout", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("GridLayout") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "GridLayout": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GridLayout') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, 
name_='GridLayout', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GridLayout'): - if self.x_size is not None and 'x_size' not in already_processed: - already_processed.add('x_size') - outfile.write(' xSize="%s"' % self.gds_format_integer(self.x_size, input_name='xSize')) - if self.y_size is not None and 'y_size' not in already_processed: - already_processed.add('y_size') - outfile.write(' ySize="%s"' % self.gds_format_integer(self.y_size, input_name='ySize')) - if self.z_size is not None and 'z_size' not in already_processed: - already_processed.add('z_size') - outfile.write(' zSize="%s"' % self.gds_format_integer(self.z_size, input_name='zSize')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GridLayout', fromsubclass_=False, pretty_print=True): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GridLayout" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="GridLayout", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="GridLayout" + ): + if self.x_size is not None and "x_size" not in already_processed: + already_processed.add("x_size") + outfile.write( + ' xSize="%s"' % self.gds_format_integer(self.x_size, input_name="xSize") + ) + if self.y_size is not None and "y_size" not in already_processed: + already_processed.add("y_size") + outfile.write( + ' ySize="%s"' % 
self.gds_format_integer(self.y_size, input_name="ySize") + ) + if self.z_size is not None and "z_size" not in already_processed: + already_processed.add("z_size") + outfile.write( + ' zSize="%s"' % self.gds_format_integer(self.z_size, input_name="zSize") + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GridLayout", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('xSize', node) - if value is not None and 'xSize' not in already_processed: - already_processed.add('xSize') - try: - self.x_size = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("xSize", node) + if value is not None and "xSize" not in already_processed: + already_processed.add("xSize") + self.x_size = self.gds_parse_integer(value, node, "xSize") if self.x_size < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - value = find_attr_value_('ySize', node) - if value is not None and 'ySize' not in already_processed: - already_processed.add('ySize') - try: - self.y_size = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Invalid NonNegativeInteger") + value = 
find_attr_value_("ySize", node) + if value is not None and "ySize" not in already_processed: + already_processed.add("ySize") + self.y_size = self.gds_parse_integer(value, node, "ySize") if self.y_size < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - value = find_attr_value_('zSize', node) - if value is not None and 'zSize' not in already_processed: - already_processed.add('zSize') - try: - self.z_size = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Invalid NonNegativeInteger") + value = find_attr_value_("zSize", node) + if value is not None and "zSize" not in already_processed: + already_processed.add("zSize") + self.z_size = self.gds_parse_integer(value, node, "zSize") if self.z_size < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + raise_parse_error(node, "Invalid NonNegativeInteger") + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass + + # end class GridLayout class Instance(GeneratedsSuper): + """Instance -- Specifies a single instance of a component in a **population** ( placed at **location** ).""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('id', 'xs:nonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('i', 'xs:nonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('j', 'xs:nonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('k', 'xs:nonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('location', 'Location', 0, 0, {u'type': u'Location', u'name': u'location'}, None), + MemberSpec_( + "id", "xs:nonNegativeInteger", 0, 1, {"use": "optional", "name": "id"} + ), + MemberSpec_( + "i", "xs:nonNegativeInteger", 0, 1, {"use": "optional", "name": "i"} + ), + MemberSpec_( + "j", "xs:nonNegativeInteger", 0, 1, {"use": "optional", "name": "j"} + ), + MemberSpec_( + 
"k", "xs:nonNegativeInteger", 0, 1, {"use": "optional", "name": "k"} + ), + MemberSpec_( + "location", "Location", 0, 0, {"name": "location", "type": "Location"}, None + ), ] subclass = None superclass = None - def __init__(self, id=None, i=None, j=None, k=None, location=None, **kwargs_): + + def __init__( + self, + id=None, + i=None, + j=None, + k=None, + location=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.id = _cast(int, id) + self.id_nsprefix_ = None self.i = _cast(int, i) + self.i_nsprefix_ = None self.j = _cast(int, j) + self.j_nsprefix_ = None self.k = _cast(int, k) + self.k_nsprefix_ = None self.location = location + self.location_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Instance) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Instance) if subclass is not None: return subclass(*args_, **kwargs_) if Instance.subclass: return Instance.subclass(*args_, **kwargs_) else: return Instance(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - self.location is not None - ): + + def _hasContent(self): + if self.location is not None: return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Instance', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Instance') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="Instance", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Instance") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' 
+ eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Instance": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Instance') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Instance', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Instance" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Instance", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Instance'): - if self.id is not None and 'id' not in already_processed: - already_processed.add('id') - outfile.write(' id="%s"' % self.gds_format_integer(self.id, input_name='id')) - if self.i is not None and 'i' not in already_processed: - already_processed.add('i') - outfile.write(' i="%s"' % self.gds_format_integer(self.i, input_name='i')) - if self.j is not None and 'j' not in already_processed: - already_processed.add('j') - outfile.write(' j="%s"' % self.gds_format_integer(self.j, input_name='j')) - if self.k is not None and 'k' not in already_processed: - already_processed.add('k') - 
outfile.write(' k="%s"' % self.gds_format_integer(self.k, input_name='k')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Instance', fromsubclass_=False, pretty_print=True): + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Instance" + ): + if self.id is not None and "id" not in already_processed: + already_processed.add("id") + outfile.write( + ' id="%s"' % self.gds_format_integer(self.id, input_name="id") + ) + if self.i is not None and "i" not in already_processed: + already_processed.add("i") + outfile.write(' i="%s"' % self.gds_format_integer(self.i, input_name="i")) + if self.j is not None and "j" not in already_processed: + already_processed.add("j") + outfile.write(' j="%s"' % self.gds_format_integer(self.j, input_name="j")) + if self.k is not None and "k" not in already_processed: + already_processed.add("k") + outfile.write(' k="%s"' % self.gds_format_integer(self.k, input_name="k")) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="Instance", + fromsubclass_=False, + pretty_print=True, + ): if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.location is not None: - self.location.export(outfile, level, namespaceprefix_, namespacedef_='', name_='location', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.location_nsprefix_ + ":" + if (UseCapturedNS_ and self.location_nsprefix_) + else "" + ) + self.location.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="location", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - 
self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('id', node) - if value is not None and 'id' not in already_processed: - already_processed.add('id') - try: - self.id = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("id", node) + if value is not None and "id" not in already_processed: + already_processed.add("id") + self.id = self.gds_parse_integer(value, node, "id") if self.id < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - value = find_attr_value_('i', node) - if value is not None and 'i' not in already_processed: - already_processed.add('i') - try: - self.i = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Invalid NonNegativeInteger") + value = find_attr_value_("i", node) + if value is not None and "i" not in already_processed: + already_processed.add("i") + self.i = self.gds_parse_integer(value, node, "i") if self.i < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - value = find_attr_value_('j', node) - if value is not None and 'j' not in already_processed: - already_processed.add('j') - try: - self.j = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Invalid NonNegativeInteger") + value = find_attr_value_("j", node) + if value is not None and "j" not in already_processed: + already_processed.add("j") + self.j = self.gds_parse_integer(value, node, 
"j") if self.j < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - value = find_attr_value_('k', node) - if value is not None and 'k' not in already_processed: - already_processed.add('k') - try: - self.k = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + raise_parse_error(node, "Invalid NonNegativeInteger") + value = find_attr_value_("k", node) + if value is not None and "k" not in already_processed: + already_processed.add("k") + self.k = self.gds_parse_integer(value, node, "k") if self.k < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'location': + raise_parse_error(node, "Invalid NonNegativeInteger") + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "location": obj_ = Location.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.location = obj_ - obj_.original_tagname_ = 'location' - + obj_.original_tagname_ = "location" def __str__(self): - return "Instance "+str(self.id)+ (" at location: "+str(self.location) if self.location else "") + return ( + "Instance " + + str(self.id) + + (" at location: " + str(self.location) if self.location else "") + ) def __repr__(self): return str(self) + # end class Instance class Location(GeneratedsSuper): + """Location -- Specifies the ( x, y, z ) location of a single **instance** of a component in a **population** + \n + :param x: + :type x: none + :param y: + :type y: none + :param z: + :type z: none + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('x', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('y', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('z', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_("x", "xs:float", 0, 0, {"use": "required", "name": "x"}), + MemberSpec_("y", 
"xs:float", 0, 0, {"use": "required", "name": "y"}), + MemberSpec_("z", "xs:float", 0, 0, {"use": "required", "name": "z"}), ] subclass = None superclass = None - def __init__(self, x=None, y=None, z=None, **kwargs_): + + def __init__(self, x=None, y=None, z=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.x = _cast(float, x) + self.x_nsprefix_ = None self.y = _cast(float, y) + self.y_nsprefix_ = None self.z = _cast(float, z) + self.z_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Location) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Location) if subclass is not None: return subclass(*args_, **kwargs_) if Location.subclass: return Location.subclass(*args_, **kwargs_) else: return Location(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - ): + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Location', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Location') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Location", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Location") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Location": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - 
outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Location') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Location', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Location'): - if self.x is not None and 'x' not in already_processed: - already_processed.add('x') - outfile.write(' x="%s"' % self.gds_format_float(self.x, input_name='x')) - if self.y is not None and 'y' not in already_processed: - already_processed.add('y') - outfile.write(' y="%s"' % self.gds_format_float(self.y, input_name='y')) - if self.z is not None and 'z' not in already_processed: - already_processed.add('z') - outfile.write(' z="%s"' % self.gds_format_float(self.z, input_name='z')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Location', fromsubclass_=False, pretty_print=True): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Location" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Location", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Location" + ): + if self.x is not None and "x" not in already_processed: + already_processed.add("x") + outfile.write(' 
x="%s"' % self.gds_format_float(self.x, input_name="x")) + if self.y is not None and "y" not in already_processed: + already_processed.add("y") + outfile.write(' y="%s"' % self.gds_format_float(self.y, input_name="y")) + if self.z is not None and "z" not in already_processed: + already_processed.add("z") + outfile.write(' z="%s"' % self.gds_format_float(self.z, input_name="z")) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Location", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('x', node) - if value is not None and 'x' not in already_processed: - already_processed.add('x') - try: - self.x = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (x): %s' % exp) - value = find_attr_value_('y', node) - if value is not None and 'y' not in already_processed: - already_processed.add('y') - try: - self.y = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (y): %s' % exp) - value = find_attr_value_('z', node) - if value is not None and 'z' not in already_processed: - already_processed.add('z') - try: - self.z = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (z): %s' % exp) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + + def 
_buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("x", node) + if value is not None and "x" not in already_processed: + already_processed.add("x") + value = self.gds_parse_float(value, node, "x") + self.x = value + value = find_attr_value_("y", node) + if value is not None and "y" not in already_processed: + already_processed.add("y") + value = self.gds_parse_float(value, node, "y") + self.y = value + value = find_attr_value_("z", node) + if value is not None and "z" not in already_processed: + already_processed.add("z") + value = self.gds_parse_float(value, node, "z") + self.z = value + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass - def _format(self,value): + def _format(self, value): - if int(value)==value: + if int(value) == value: return str(int(value)) else: - return '%.4f' % value + return "%.4f" % value def __str__(self): - return "("+ self._format(self.x) +", "+ self._format(self.y) +", "+ self._format(self.z) +")" + return ( + "(" + + self._format(self.x) + + ", " + + self._format(self.y) + + ", " + + self._format(self.z) + + ")" + ) def __repr__(self): return str(self) + # end class Location class SynapticConnection(GeneratedsSuper): - """Single explicit connection. Introduced to test connections in LEMS. 
- Will probably be removed in favour of connections wrapped in - projection element""" + """SynapticConnection -- Explicit event connection between named components, which gets processed via a new instance of a **synapse** component which is created on the target component""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('from_', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('to', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('synapse', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('destination', 'NmlId', 0, 1, {'use': u'optional'}), + MemberSpec_("from_", "xs:string", 0, 0, {"use": "required", "name": "from_"}), + MemberSpec_("to", "xs:string", 0, 0, {"use": "required", "name": "to"}), + MemberSpec_( + "synapse", "xs:string", 0, 0, {"use": "required", "name": "synapse"} + ), + MemberSpec_( + "destination", "NmlId", 0, 1, {"use": "optional", "name": "destination"} + ), ] subclass = None superclass = None - def __init__(self, from_=None, to=None, synapse=None, destination=None, **kwargs_): + + def __init__( + self, + from_=None, + to=None, + synapse=None, + destination=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.from_ = _cast(None, from_) + self.from__nsprefix_ = None self.to = _cast(None, to) + self.to_nsprefix_ = None self.synapse = _cast(None, synapse) + self.synapse_nsprefix_ = None self.destination = _cast(None, destination) + self.destination_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, SynapticConnection) + CurrentSubclassModule_, SynapticConnection + ) if subclass is not None: return subclass(*args_, **kwargs_) if SynapticConnection.subclass: return 
SynapticConnection.subclass(*args_, **kwargs_) else: return SynapticConnection(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): if ( - + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SynapticConnection', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SynapticConnection') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SynapticConnection", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SynapticConnection") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "SynapticConnection": name_ = 
self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SynapticConnection') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SynapticConnection', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SynapticConnection'): - if self.from_ is not None and 'from_' not in already_processed: - already_processed.add('from_') - outfile.write(' from=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.from_), input_name='from')), )) - if self.to is not None and 'to' not in already_processed: - already_processed.add('to') - outfile.write(' to=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.to), input_name='to')), )) - if self.synapse is not None and 'synapse' not in already_processed: - already_processed.add('synapse') - outfile.write(' synapse=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.synapse), input_name='synapse')), )) - if self.destination is not None and 'destination' not in already_processed: - already_processed.add('destination') - outfile.write(' destination=%s' % (quote_attrib(self.destination), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SynapticConnection', fromsubclass_=False, pretty_print=True): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + 
namespaceprefix_, + name_="SynapticConnection", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SynapticConnection", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="SynapticConnection", + ): + if self.from_ is not None and "from_" not in already_processed: + already_processed.add("from_") + outfile.write( + " from=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.from_), input_name="from" + ) + ), + ) + ) + if self.to is not None and "to" not in already_processed: + already_processed.add("to") + outfile.write( + " to=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.to), input_name="to") + ), + ) + ) + if self.synapse is not None and "synapse" not in already_processed: + already_processed.add("synapse") + outfile.write( + " synapse=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.synapse), input_name="synapse" + ) + ), + ) + ) + if self.destination is not None and "destination" not in already_processed: + already_processed.add("destination") + outfile.write( + " destination=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.destination), input_name="destination" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SynapticConnection", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, 
node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('from', node) - if value is not None and 'from' not in already_processed: - already_processed.add('from') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("from", node) + if value is not None and "from" not in already_processed: + already_processed.add("from") self.from_ = value - value = find_attr_value_('to', node) - if value is not None and 'to' not in already_processed: - already_processed.add('to') + value = find_attr_value_("to", node) + if value is not None and "to" not in already_processed: + already_processed.add("to") self.to = value - value = find_attr_value_('synapse', node) - if value is not None and 'synapse' not in already_processed: - already_processed.add('synapse') + value = find_attr_value_("synapse", node) + if value is not None and "synapse" not in already_processed: + already_processed.add("synapse") self.synapse = value - value = find_attr_value_('destination', node) - if value is not None and 'destination' not in already_processed: - already_processed.add('destination') + value = find_attr_value_("destination", node) + if value is not None and "destination" not in already_processed: + already_processed.add("destination") self.destination = value - self.validate_NmlId(self.destination) # validate type NmlId - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + self.validate_NmlId(self.destination) # validate type NmlId + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass - def _get_cell_id(self,ref): + def _get_cell_id(self, ref): """Get cell ID""" - if '[' in ref: - return 
int(ref.split('[')[1].split(']')[0]) + if "[" in ref: + return int(ref.split("[")[1].split("]")[0]) else: - return int(ref.split('/')[2]) + return int(ref.split("/")[2]) - def _get_population(self,ref): + def _get_population(self, ref): """Get population""" - if '[' in ref: - return ref.split('[')[0] + if "[" in ref: + return ref.split("[")[0] else: - return ref.split('/')[0] + return ref.split("/")[0] def __str__(self): - dest = self.destination if self.destination else 'unspecified' - return "Synaptic connection from "+str(self._get_population(self.from_))+"(cell "+str(self._get_cell_id(self.from_))+ ") -> "+str(self._get_population(self.to))+"(cell "+str(self._get_cell_id(self.to))+"), syn: "+self.synapse+", destination: "+dest - + dest = self.destination if self.destination else "unspecified" + return ( + "Synaptic connection from " + + str(self._get_population(self.from_)) + + "(cell " + + str(self._get_cell_id(self.from_)) + + ") -> " + + str(self._get_population(self.to)) + + "(cell " + + str(self._get_cell_id(self.to)) + + "), syn: " + + self.synapse + + ", destination: " + + dest + ) # end class SynapticConnection class ExplicitInput(GeneratedsSuper): - """Single explicit input. Introduced to test inputs in LEMS. 
Will - probably be removed in favour of inputs wrapped in inputList - element""" + """ExplicitInput -- An explicit input ( anything which extends **basePointCurrent** ) to a target cell in a population""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('target', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('input', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('destination', 'xs:string', 0, 1, {'use': 'optional'}), + MemberSpec_("target", "xs:string", 0, 0, {"use": "required", "name": "target"}), + MemberSpec_("input", "xs:string", 0, 0, {"use": "required", "name": "input"}), + MemberSpec_( + "destination", "xs:string", 0, 1, {"use": "optional", "name": "destination"} + ), ] subclass = None superclass = None - def __init__(self, target=None, input=None, destination=None, **kwargs_): + + def __init__( + self, target=None, input=None, destination=None, gds_collector_=None, **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.target = _cast(None, target) + self.target_nsprefix_ = None self.input = _cast(None, input) + self.input_nsprefix_ = None self.destination = _cast(None, destination) + self.destination_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ExplicitInput) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ExplicitInput) if subclass is not None: return subclass(*args_, **kwargs_) if ExplicitInput.subclass: return ExplicitInput.subclass(*args_, **kwargs_) else: return ExplicitInput(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - ): + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', 
namespacedef_='', name_='ExplicitInput', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExplicitInput') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ExplicitInput", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ExplicitInput") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ExplicitInput": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExplicitInput') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExplicitInput', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExplicitInput'): - if self.target is not None and 'target' not in already_processed: - already_processed.add('target') - outfile.write(' target=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.target), input_name='target')), )) - if self.input is not None and 'input' not in already_processed: - already_processed.add('input') - outfile.write(' input=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.input), input_name='input')), )) - if self.destination is not None and 'destination' not in already_processed: - already_processed.add('destination') - outfile.write(' destination=%s' % 
(self.gds_encode(self.gds_format_string(quote_attrib(self.destination), input_name='destination')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExplicitInput', fromsubclass_=False, pretty_print=True): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ExplicitInput" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ExplicitInput", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ExplicitInput", + ): + if self.target is not None and "target" not in already_processed: + already_processed.add("target") + outfile.write( + " target=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.target), input_name="target" + ) + ), + ) + ) + if self.input is not None and "input" not in already_processed: + already_processed.add("input") + outfile.write( + " input=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.input), input_name="input" + ) + ), + ) + ) + if self.destination is not None and "destination" not in already_processed: + already_processed.add("destination") + outfile.write( + " destination=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.destination), input_name="destination" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ExplicitInput", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if 
SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('target', node) - if value is not None and 'target' not in already_processed: - already_processed.add('target') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("target", node) + if value is not None and "target" not in already_processed: + already_processed.add("target") self.target = value - value = find_attr_value_('input', node) - if value is not None and 'input' not in already_processed: - already_processed.add('input') + value = find_attr_value_("input", node) + if value is not None and "input" not in already_processed: + already_processed.add("input") self.input = value - value = find_attr_value_('destination', node) - if value is not None and 'destination' not in already_processed: - already_processed.add('destination') + value = find_attr_value_("destination", node) + if value is not None and "destination" not in already_processed: + already_processed.add("destination") self.destination = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass def _get_cell_id(self, id_string): - if '[' in id_string: - return int(id_string.split('[')[1].split(']')[0]) + if "[" in id_string: + return int(id_string.split("[")[1].split("]")[0]) else: - return int(id_string.split('/')[2]) + return int(id_string.split("/")[2]) def get_target_cell_id(self): + """Get ID of target 
cell.""" return self._get_cell_id(self.target) def get_segment_id(self): + """Get the ID of the segment. + Returns 0 if segment_id was not set. + """ return int(self.segment_id) if self.segment_id else 0 def get_fraction_along(self): + """Get fraction along. + + Returns 0.5 is fraction_along was not set. + """ return float(self.fraction_along) if self.fraction_along else 0.5 def __str__(self): - return "Input "+str(self.id)+": "+str(self.get_target_cell_id())+":"+str(self.get_segment_id())+"("+'%.6f'%self.get_fraction_along()+")" - - - def get_target_cell_id(self,): + return ( + "Input " + + str(self.id) + + ": " + + str(self.get_target_cell_id()) + + ":" + + str(self.get_segment_id()) + + "(" + + "%.6f" % self.get_fraction_along() + + ")" + ) + + def get_target_cell_id( + self, + ): """Get target cell ID""" - if '[' in self.target: - return int(self.target.split('[')[1].split(']')[0]) + if "[" in self.target: + return int(self.target.split("[")[1].split("]")[0]) else: - return int(self.target.split('/')[2]) + return int(self.target.split("/")[2]) - def get_target_population(self,): + def get_target_population( + self, + ): """Get target population.""" - if '[' in self.target: - return self.target.split('[')[0] + if "[" in self.target: + return self.target.split("[")[0] else: - return self.target.split('/')[0] + return self.target.split("/")[0] def __str__(self): - dest = self.destination if self.destination else 'unspecified' - return "Explicit Input of type "+str(self.input)+" to "+self.get_target_population()+"(cell "+str(self.get_target_cell_id())+ "), destination: "+dest - + dest = self.destination if self.destination else "unspecified" + return ( + "Explicit Input of type " + + str(self.input) + + " to " + + self.get_target_population() + + "(cell " + + str(self.get_target_cell_id()) + + "), destination: " + + dest + ) # end class ExplicitInput class Input(GeneratedsSuper): - """Individual input to the cell specified by target""" + """Input -- Specifies a 
single input to a **target,** optionally giving the **segmentId** ( default 0 ) and **fractionAlong** the segment ( default 0. 5 ).""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('id', 'NonNegativeInteger', 0, 0, {'use': u'required'}), - MemberSpec_('target', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('destination', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('segment_id', 'NonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('fraction_along', 'ZeroToOne', 0, 1, {'use': 'optional'}), + MemberSpec_( + "id", "NonNegativeInteger", 0, 0, {"use": "required", "name": "id"} + ), + MemberSpec_("target", "xs:string", 0, 0, {"use": "required", "name": "target"}), + MemberSpec_( + "destination", "NmlId", 0, 0, {"use": "required", "name": "destination"} + ), + MemberSpec_( + "segment_id", + "NonNegativeInteger", + 0, + 1, + {"use": "optional", "name": "segment_id"}, + ), + MemberSpec_( + "fraction_along", + "ZeroToOne", + 0, + 1, + {"use": "optional", "name": "fraction_along"}, + ), ] subclass = None superclass = None - def __init__(self, id=None, target=None, destination=None, segment_id=None, fraction_along=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + id=None, + target=None, + destination=None, + segment_id=None, + fraction_along=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.id = _cast(int, id) + self.id_nsprefix_ = None self.target = _cast(None, target) + self.target_nsprefix_ = None self.destination = _cast(None, destination) + self.destination_nsprefix_ = None self.segment_id = _cast(int, segment_id) + self.segment_id_nsprefix_ = None self.fraction_along = _cast(float, fraction_along) + self.fraction_along_nsprefix_ = None 
self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Input) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Input) if subclass is not None: return subclass(*args_, **kwargs_) if Input.subclass: return Input.subclass(*args_, **kwargs_) else: return Input(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' + % { + "value": value, + "lineno": lineno, + } + ) + return False pass + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + def validate_ZeroToOne(self, value): # Validate type ZeroToOne, a restriction on xs:float. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, float): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (float)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if value < 0: - warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on ZeroToOne' % {"value" : value} ) + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on ZeroToOne' + % {"value": value, "lineno": lineno} + ) + result = False if value > 1: - warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on ZeroToOne' % {"value" : value} ) - def hasContent_(self): - if ( + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s does not match xsd maxInclusive restriction on ZeroToOne' + % {"value": value, "lineno": lineno} + ) + result = False - ): + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Input', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Input') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Input", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Input") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Input": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % 
(namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Input') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Input', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Input'): - if self.id is not None and 'id' not in already_processed: - already_processed.add('id') - outfile.write(' id=%s' % (quote_attrib(self.id), )) - if self.target is not None and 'target' not in already_processed: - already_processed.add('target') - outfile.write(' target=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.target), input_name='target')), )) - if self.destination is not None and 'destination' not in already_processed: - already_processed.add('destination') - outfile.write(' destination=%s' % (quote_attrib(self.destination), )) - if self.segment_id is not None and 'segment_id' not in already_processed: - already_processed.add('segment_id') - outfile.write(' segmentId=%s' % (quote_attrib(self.segment_id), )) - if self.fraction_along is not None and 'fraction_along' not in already_processed: - already_processed.add('fraction_along') - outfile.write(' fractionAlong=%s' % (quote_attrib(self.fraction_along), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Input" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + 
namespaceprefix_, + namespacedef_, + name_="Input", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Input" + ): + if self.id is not None and "id" not in already_processed: + already_processed.add("id") + outfile.write( + ' id="%s"' % self.gds_format_integer(self.id, input_name="id") + ) + if self.target is not None and "target" not in already_processed: + already_processed.add("target") + outfile.write( + " target=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.target), input_name="target" + ) + ), + ) + ) + if self.destination is not None and "destination" not in already_processed: + already_processed.add("destination") + outfile.write( + " destination=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.destination), input_name="destination" + ) + ), + ) + ) + if self.segment_id is not None and "segment_id" not in already_processed: + already_processed.add("segment_id") + outfile.write( + ' segmentId="%s"' + % self.gds_format_integer(self.segment_id, input_name="segmentId") + ) + if ( + self.fraction_along is not None + and "fraction_along" not in already_processed + ): + already_processed.add("fraction_along") + outfile.write( + ' fractionAlong="%s"' + % self.gds_format_float(self.fraction_along, input_name="fractionAlong") + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Input', fromsubclass_=False, pretty_print=True): + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + 
outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Input", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('id', node) - if value is not None and 'id' not in already_processed: - already_processed.add('id') - try: - self.id = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("id", node) + if value is not None and "id" not in already_processed: + already_processed.add("id") + self.id = self.gds_parse_integer(value, node, "id") if self.id < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - self.validate_NonNegativeInteger(self.id) # validate type NonNegativeInteger - value = find_attr_value_('target', node) - if value is not None and 'target' not in already_processed: - already_processed.add('target') + raise_parse_error(node, "Invalid NonNegativeInteger") + self.validate_NonNegativeInteger( + self.id + ) # validate type NonNegativeInteger + value = find_attr_value_("target", node) + if value is not None and "target" not in already_processed: + 
already_processed.add("target") self.target = value - value = find_attr_value_('destination', node) - if value is not None and 'destination' not in already_processed: - already_processed.add('destination') + value = find_attr_value_("destination", node) + if value is not None and "destination" not in already_processed: + already_processed.add("destination") self.destination = value - self.validate_NmlId(self.destination) # validate type NmlId - value = find_attr_value_('segmentId', node) - if value is not None and 'segmentId' not in already_processed: - already_processed.add('segmentId') - try: - self.segment_id = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.validate_NmlId(self.destination) # validate type NmlId + value = find_attr_value_("segmentId", node) + if value is not None and "segmentId" not in already_processed: + already_processed.add("segmentId") + self.segment_id = self.gds_parse_integer(value, node, "segmentId") if self.segment_id < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - self.validate_NonNegativeInteger(self.segment_id) # validate type NonNegativeInteger - value = find_attr_value_('fractionAlong', node) - if value is not None and 'fractionAlong' not in already_processed: - already_processed.add('fractionAlong') - try: - self.fraction_along = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (fractionAlong): %s' % exp) - self.validate_ZeroToOne(self.fraction_along) # validate type ZeroToOne - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + raise_parse_error(node, "Invalid NonNegativeInteger") + self.validate_NonNegativeInteger( + self.segment_id + ) # validate type NonNegativeInteger + value = find_attr_value_("fractionAlong", node) + if value is not None and "fractionAlong" not in already_processed: + 
already_processed.add("fractionAlong") + value = self.gds_parse_float(value, node, "fractionAlong") + self.fraction_along = value + self.validate_ZeroToOne(self.fraction_along) # validate type ZeroToOne + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass def _get_cell_id(self, id_string): - if '[' in id_string: - return int(id_string.split('[')[1].split(']')[0]) + if "[" in id_string: + return int(id_string.split("[")[1].split("]")[0]) else: - return int(id_string.split('/')[2]) + return int(id_string.split("/")[2]) def get_target_cell_id(self): + """Get ID of target cell.""" return self._get_cell_id(self.target) def get_segment_id(self): + """Get the ID of the segment. + Returns 0 if segment_id was not set. + """ return int(self.segment_id) if self.segment_id else 0 def get_fraction_along(self): + """Get fraction along. + + Returns 0.5 if fraction_along was not set. + """ return float(self.fraction_along) if self.fraction_along else 0.5 def __str__(self): - return "Input "+str(self.id)+": "+str(self.get_target_cell_id())+":"+str(self.get_segment_id())+"("+'%.6f'%self.get_fraction_along()+")" + return ( + "Input " + + str(self.id) + + ": " + + str(self.get_target_cell_id()) + + ":" + + str(self.get_segment_id()) + + "(" + + "%.6f" % self.get_fraction_along() + + ")" + ) # end class Input class InputW(Input): - """Individual input to the cell specified by target. Includes setting - of _weight for the connection""" + """InputW -- Specifies input lists. Can set **weight** to scale individual inputs. 
+ \n + :param weight: + :type weight: none + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('weight', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_("weight", "xs:float", 0, 0, {"use": "required", "name": "weight"}), ] subclass = None superclass = Input - def __init__(self, id=None, target=None, destination=None, segment_id=None, fraction_along=None, weight=None, **kwargs_): + + def __init__( + self, + id=None, + target=None, + destination=None, + segment_id=None, + fraction_along=None, + weight=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(InputW, self).__init__(id, target, destination, segment_id, fraction_along, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("InputW"), self).__init__( + id, target, destination, segment_id, fraction_along, **kwargs_ + ) self.weight = _cast(float, weight) + self.weight_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, InputW) + subclass = getSubclassFromModule_(CurrentSubclassModule_, InputW) if subclass is not None: return subclass(*args_, **kwargs_) if InputW.subclass: return InputW.subclass(*args_, **kwargs_) else: return InputW(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(InputW, self).hasContent_() - ): + + def _hasContent(self): + if super(InputW, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InputW', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('InputW') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="InputW", + pretty_print=True, + ): + 
imported_ns_def_ = GenerateDSNamespaceDefs_.get("InputW") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "InputW": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InputW') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='InputW', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='InputW'): - super(InputW, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InputW') - if self.weight is not None and 'weight' not in already_processed: - already_processed.add('weight') - outfile.write(' weight="%s"' % self.gds_format_float(self.weight, input_name='weight')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InputW', fromsubclass_=False, pretty_print=True): - super(InputW, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="InputW" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, 
+ name_="InputW", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="InputW" + ): + super(InputW, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="InputW" + ) + if self.weight is not None and "weight" not in already_processed: + already_processed.add("weight") + outfile.write( + ' weight="%s"' % self.gds_format_float(self.weight, input_name="weight") + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="InputW", + fromsubclass_=False, + pretty_print=True, + ): + super(InputW, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('weight', node) - if value is not None and 'weight' not in already_processed: - already_processed.add('weight') - try: - self.weight = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (weight): %s' % exp) - super(InputW, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(InputW, self).buildChildren(child_, node, nodeName_, True) + + def 
_buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("weight", node) + if value is not None and "weight" not in already_processed: + already_processed.add("weight") + value = self.gds_parse_float(value, node, "weight") + self.weight = value + super(InputW, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(InputW, self)._buildChildren(child_, node, nodeName_, True) pass def get_weight(self): + """Get weight. + + If weight is not set, the default value of 1.0 is returned. + """ - return float(self.weight) if self.weight!=None else 1.0 + return float(self.weight) if self.weight != None else 1.0 def __str__(self): - return "Input (weight) "+str(self.id)+": "+str(self.get_target_cell_id())+":"+str(self.get_segment_id())+"("+'%.6f'%self.get_fraction_along()+"), weight: "+'%.6f'%self.get_weight() + return ( + "Input (weight) " + + str(self.id) + + ": " + + str(self.get_target_cell_id()) + + ":" + + str(self.get_segment_id()) + + "(" + + "%.6f" % self.get_fraction_along() + + "), weight: " + + "%.6f" % self.get_weight() + ) # end class InputW class BaseWithoutId(GeneratedsSuper): - """Base element without ID specified *yet*, e.g. for an element with a - particular requirement on its id which does not comply with - NmlId (e.g. Segment needs nonNegativeInteger).""" + """BaseWithoutId -- Base element without ID specified *yet*, e.g. for an element with a particular requirement on its id which does not comply with NmlId (e.g. 
Segment needs nonNegativeInteger).""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('neuro_lex_id', 'NeuroLexId', 0, 1, {'use': u'optional'}), + MemberSpec_( + "neuro_lex_id", + "NeuroLexId", + 0, + 1, + {"use": "optional", "name": "neuro_lex_id"}, + ), ] subclass = None superclass = None - def __init__(self, neuro_lex_id=None, extensiontype_=None, **kwargs_): + + def __init__( + self, neuro_lex_id=None, extensiontype_=None, gds_collector_=None, **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None self.neuro_lex_id = _cast(None, neuro_lex_id) + self.neuro_lex_id_nsprefix_ = None self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, BaseWithoutId) + subclass = getSubclassFromModule_(CurrentSubclassModule_, BaseWithoutId) if subclass is not None: return subclass(*args_, **kwargs_) if BaseWithoutId.subclass: return BaseWithoutId.subclass(*args_, **kwargs_) else: return BaseWithoutId(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NeuroLexId(self, value): # Validate type NeuroLexId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_NeuroLexId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NeuroLexId_patterns_, )) - validate_NeuroLexId_patterns_ = [[u'^[a-zA-Z0-9_:]*$']] - def hasContent_(self): if ( - + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_NeuroLexId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NeuroLexId_patterns_, + ) + ) + + validate_NeuroLexId_patterns_ = [["^([a-zA-Z0-9_:]*)$"]] + + def _hasContent(self): + if (): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseWithoutId', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BaseWithoutId') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseWithoutId", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BaseWithoutId") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "BaseWithoutId": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) 
- already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseWithoutId') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseWithoutId', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseWithoutId'): - if self.neuro_lex_id is not None and 'neuro_lex_id' not in already_processed: - already_processed.add('neuro_lex_id') - outfile.write(' neuroLexId=%s' % (quote_attrib(self.neuro_lex_id), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="BaseWithoutId" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BaseWithoutId", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BaseWithoutId", + ): + if self.neuro_lex_id is not None and "neuro_lex_id" not in already_processed: + already_processed.add("neuro_lex_id") + outfile.write( + " neuroLexId=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.neuro_lex_id), input_name="neuroLexId" + ) + ), + ) + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' 
xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseWithoutId', fromsubclass_=False, pretty_print=True): + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseWithoutId", + fromsubclass_=False, + pretty_print=True, + ): pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('neuroLexId', node) - if value is not None and 'neuroLexId' not in already_processed: - already_processed.add('neuroLexId') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("neuroLexId", node) + if value is not None and "neuroLexId" not in already_processed: + already_processed.add("neuroLexId") self.neuro_lex_id = value - self.validate_NeuroLexId(self.neuro_lex_id) # validate type NeuroLexId - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_NeuroLexId(self.neuro_lex_id) # validate type NeuroLexId + value = find_attr_value_("xsi:type", 
node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): pass -# end class BaseWithoutId + + def add(self, obj=None, hint=None, force=False): + """Generic function to allow easy addition of a new member to a NeuroML object. + + Without arguments, when `obj=None`, it simply calls the `info()` method + to provide the list of valid member types for the NeuroML class. + + Use `info(show_contents=True)` to see the valid members of this class, + and their current contents. + + :param obj: object member to add + :type obj: any NeuroML Type defined by the API + :param hint: member name to add to when there are multiple members that `obj` can be added to + :type hint: string + :param force: boolean to force addition when an obj has already been added previously + :type force: bool + + :raises Exception: if a member compatible to obj could not be found + :raises Exception: if multiple members can accept the object and no hint is provided. + """ + if not obj: + self.info() + return + + # getattr only returns the value of the provided member but one cannot + # then use this to modify the member. 
Using `vars` also allows us to + # modify the value + targets = [] + all_members = self.get_members() + for member in all_members: + # get_data_type() returns the type as a string, e.g.: 'IncludeType' + if member.get_data_type() == type(obj).__name__: + targets.append(member) + + if len(targets) == 0: + # no targets found + e = Exception( + """A member object of {} type could not be found in NeuroML class {}.\n{} + """.format( + type(obj).__name__, type(self).__name__, self.info() + ) + ) + raise e + elif len(targets) == 1: + # good, just add it + self.__add(obj, targets[0], force) + else: + # more than one target + if not hint: + err_string = """Multiple members can accept {}. Please provide the name of the variable using the `hint` argument to specify which member to add to:\n""".format( + type(obj).__name__ + ) + for t in targets: + err_string += "- {}\n".format(t.get_name()) + raise Exception(err_string) + + # use hint to figure out which target to use + for t in targets: + if hint == t.get_name(): + self.__add(obj, t, force) + break + + def __add(self, obj, member, force=False): + """Private method to add new member to a specified variable in a NeuroML object. + + :param obj: object member to add + :type obj: any NeuroML Type defined by the API + :param member: member variable name to add to when there are multiple members that `obj` can be added to + :type member: MemberSpec_ + :param force: boolean to force addition when an obj has already been added previously + :type force: bool + + """ + import warnings + + # A single value, not a list: + if member.get_container() == 0: + if force: + vars(self)[member.get_name()] = obj + else: + if vars(self)[member.get_name()]: + warnings.warn( + """{} has already been assigned. Use `force=True` to overwrite. 
Hint: you can make changes to the already added object as required without needing to re-add it because only references to the objects are added, not their values.""".format( + member.get_name() + ) + ) + else: + vars(self)[member.get_name()] = obj + # List + else: + # Do not use 'obj in ..' for membership check because it also + # returns true if an element with the same value exists in the + # container + # https://docs.python.org/3/reference/expressions.html#membership-test-operations + if force: + vars(self)[member.get_name()].append(obj) + else: + if any(obj is e for e in vars(self)[member.get_name()]): + warnings.warn( + """{} already exists in {}. Use `force=True` to force readdition. Hint: you can make changes to the already added object as required without needing to re-add it because only references to the objects are added, not their values.""".format( + obj, member.get_name() + ) + ) + else: + vars(self)[member.get_name()].append(obj) + + def get_members(self): + """Get member data items, also from ancestors. + + This function is required because generateDS does not include inherited + members in the member_data_items list for a derived class. So, for + example, while IonChannelHH has `gate_hh_rates` which it inherits from + IonChannel, IonChannelHH's `member_data_items_` is empty. It relies on + the IonChannel classes' `member_data_items_` list. + + :returns: list of members, including ones inherited from ancestors. + """ + import copy + + # create a copy by value + # if copied by reference (=), the member_data_items_ object variable is + # modified to a large list, greatly increasing the memory usage. + all_members = copy.copy(self.member_data_items_) + for c in type(self).__mro__: + try: + all_members.extend(c.member_data_items_) + except AttributeError: + pass + except TypeError: + pass + + # deduplicate + # TODO where are the duplicates coming from given that we're not + # calling this recursively? 
+ all_members = list(set(all_members)) + return all_members + + def info(self, show_contents=False): + """A helper function to get a list of members of this class. + + This is useful to quickly check what members can go into a particular + NeuroML class (which will match the Schema definitions). It lists these + members and notes whether they are "single" type elements (Child + elements) or "List" elements (Children elements). It will also note + whether a member is optional or required. + + See http://www.davekuhlman.org/generateDS.html#user-methods for more + information on the MemberSpec_ class that generateDS uses. + + :param show_contents: also prints out the contents of the members + :type show_contents: bool + + :returns: the string (for testing purposes) + """ + + info_str = "Valid members for {} are:\n".format(self.__class__.__name__) + for member in self.member_data_items_: + info_str += "* {} (class: {})\n".format(member.name, member.data_type) + if show_contents: + contents = getattr(self, member.get_name()) + info_str += " * Contents: {}\n\n".format(contents) + + info_str += "Please see the NeuroML standard schema documentation at https://docs.neuroml.org/Userdocs/NeuroMLv2.html for more information." 
+ print(info_str) + return info_str + + # end class BaseWithoutId class BaseNonNegativeIntegerId(BaseWithoutId): - """Anything which can have a unique (within its parent) id, which must - be an integer zero or greater.""" + """BaseNonNegativeIntegerId -- Anything which can have a unique (within its parent) id, which must be an integer zero or greater.""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('id', 'NonNegativeInteger', 0, 0, {'use': u'required'}), + MemberSpec_( + "id", "NonNegativeInteger", 0, 0, {"use": "required", "name": "id"} + ), ] subclass = None superclass = BaseWithoutId - def __init__(self, neuro_lex_id=None, id=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BaseNonNegativeIntegerId, self).__init__(neuro_lex_id, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("BaseNonNegativeIntegerId"), self).__init__( + neuro_lex_id, extensiontype_, **kwargs_ + ) self.id = _cast(int, id) + self.id_nsprefix_ = None self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, BaseNonNegativeIntegerId) + CurrentSubclassModule_, BaseNonNegativeIntegerId + ) if subclass is not None: return subclass(*args_, **kwargs_) if BaseNonNegativeIntegerId.subclass: return BaseNonNegativeIntegerId.subclass(*args_, **kwargs_) else: return BaseNonNegativeIntegerId(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. 
- if value is not None and Validate_simpletypes_: - pass - def hasContent_(self): if ( - super(BaseNonNegativeIntegerId, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + pass + + def _hasContent(self): + if super(BaseNonNegativeIntegerId, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseNonNegativeIntegerId', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BaseNonNegativeIntegerId') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseNonNegativeIntegerId", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BaseNonNegativeIntegerId") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "BaseNonNegativeIntegerId": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseNonNegativeIntegerId') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseNonNegativeIntegerId', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def 
exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseNonNegativeIntegerId'): - super(BaseNonNegativeIntegerId, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseNonNegativeIntegerId') - if self.id is not None and 'id' not in already_processed: - already_processed.add('id') - outfile.write(' id=%s' % (quote_attrib(self.id), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseNonNegativeIntegerId", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BaseNonNegativeIntegerId", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BaseNonNegativeIntegerId", + ): + super(BaseNonNegativeIntegerId, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseNonNegativeIntegerId", + ) + if self.id is not None and "id" not in already_processed: + already_processed.add("id") + outfile.write( + ' id="%s"' % self.gds_format_integer(self.id, input_name="id") + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseNonNegativeIntegerId', fromsubclass_=False, pretty_print=True): - 
super(BaseNonNegativeIntegerId, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseNonNegativeIntegerId", + fromsubclass_=False, + pretty_print=True, + ): + super(BaseNonNegativeIntegerId, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('id', node) - if value is not None and 'id' not in already_processed: - already_processed.add('id') - try: - self.id = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("id", node) + if value is not None and "id" not in already_processed: + already_processed.add("id") + self.id = self.gds_parse_integer(value, node, "id") if self.id < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - self.validate_NonNegativeInteger(self.id) # 
validate type NonNegativeInteger - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + raise_parse_error(node, "Invalid NonNegativeInteger") + self.validate_NonNegativeInteger( + self.id + ) # validate type NonNegativeInteger + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(BaseNonNegativeIntegerId, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BaseNonNegativeIntegerId, self).buildChildren(child_, node, nodeName_, True) + super(BaseNonNegativeIntegerId, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(BaseNonNegativeIntegerId, self)._buildChildren( + child_, node, nodeName_, True + ) pass + + # end class BaseNonNegativeIntegerId class Base(BaseWithoutId): - """Anything which can have a unique (within its parent) id of the form - NmlId (spaceless combination of letters, numbers and - underscore).""" + """Base -- Anything which can have a unique (within its parent) id of the form NmlId (spaceless combination of letters, numbers and underscore).""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('id', 'NmlId', 0, 0, {'use': u'required'}), + MemberSpec_("id", "NmlId", 0, 0, {"use": "required", "name": "id"}), ] subclass = None superclass = BaseWithoutId - def __init__(self, neuro_lex_id=None, id=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - 
super(Base, self).__init__(neuro_lex_id, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("Base"), self).__init__( + neuro_lex_id, extensiontype_, **kwargs_ + ) self.id = _cast(None, id) + self.id_nsprefix_ = None self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Base) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Base) if subclass is not None: return subclass(*args_, **kwargs_) if Base.subclass: return Base.subclass(*args_, **kwargs_) else: return Base(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): if ( - super(Base, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + + def _hasContent(self): + if super(Base, self)._hasContent(): return True else: return False - def export(self, outfile, level, 
namespaceprefix_='', namespacedef_='', name_='Base', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Base') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Base", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Base") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Base": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Base') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Base', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Base'): - super(Base, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Base') - if self.id is not None and 'id' not in already_processed: - already_processed.add('id') - outfile.write(' id=%s' % (quote_attrib(self.id), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Base" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + 
self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Base", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Base" + ): + super(Base, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Base" + ) + if self.id is not None and "id" not in already_processed: + already_processed.add("id") + outfile.write( + " id=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.id), input_name="id") + ), + ) + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Base', fromsubclass_=False, pretty_print=True): - super(Base, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Base", + fromsubclass_=False, + pretty_print=True, + ): + super(Base, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, 
node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('id', node) - if value is not None and 'id' not in already_processed: - already_processed.add('id') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("id", node) + if value is not None and "id" not in already_processed: + already_processed.add("id") self.id = value - self.validate_NmlId(self.id) # validate type NmlId - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_NmlId(self.id) # validate type NmlId + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(Base, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(Base, self).buildChildren(child_, node, nodeName_, True) + super(Base, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(Base, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class Base class Standalone(Base): - """Elements which can stand alone and be referenced by id, e.g. cell, - morphology.""" + """Standalone -- Elements which can stand alone and be referenced by id, e.g. 
cell, morphology.""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('metaid', 'MetaId', 0, 1, {'use': u'optional'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('properties', 'Property', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Property', u'name': u'property', u'minOccurs': u'0'}, None), - MemberSpec_('annotation', 'Annotation', 0, 1, {u'type': u'Annotation', u'name': u'annotation', u'minOccurs': u'0'}, None), + MemberSpec_("metaid", "MetaId", 0, 1, {"use": "optional", "name": "metaid"}), + MemberSpec_( + "notes", + ["Notes", "xs:string"], + 0, + 1, + {"minOccurs": "0", "name": "notes", "type": "xs:string"}, + None, + ), + MemberSpec_( + "properties", + "Property", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "property", + "type": "Property", + }, + None, + ), + MemberSpec_( + "annotation", + "Annotation", + 0, + 1, + {"minOccurs": "0", "name": "annotation", "type": "Annotation"}, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Standalone, self).__init__(neuro_lex_id, id, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("Standalone"), self).__init__( + neuro_lex_id, id, extensiontype_, **kwargs_ + ) self.metaid = _cast(None, metaid) + self.metaid_nsprefix_ = None self.notes = notes self.validate_Notes(self.notes) + self.notes_nsprefix_ = None if 
properties is None: self.properties = [] else: self.properties = properties + self.properties_nsprefix_ = None self.annotation = annotation + self.annotation_nsprefix_ = None self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Standalone) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Standalone) if subclass is not None: return subclass(*args_, **kwargs_) if Standalone.subclass: return Standalone.subclass(*args_, **kwargs_) else: return Standalone(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Notes(self, value): + result = True # Validate type Notes, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False pass + return result + def validate_MetaId(self, value): # Validate type MetaId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_MetaId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_MetaId_patterns_, )) - validate_MetaId_patterns_ = [[u'^[a-zA-Z0-9_]*$']] - def hasContent_(self): + self.validate_MetaId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_MetaId_patterns_, + ) + ) + + validate_MetaId_patterns_ = [["^([a-zA-Z0-9_]*)$"]] + + def _hasContent(self): if ( - self.notes is not None or - self.properties or - self.annotation is not None or - super(Standalone, self).hasContent_() + self.notes is not None + or self.properties + or self.annotation is not None + or super(Standalone, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Standalone', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Standalone') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="Standalone", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Standalone") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Standalone": name_ = self.original_tagname_ + if 
UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Standalone') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Standalone', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Standalone" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Standalone", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Standalone'): - super(Standalone, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Standalone') - if self.metaid is not None and 'metaid' not in already_processed: - already_processed.add('metaid') - outfile.write(' metaid=%s' % (quote_attrib(self.metaid), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Standalone" + ): + super(Standalone, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Standalone" + ) + if self.metaid is not None and "metaid" not 
in already_processed: + already_processed.add("metaid") + outfile.write( + " metaid=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.metaid), input_name="metaid" + ) + ), + ) + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Standalone', fromsubclass_=False, pretty_print=True): - super(Standalone, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="Standalone", + fromsubclass_=False, + pretty_print=True, + ): + super(Standalone, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.notes is not None: + namespaceprefix_ = ( + self.notes_nsprefix_ + ":" + if (UseCapturedNS_ and self.notes_nsprefix_) + else "" + ) showIndent(outfile, level, pretty_print) - outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) + outfile.write( + "<%snotes>%s%s" + % ( + namespaceprefix_, + self.gds_encode( + self.gds_format_string( + quote_xml(self.notes), input_name="notes" + ) + ), + namespaceprefix_, + eol_, + ) + ) for property_ in 
self.properties: - property_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='property', pretty_print=pretty_print) + namespaceprefix_ = ( + self.properties_nsprefix_ + ":" + if (UseCapturedNS_ and self.properties_nsprefix_) + else "" + ) + property_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="property", + pretty_print=pretty_print, + ) if self.annotation is not None: - self.annotation.export(outfile, level, namespaceprefix_, namespacedef_='', name_='annotation', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.annotation_nsprefix_ + ":" + if (UseCapturedNS_ and self.annotation_nsprefix_) + else "" + ) + self.annotation.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="annotation", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('metaid', node) - if value is not None and 'metaid' not in already_processed: - already_processed.add('metaid') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("metaid", node) + if value is not None and "metaid" not in already_processed: + already_processed.add("metaid") self.metaid = value - self.validate_MetaId(self.metaid) # validate type MetaId - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') 
+ self.validate_MetaId(self.metaid) # validate type MetaId + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(Standalone, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'notes': - notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') - self.notes = notes_ + super(Standalone, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "notes": + value_ = child_.text + value_ = self.gds_parse_string(value_, node, "notes") + value_ = self.gds_validate_string(value_, node, "notes") + self.notes = value_ + self.notes_nsprefix_ = child_.prefix # validate type Notes self.validate_Notes(self.notes) - elif nodeName_ == 'property': + elif nodeName_ == "property": obj_ = Property.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.properties.append(obj_) - obj_.original_tagname_ = 'property' - elif nodeName_ == 'annotation': + obj_.original_tagname_ = "property" + elif nodeName_ == "annotation": obj_ = Annotation.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.annotation = obj_ - obj_.original_tagname_ = 'annotation' - super(Standalone, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "annotation" + super(Standalone, self)._buildChildren(child_, node, nodeName_, True) + + # end class Standalone class SpikeSourcePoisson(Standalone): + """SpikeSourcePoisson -- Spike source, generating spikes according to a Poisson process. 
+ \n + :param start: + :type start: time + :param duration: + :type duration: time + :param rate: + :type rate: per_time + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('start', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('rate', 'Nml2Quantity_pertime', 0, 0, {'use': u'required'}), + MemberSpec_( + "start", "Nml2Quantity_time", 0, 0, {"use": "required", "name": "start"} + ), + MemberSpec_( + "duration", + "Nml2Quantity_time", + 0, + 0, + {"use": "required", "name": "duration"}, + ), + MemberSpec_( + "rate", "Nml2Quantity_pertime", 0, 0, {"use": "required", "name": "rate"} + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, start=None, duration=None, rate=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + start=None, + duration=None, + rate=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(SpikeSourcePoisson, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("SpikeSourcePoisson"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.start = _cast(None, start) + self.start_nsprefix_ = None self.duration = _cast(None, duration) + self.duration_nsprefix_ = None self.rate = _cast(None, rate) + self.rate_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, SpikeSourcePoisson) + CurrentSubclassModule_, SpikeSourcePoisson 
+ ) if subclass is not None: return subclass(*args_, **kwargs_) if SpikeSourcePoisson.subclass: return SpikeSourcePoisson.subclass(*args_, **kwargs_) else: return SpikeSourcePoisson(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$"] + ] + def validate_Nml2Quantity_pertime(self, value): # Validate type Nml2Quantity_pertime, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_pertime_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_pertime_patterns_, )) - validate_Nml2Quantity_pertime_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz)$']] - def hasContent_(self): if ( - super(SpikeSourcePoisson, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_pertime_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_pertime_patterns_, + ) + ) + + validate_Nml2Quantity_pertime_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz))$"] + ] + + def _hasContent(self): + if super(SpikeSourcePoisson, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeSourcePoisson', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SpikeSourcePoisson') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpikeSourcePoisson", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SpikeSourcePoisson") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == 
"SpikeSourcePoisson": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeSourcePoisson') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpikeSourcePoisson', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="SpikeSourcePoisson", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SpikeSourcePoisson", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpikeSourcePoisson'): - super(SpikeSourcePoisson, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeSourcePoisson') - if self.start is not None and 'start' not in already_processed: - already_processed.add('start') - outfile.write(' start=%s' % (quote_attrib(self.start), )) - if self.duration is not None and 'duration' not in already_processed: - already_processed.add('duration') - outfile.write(' duration=%s' % (quote_attrib(self.duration), )) - if self.rate is not None and 'rate' not in already_processed: - already_processed.add('rate') - outfile.write(' rate=%s' % (quote_attrib(self.rate), )) - def exportChildren(self, outfile, level, 
namespaceprefix_='', namespacedef_='', name_='SpikeSourcePoisson', fromsubclass_=False, pretty_print=True): - super(SpikeSourcePoisson, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="SpikeSourcePoisson", + ): + super(SpikeSourcePoisson, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="SpikeSourcePoisson", + ) + if self.start is not None and "start" not in already_processed: + already_processed.add("start") + outfile.write( + " start=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.start), input_name="start" + ) + ), + ) + ) + if self.duration is not None and "duration" not in already_processed: + already_processed.add("duration") + outfile.write( + " duration=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.duration), input_name="duration" + ) + ), + ) + ) + if self.rate is not None and "rate" not in already_processed: + already_processed.add("rate") + outfile.write( + " rate=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.rate), input_name="rate" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpikeSourcePoisson", + fromsubclass_=False, + pretty_print=True, + ): + super(SpikeSourcePoisson, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ 
= node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('start', node) - if value is not None and 'start' not in already_processed: - already_processed.add('start') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("start", node) + if value is not None and "start" not in already_processed: + already_processed.add("start") self.start = value - self.validate_Nml2Quantity_time(self.start) # validate type Nml2Quantity_time - value = find_attr_value_('duration', node) - if value is not None and 'duration' not in already_processed: - already_processed.add('duration') + self.validate_Nml2Quantity_time( + self.start + ) # validate type Nml2Quantity_time + value = find_attr_value_("duration", node) + if value is not None and "duration" not in already_processed: + already_processed.add("duration") self.duration = value - self.validate_Nml2Quantity_time(self.duration) # validate type Nml2Quantity_time - value = find_attr_value_('rate', node) - if value is not None and 'rate' not in already_processed: - already_processed.add('rate') + self.validate_Nml2Quantity_time( + self.duration + ) # validate type Nml2Quantity_time + value = find_attr_value_("rate", node) + if value is not None and "rate" not in already_processed: + already_processed.add("rate") self.rate = value - self.validate_Nml2Quantity_pertime(self.rate) # validate type Nml2Quantity_pertime - super(SpikeSourcePoisson, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(SpikeSourcePoisson, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_pertime( + 
self.rate + ) # validate type Nml2Quantity_pertime + super(SpikeSourcePoisson, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(SpikeSourcePoisson, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class SpikeSourcePoisson class InputList(Base): - """List of inputs to a population. Currents will be provided by the - specified component.""" + """InputList -- An explicit list of **input** s to a **population.**""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('populations', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('component', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('input', 'Input', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Input', u'name': u'input', u'minOccurs': u'0'}, None), - MemberSpec_('input_ws', 'InputW', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'InputW', u'name': u'inputW', u'minOccurs': u'0'}, None), + MemberSpec_( + "populations", "NmlId", 0, 0, {"use": "required", "name": "populations"} + ), + MemberSpec_( + "component", "NmlId", 0, 0, {"use": "required", "name": "component"} + ), + MemberSpec_( + "input", + "Input", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "input", + "type": "Input", + }, + None, + ), + MemberSpec_( + "input_ws", + "InputW", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "inputW", + "type": "InputW", + }, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, populations=None, component=None, input=None, input_ws=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + populations=None, + component=None, + input=None, + input_ws=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = 
kwargs_.get('parent_object_') - super(InputList, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("InputList"), self).__init__(neuro_lex_id, id, **kwargs_) self.populations = _cast(None, populations) + self.populations_nsprefix_ = None self.component = _cast(None, component) + self.component_nsprefix_ = None if input is None: self.input = [] else: self.input = input + self.input_nsprefix_ = None if input_ws is None: self.input_ws = [] else: self.input_ws = input_ws + self.input_ws_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, InputList) + subclass = getSubclassFromModule_(CurrentSubclassModule_, InputList) if subclass is not None: return subclass(*args_, **kwargs_) if InputList.subclass: return InputList.subclass(*args_, **kwargs_) else: return InputList(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): if ( - self.input or - self.input_ws or - super(InputList, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + + def _hasContent(self): + if self.input or self.input_ws or super(InputList, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InputList', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('InputList') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="InputList", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("InputList") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "InputList": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = 
self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InputList') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='InputList', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="InputList" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="InputList", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='InputList'): - super(InputList, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InputList') - if self.populations is not None and 'populations' not in already_processed: - already_processed.add('populations') - outfile.write(' population=%s' % (quote_attrib(self.populations), )) - if self.component is not None and 'component' not in already_processed: - already_processed.add('component') - outfile.write(' component=%s' % (quote_attrib(self.component), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InputList', fromsubclass_=False, pretty_print=True): - super(InputList, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + 
def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="InputList" + ): + super(InputList, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="InputList" + ) + if self.populations is not None and "populations" not in already_processed: + already_processed.add("populations") + outfile.write( + " population=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.populations), input_name="population" + ) + ), + ) + ) + if self.component is not None and "component" not in already_processed: + already_processed.add("component") + outfile.write( + " component=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.component), input_name="component" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="InputList", + fromsubclass_=False, + pretty_print=True, + ): + super(InputList, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for input_ in self.input: - input_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='input', pretty_print=pretty_print) + namespaceprefix_ = ( + self.input_nsprefix_ + ":" + if (UseCapturedNS_ and self.input_nsprefix_) + else "" + ) + input_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="input", + pretty_print=pretty_print, + ) for inputW_ in self.input_ws: - inputW_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='inputW', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.input_ws_nsprefix_ + ":" + if (UseCapturedNS_ and self.input_ws_nsprefix_) + else "" + ) + inputW_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="inputW", + 
pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('population', node) - if value is not None and 'population' not in already_processed: - already_processed.add('population') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("population", node) + if value is not None and "population" not in already_processed: + already_processed.add("population") self.populations = value - self.validate_NmlId(self.populations) # validate type NmlId - value = find_attr_value_('component', node) - if value is not None and 'component' not in already_processed: - already_processed.add('component') + self.validate_NmlId(self.populations) # validate type NmlId + value = find_attr_value_("component", node) + if value is not None and "component" not in already_processed: + already_processed.add("component") self.component = value - self.validate_NmlId(self.component) # validate type NmlId - super(InputList, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'input': + self.validate_NmlId(self.component) # validate type NmlId + super(InputList, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "input": class_obj_ = 
self.get_class_obj_(child_, Input) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.input.append(obj_) - obj_.original_tagname_ = 'input' - elif nodeName_ == 'inputW': + obj_.original_tagname_ = "input" + elif nodeName_ == "inputW": obj_ = InputW.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.input_ws.append(obj_) - obj_.original_tagname_ = 'inputW' - super(InputList, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "inputW" + super(InputList, self)._buildChildren(child_, node, nodeName_, True) def exportHdf5(self, h5file, h5Group): - #print("Exporting InputList: "+str(self.id)+" as HDF5") - + """Export to HDF5 file.""" + # print("Exporting InputList: "+str(self.id)+" as HDF5") import numpy - ilGroup = h5file.create_group(h5Group, 'inputList_'+self.id) + ilGroup = h5file.create_group(h5Group, "inputList_" + self.id) ilGroup._f_setattr("id", self.id) ilGroup._f_setattr("component", self.component) ilGroup._f_setattr("population", self.populations) @@ -7028,33 +16121,35 @@ def exportHdf5(self, h5file, h5Group): extra_cols = {} - num_tot = len(self.input)+len(self.input_ws) + num_tot = len(self.input) + len(self.input_ws) - if len(self.input_ws)>0: - extra_cols["column_"+str(cols)] = 'weight' - cols+=1 + if len(self.input_ws) > 0: + extra_cols["column_" + str(cols)] = "weight" + cols += 1 - #print("Exporting "+str(num_tot)+" inputs") + # print("Exporting "+str(num_tot)+" inputs") a = numpy.zeros([num_tot, cols], numpy.float32) - count=0 + count = 0 for input in self.input: - a[count,0] = input.id - a[count,1] = input.get_target_cell_id() - a[count,2] = input.get_segment_id() - a[count,3] = input.get_fraction_along() - count+=1 + a[count, 0] = input.id + a[count, 1] = input.get_target_cell_id() + a[count, 2] = input.get_segment_id() + a[count, 3] = input.get_fraction_along() + count += 1 for input in 
self.input_ws: - a[count,0] = input.id - a[count,1] = input.get_target_cell_id() - a[count,2] = input.get_segment_id() - a[count,3] = input.get_fraction_along() - a[count,4] = input.get_weight() - count+=1 + a[count, 0] = input.id + a[count, 1] = input.get_target_cell_id() + a[count, 2] = input.get_segment_id() + a[count, 3] = input.get_fraction_along() + a[count, 4] = input.get_weight() + count += 1 - array = h5file.create_carray(ilGroup, self.id, obj=a, title="Locations of inputs in "+ self.id) + array = h5file.create_carray( + ilGroup, self.id, obj=a, title="Locations of inputs in " + self.id + ) array._f_setattr("column_0", "id") array._f_setattr("column_1", "target_cell_id") @@ -7065,474 +16160,1045 @@ def exportHdf5(self, h5file, h5Group): def __str__(self): - return "Input list: "+self.id+" to "+self.populations+", component "+self.component - + return ( + "Input list: " + + self.id + + " to " + + self.populations + + ", component " + + self.component + ) # end class InputList class BaseConnection(BaseNonNegativeIntegerId): - """Base of all synaptic connections (chemical/electrical/analog, etc.) - inside projections""" - member_data_items_ = [ - ] + """BaseConnection -- Base of all synaptic connections (chemical/electrical/analog, etc.) 
inside projections""" + + __hash__ = GeneratedsSuper.__hash__ + member_data_items_ = [] subclass = None superclass = BaseNonNegativeIntegerId - def __init__(self, neuro_lex_id=None, id=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BaseConnection, self).__init__(neuro_lex_id, id, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("BaseConnection"), self).__init__( + neuro_lex_id, id, extensiontype_, **kwargs_ + ) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, BaseConnection) + subclass = getSubclassFromModule_(CurrentSubclassModule_, BaseConnection) if subclass is not None: return subclass(*args_, **kwargs_) if BaseConnection.subclass: return BaseConnection.subclass(*args_, **kwargs_) else: return BaseConnection(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(BaseConnection, self).hasContent_() - ): + + def _hasContent(self): + if super(BaseConnection, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseConnection', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BaseConnection') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseConnection", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BaseConnection") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is 
not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "BaseConnection": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConnection') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseConnection', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseConnection'): - super(BaseConnection, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConnection') - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="BaseConnection" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BaseConnection", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BaseConnection", + ): + super(BaseConnection, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="BaseConnection" + ) + if self.extensiontype_ is not None and "xsi:type" not in 
already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseConnection', fromsubclass_=False, pretty_print=True): - super(BaseConnection, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseConnection", + fromsubclass_=False, + pretty_print=True, + ): + super(BaseConnection, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in 
already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(BaseConnection, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BaseConnection, self).buildChildren(child_, node, nodeName_, True) + super(BaseConnection, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(BaseConnection, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class BaseConnection class BaseProjection(Base): - """Base for projection (set of synaptic connections) between two - populations""" + """BaseProjection -- Base for projection (set of synaptic connections) between two populations""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('presynaptic_population', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('postsynaptic_population', 'NmlId', 0, 0, {'use': u'required'}), + MemberSpec_( + "presynaptic_population", + "NmlId", + 0, + 0, + {"use": "required", "name": "presynaptic_population"}, + ), + MemberSpec_( + "postsynaptic_population", + "NmlId", + 0, + 0, + {"use": "required", "name": "postsynaptic_population"}, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, presynaptic_population=None, postsynaptic_population=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + presynaptic_population=None, + postsynaptic_population=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BaseProjection, self).__init__(neuro_lex_id, id, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + 
super(globals().get("BaseProjection"), self).__init__( + neuro_lex_id, id, extensiontype_, **kwargs_ + ) self.presynaptic_population = _cast(None, presynaptic_population) + self.presynaptic_population_nsprefix_ = None self.postsynaptic_population = _cast(None, postsynaptic_population) + self.postsynaptic_population_nsprefix_ = None self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, BaseProjection) + subclass = getSubclassFromModule_(CurrentSubclassModule_, BaseProjection) if subclass is not None: return subclass(*args_, **kwargs_) if BaseProjection.subclass: return BaseProjection.subclass(*args_, **kwargs_) else: return BaseProjection(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): if ( - super(BaseProjection, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + + def _hasContent(self): + if 
super(BaseProjection, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseProjection', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BaseProjection') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseProjection", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BaseProjection") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "BaseProjection": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseProjection') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseProjection', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseProjection'): - super(BaseProjection, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseProjection') - if self.presynaptic_population is not None and 'presynaptic_population' not in already_processed: - already_processed.add('presynaptic_population') - outfile.write(' presynapticPopulation=%s' % (quote_attrib(self.presynaptic_population), )) - if self.postsynaptic_population is not None and 'postsynaptic_population' not in already_processed: - 
already_processed.add('postsynaptic_population') - outfile.write(' postsynapticPopulation=%s' % (quote_attrib(self.postsynaptic_population), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="BaseProjection" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BaseProjection", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BaseProjection", + ): + super(BaseProjection, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="BaseProjection" + ) + if ( + self.presynaptic_population is not None + and "presynaptic_population" not in already_processed + ): + already_processed.add("presynaptic_population") + outfile.write( + " presynapticPopulation=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.presynaptic_population), + input_name="presynapticPopulation", + ) + ), + ) + ) + if ( + self.postsynaptic_population is not None + and "postsynaptic_population" not in already_processed + ): + already_processed.add("postsynaptic_population") + outfile.write( + " postsynapticPopulation=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.postsynaptic_population), + input_name="postsynapticPopulation", + ) + ), + ) + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' 
xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseProjection', fromsubclass_=False, pretty_print=True): - super(BaseProjection, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseProjection", + fromsubclass_=False, + pretty_print=True, + ): + super(BaseProjection, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('presynapticPopulation', node) - if value is not None and 'presynapticPopulation' not in already_processed: - already_processed.add('presynapticPopulation') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("presynapticPopulation", node) + if value is not None and "presynapticPopulation" not in already_processed: + already_processed.add("presynapticPopulation") 
self.presynaptic_population = value - self.validate_NmlId(self.presynaptic_population) # validate type NmlId - value = find_attr_value_('postsynapticPopulation', node) - if value is not None and 'postsynapticPopulation' not in already_processed: - already_processed.add('postsynapticPopulation') + self.validate_NmlId(self.presynaptic_population) # validate type NmlId + value = find_attr_value_("postsynapticPopulation", node) + if value is not None and "postsynapticPopulation" not in already_processed: + already_processed.add("postsynapticPopulation") self.postsynaptic_population = value - self.validate_NmlId(self.postsynaptic_population) # validate type NmlId - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_NmlId(self.postsynaptic_population) # validate type NmlId + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(BaseProjection, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BaseProjection, self).buildChildren(child_, node, nodeName_, True) + super(BaseProjection, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(BaseProjection, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class BaseProjection class CellSet(Base): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('select', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('__ANY__', '__ANY__', 1, 1, {u'maxOccurs': u'unbounded', u'processContents': u'skip', u'minOccurs': u'0'}, None), + MemberSpec_("select", "xs:string", 0, 0, {"use": "required", "name": "select"}), + MemberSpec_( + "__ANY__", + "__ANY__", + 1, + 1, + 
{"maxOccurs": "unbounded", "minOccurs": "0", "processContents": "skip"}, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, select=None, anytypeobjs_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + select=None, + anytypeobjs_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(CellSet, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("CellSet"), self).__init__(neuro_lex_id, id, **kwargs_) self.select = _cast(None, select) + self.select_nsprefix_ = None if anytypeobjs_ is None: self.anytypeobjs_ = [] else: self.anytypeobjs_ = anytypeobjs_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, CellSet) + subclass = getSubclassFromModule_(CurrentSubclassModule_, CellSet) if subclass is not None: return subclass(*args_, **kwargs_) if CellSet.subclass: return CellSet.subclass(*args_, **kwargs_) else: return CellSet(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - self.anytypeobjs_ or - super(CellSet, self).hasContent_() - ): + + def _hasContent(self): + if self.anytypeobjs_ or super(CellSet, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CellSet', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('CellSet') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="CellSet", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("CellSet") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if 
pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "CellSet": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CellSet') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CellSet', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="CellSet" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="CellSet", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CellSet'): - super(CellSet, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CellSet') - if self.select is not None and 'select' not in already_processed: - already_processed.add('select') - outfile.write(' select=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.select), input_name='select')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CellSet', fromsubclass_=False, pretty_print=True): - super(CellSet, self).exportChildren(outfile, level, namespaceprefix_, name_, True, 
pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="CellSet" + ): + super(CellSet, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="CellSet" + ) + if self.select is not None and "select" not in already_processed: + already_processed.add("select") + outfile.write( + " select=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.select), input_name="select" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="CellSet", + fromsubclass_=False, + pretty_print=True, + ): + super(CellSet, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' - else: - eol_ = '' - for obj_ in self.anytypeobjs_: - obj_.export(outfile, level, namespaceprefix_, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + eol_ = "\n" + else: + eol_ = "" + if not fromsubclass_: + for obj_ in self.anytypeobjs_: + showIndent(outfile, level, pretty_print) + outfile.write(obj_) + outfile.write("\n") + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('select', 
node) - if value is not None and 'select' not in already_processed: - already_processed.add('select') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("select", node) + if value is not None and "select" not in already_processed: + already_processed.add("select") self.select = value - super(CellSet, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - obj_ = self.gds_build_any(child_, 'CellSet') - if obj_ is not None: - self.add_anytypeobjs_(obj_) - super(CellSet, self).buildChildren(child_, node, nodeName_, True) + super(CellSet, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + content_ = self.gds_build_any(child_, "CellSet") + self.anytypeobjs_.append(content_) + super(CellSet, self)._buildChildren(child_, node, nodeName_, True) + + # end class CellSet class Population(Standalone): + """Population -- A population of components, with just one parameter for the **size,** i. e. number of components to create. Note: quite often this is used with type= **populationList** which means the size is determined by the number of **instance** s ( with **location** s ) in the list. The **size** attribute is still set, and there will be a validation error if this does not match the number in the list. 
+ \n + :param size: Number of instances of this Component to create when the population is instantiated + :type size: none + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('component', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('size', 'NonNegativeInteger', 0, 1, {'use': u'optional'}), - MemberSpec_('type', 'populationTypes', 0, 1, {'use': u'optional'}), - MemberSpec_('extracellular_properties', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('layout', 'Layout', 0, 1, {u'type': u'Layout', u'name': u'layout', u'minOccurs': u'0'}, 4), - MemberSpec_('instances', 'Instance', 1, 0, {u'maxOccurs': u'unbounded', u'type': u'Instance', u'name': u'instance'}, 4), + MemberSpec_( + "component", "NmlId", 0, 0, {"use": "required", "name": "component"} + ), + MemberSpec_( + "size", "NonNegativeInteger", 0, 1, {"use": "optional", "name": "size"} + ), + MemberSpec_( + "type", "populationTypes", 0, 1, {"use": "optional", "name": "type"} + ), + MemberSpec_( + "extracellular_properties", + "NmlId", + 0, + 1, + {"use": "optional", "name": "extracellular_properties"}, + ), + MemberSpec_( + "layout", + "Layout", + 0, + 1, + {"minOccurs": "0", "name": "layout", "type": "Layout"}, + 4, + ), + MemberSpec_( + "instances", + "Instance", + 1, + 0, + {"maxOccurs": "unbounded", "name": "instance", "type": "Instance"}, + 4, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, component=None, size=None, type=None, extracellular_properties=None, layout=None, instances=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + component=None, + size=None, + type=None, + extracellular_properties=None, + layout=None, + instances=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None 
self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Population, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("Population"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.component = _cast(None, component) + self.component_nsprefix_ = None self.size = _cast(int, size) + self.size_nsprefix_ = None self.type = _cast(None, type) + self.type_nsprefix_ = None self.extracellular_properties = _cast(None, extracellular_properties) + self.extracellular_properties_nsprefix_ = None self.layout = layout + self.layout_nsprefix_ = None if instances is None: self.instances = [] else: self.instances = instances + self.instances_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Population) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Population) if subclass is not None: return subclass(*args_, **kwargs_) if Population.subclass: return Population.subclass(*args_, **kwargs_) else: return Population(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' + % { + "value": value, + "lineno": lineno, + } + ) + return False pass + def validate_populationTypes(self, value): # Validate type populationTypes, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - value = str(value) - enumerations = ['population', 'populationList'] - enumeration_respectee = False - for enum in enumerations: - if value == enum: - enumeration_respectee = True - break - if not enumeration_respectee: - warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on populationTypes' % {"value" : value.encode("utf-8")} ) - def hasContent_(self): if ( - self.layout is not None or - self.instances or - super(Population, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + value = value + enumerations = ["population", "populationList"] + if value not in enumerations: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on populationTypes' + % {"value": encode_str_2_3(value), "lineno": lineno} + ) + result = False + + def _hasContent(self): + if ( + self.layout is not None + or self.instances + or super(Population, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Population', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Population') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="Population", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Population") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if 
self.original_tagname_ is not None and name_ == "Population": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Population') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Population', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Population" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Population", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Population'): - super(Population, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Population') - if self.component is not None and 'component' not in already_processed: - already_processed.add('component') - outfile.write(' component=%s' % (quote_attrib(self.component), )) - if self.size is not None and 'size' not in already_processed: - already_processed.add('size') - outfile.write(' size=%s' % (quote_attrib(self.size), )) - if self.type is not None and 'type' not in already_processed: - already_processed.add('type') - outfile.write(' type=%s' % (quote_attrib(self.type), )) - if self.extracellular_properties is not None and 
'extracellular_properties' not in already_processed: - already_processed.add('extracellular_properties') - outfile.write(' extracellularProperties=%s' % (quote_attrib(self.extracellular_properties), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Population', fromsubclass_=False, pretty_print=True): - super(Population, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Population" + ): + super(Population, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Population" + ) + if self.component is not None and "component" not in already_processed: + already_processed.add("component") + outfile.write( + " component=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.component), input_name="component" + ) + ), + ) + ) + if self.size is not None and "size" not in already_processed: + already_processed.add("size") + outfile.write( + ' size="%s"' % self.gds_format_integer(self.size, input_name="size") + ) + if self.type is not None and "type" not in already_processed: + already_processed.add("type") + outfile.write( + " type=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.type), input_name="type" + ) + ), + ) + ) + if ( + self.extracellular_properties is not None + and "extracellular_properties" not in already_processed + ): + already_processed.add("extracellular_properties") + outfile.write( + " extracellularProperties=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.extracellular_properties), + input_name="extracellularProperties", + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' 
xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="Population", + fromsubclass_=False, + pretty_print=True, + ): + super(Population, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.layout is not None: - self.layout.export(outfile, level, namespaceprefix_, namespacedef_='', name_='layout', pretty_print=pretty_print) + namespaceprefix_ = ( + self.layout_nsprefix_ + ":" + if (UseCapturedNS_ and self.layout_nsprefix_) + else "" + ) + self.layout.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="layout", + pretty_print=pretty_print, + ) for instance_ in self.instances: - instance_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='instance', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.instances_nsprefix_ + ":" + if (UseCapturedNS_ and self.instances_nsprefix_) + else "" + ) + instance_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="instance", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('component', node) - if value is not None and 'component' not in already_processed: - already_processed.add('component') + + def _buildAttributes(self, node, attrs, already_processed): + value = 
find_attr_value_("component", node) + if value is not None and "component" not in already_processed: + already_processed.add("component") self.component = value - self.validate_NmlId(self.component) # validate type NmlId - value = find_attr_value_('size', node) - if value is not None and 'size' not in already_processed: - already_processed.add('size') - try: - self.size = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.validate_NmlId(self.component) # validate type NmlId + value = find_attr_value_("size", node) + if value is not None and "size" not in already_processed: + already_processed.add("size") + self.size = self.gds_parse_integer(value, node, "size") if self.size < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - self.validate_NonNegativeInteger(self.size) # validate type NonNegativeInteger - value = find_attr_value_('type', node) - if value is not None and 'type' not in already_processed: - already_processed.add('type') + raise_parse_error(node, "Invalid NonNegativeInteger") + self.validate_NonNegativeInteger( + self.size + ) # validate type NonNegativeInteger + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") self.type = value - self.validate_populationTypes(self.type) # validate type populationTypes - value = find_attr_value_('extracellularProperties', node) - if value is not None and 'extracellularProperties' not in already_processed: - already_processed.add('extracellularProperties') + self.validate_populationTypes(self.type) # validate type populationTypes + value = find_attr_value_("extracellularProperties", node) + if value is not None and "extracellularProperties" not in already_processed: + already_processed.add("extracellularProperties") self.extracellular_properties = value - self.validate_NmlId(self.extracellular_properties) # validate type NmlId - super(Population, self).buildAttributes(node, 
attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'layout': + self.validate_NmlId(self.extracellular_properties) # validate type NmlId + super(Population, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "layout": obj_ = Layout.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.layout = obj_ - obj_.original_tagname_ = 'layout' - elif nodeName_ == 'instance': + obj_.original_tagname_ = "layout" + elif nodeName_ == "instance": obj_ = Instance.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.instances.append(obj_) - obj_.original_tagname_ = 'instance' - super(Population, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "instance" + super(Population, self)._buildChildren(child_, node, nodeName_, True) def exportHdf5(self, h5file, h5Group): - #print("Exporting Population: "+str(self.id)+" as HDF5") - + """Export to HDF5 file.""" + # print("Exporting Population: "+str(self.id)+" as HDF5") import numpy - popGroup = h5file.create_group(h5Group, 'population_'+self.id) + popGroup = h5file.create_group(h5Group, "population_" + self.id) popGroup._f_setattr("id", self.id) popGroup._f_setattr("component", self.component) for p in self.properties: - popGroup._f_setattr("property:"+p.tag, p.value) - + popGroup._f_setattr("property:" + p.tag, p.value) - if len(self.instances)>0: + if len(self.instances) > 0: colCount = 3 a = numpy.zeros([len(self.instances), colCount], numpy.float32) - count=0 + count = 0 for instance in self.instances: - a[count,0] = instance.location.x - a[count,1] = instance.location.y - a[count,2] = instance.location.z + a[count, 0] = instance.location.x + a[count, 1] = instance.location.y + a[count, 2] = instance.location.z - 
count=count+1 + count = count + 1 popGroup._f_setattr("size", count) popGroup._f_setattr("type", "populationList") - array = h5file.create_carray(popGroup, self.id, obj=a, title="Locations of cells in "+ self.id) + array = h5file.create_carray( + popGroup, self.id, obj=a, title="Locations of cells in " + self.id + ) array._f_setattr("column_0", "x") array._f_setattr("column_1", "y") array._f_setattr("column_2", "z") @@ -7541,504 +17207,1205 @@ def exportHdf5(self, h5file, h5Group): popGroup._f_setattr("size", self.size) def get_size(self): - return len(self.instances) if len(self.instances)>0 else (self.size if self.size else 0) + return ( + len(self.instances) + if len(self.instances) > 0 + else (self.size if self.size else 0) + ) def __str__(self): - return "Population: "+str(self.id)+" with "+str( self.get_size() )+" components of type "+(self.component if self.component else "???") - + return ( + "Population: " + + str(self.id) + + " with " + + str(self.get_size()) + + " components of type " + + (self.component if self.component else "???") + ) # end class Population class Region(Base): + """Region -- Initial attempt to specify 3D region for placing cells. Work in progress. . 
.""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('spaces', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('__ANY__', '__ANY__', 1, 1, {u'maxOccurs': u'unbounded', u'processContents': u'skip', u'minOccurs': u'0'}, None), + MemberSpec_("spaces", "NmlId", 0, 1, {"use": "optional", "name": "spaces"}), + MemberSpec_( + "__ANY__", + "__ANY__", + 1, + 1, + {"maxOccurs": "unbounded", "minOccurs": "0", "processContents": "skip"}, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, spaces=None, anytypeobjs_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + spaces=None, + anytypeobjs_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Region, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("Region"), self).__init__(neuro_lex_id, id, **kwargs_) self.spaces = _cast(None, spaces) + self.spaces_nsprefix_ = None if anytypeobjs_ is None: self.anytypeobjs_ = [] else: self.anytypeobjs_ = anytypeobjs_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Region) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Region) if subclass is not None: return subclass(*args_, **kwargs_) if Region.subclass: return Region.subclass(*args_, **kwargs_) else: return Region(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): if ( - self.anytypeobjs_ or - super(Region, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + + def _hasContent(self): + if self.anytypeobjs_ or super(Region, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Region', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Region') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="Region", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Region") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Region": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, 
pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Region') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Region', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Region" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Region", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Region'): - super(Region, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Region') - if self.spaces is not None and 'spaces' not in already_processed: - already_processed.add('spaces') - outfile.write(' space=%s' % (quote_attrib(self.spaces), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Region', fromsubclass_=False, pretty_print=True): - super(Region, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Region" + ): + super(Region, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Region" + ) + if self.spaces is not None and "spaces" not in 
already_processed: + already_processed.add("spaces") + outfile.write( + " space=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.spaces), input_name="space" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="Region", + fromsubclass_=False, + pretty_print=True, + ): + super(Region, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' - else: - eol_ = '' - for obj_ in self.anytypeobjs_: - obj_.export(outfile, level, namespaceprefix_, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + eol_ = "\n" + else: + eol_ = "" + if not fromsubclass_: + for obj_ in self.anytypeobjs_: + showIndent(outfile, level, pretty_print) + outfile.write(obj_) + outfile.write("\n") + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('space', node) - if value is not None and 'space' not in already_processed: - already_processed.add('space') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("space", node) + if value is not None and "space" not in already_processed: + already_processed.add("space") self.spaces = value - self.validate_NmlId(self.spaces) # validate type NmlId - super(Region, 
class Space(Base):
    """Space -- generated from the NeuroML 2 schema by generateDS.

    Holds an optional ``basedOn`` attribute (restricted to the
    ``allowedSpaces`` enumeration) and an optional ``structure`` child
    element of type ``SpaceStructure``.

    NOTE(review): this file is auto-generated (see regenerate-nml.sh);
    hand edits will be overwritten on regeneration.
    """

    __hash__ = GeneratedsSuper.__hash__
    # Member metadata used by the generateDS runtime and by get_by_id()-style
    # introspection elsewhere in this file.
    member_data_items_ = [
        MemberSpec_(
            "based_on", "allowedSpaces", 0, 1, {"use": "optional", "name": "based_on"}
        ),
        MemberSpec_(
            "structure",
            "SpaceStructure",
            0,
            1,
            {"minOccurs": "0", "name": "structure", "type": "SpaceStructure"},
            None,
        ),
    ]
    subclass = None
    superclass = Base

    def __init__(
        self,
        neuro_lex_id=None,
        id=None,
        based_on=None,
        structure=None,
        gds_collector_=None,
        **kwargs_
    ):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get("parent_object_")
        self.ns_prefix_ = None
        super(globals().get("Space"), self).__init__(neuro_lex_id, id, **kwargs_)
        self.based_on = _cast(None, based_on)
        self.based_on_nsprefix_ = None
        self.structure = structure
        self.structure_nsprefix_ = None

    def factory(*args_, **kwargs_):
        # Dispatch to a registered subclass module if one is configured.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(CurrentSubclassModule_, Space)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if Space.subclass:
            return Space.subclass(*args_, **kwargs_)
        else:
            return Space(*args_, **kwargs_)

    factory = staticmethod(factory)

    def validate_allowedSpaces(self, value):
        # Validate type allowedSpaces, a restriction on xs:string.
        # Only reports problems via the gds collector; does not raise.
        if (
            value is not None
            and Validate_simpletypes_
            and self.gds_collector_ is not None
        ):
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message(
                    'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)'
                    % {
                        "value": value,
                        "lineno": lineno,
                    }
                )
                return False
            value = value
            enumerations = [
                "Euclidean_1D",
                "Euclidean_2D",
                "Euclidean_3D",
                "Grid_1D",
                "Grid_2D",
                "Grid_3D",
            ]
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message(
                    'Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on allowedSpaces'
                    % {"value": encode_str_2_3(value), "lineno": lineno}
                )
                # Generator artifact: `result` is assigned but never used/returned.
                result = False

    def _hasContent(self):
        # True when this element would serialize with child content.
        if self.structure is not None or super(Space, self)._hasContent():
            return True
        else:
            return False

    def export(
        self,
        outfile,
        level,
        namespaceprefix_="",
        namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ',
        name_="Space",
        pretty_print=True,
    ):
        """Write this element as XML to ``outfile`` at indent ``level``."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get("Space")
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = "\n"
        else:
            eol_ = ""
        if self.original_tagname_ is not None and name_ == "Space":
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ":"
        showIndent(outfile, level, pretty_print)
        outfile.write(
            "<%s%s%s"
            % (
                namespaceprefix_,
                name_,
                namespacedef_ and " " + namespacedef_ or "",
            )
        )
        already_processed = set()
        self._exportAttributes(
            outfile, level, already_processed, namespaceprefix_, name_="Space"
        )
        if self._hasContent():
            outfile.write(">%s" % (eol_,))
            self._exportChildren(
                outfile,
                level + 1,
                namespaceprefix_,
                namespacedef_,
                name_="Space",
                pretty_print=pretty_print,
            )
            showIndent(outfile, level, pretty_print)
            # Fix: closing tag needs the '</%s%s>%s' format (tag markup was
            # lost in the corrupted text, leaving '%s' with a 3-tuple, which
            # would raise TypeError and never emit the closing tag).
            outfile.write("</%s%s>%s" % (namespaceprefix_, name_, eol_))
        else:
            outfile.write("/>%s" % (eol_,))

    def _exportAttributes(
        self, outfile, level, already_processed, namespaceprefix_="", name_="Space"
    ):
        super(Space, self)._exportAttributes(
            outfile, level, already_processed, namespaceprefix_, name_="Space"
        )
        if self.based_on is not None and "based_on" not in already_processed:
            already_processed.add("based_on")
            outfile.write(
                " basedOn=%s"
                % (
                    self.gds_encode(
                        self.gds_format_string(
                            quote_attrib(self.based_on), input_name="basedOn"
                        )
                    ),
                )
            )

    def _exportChildren(
        self,
        outfile,
        level,
        namespaceprefix_="",
        namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ',
        name_="Space",
        fromsubclass_=False,
        pretty_print=True,
    ):
        super(Space, self)._exportChildren(
            outfile,
            level,
            namespaceprefix_,
            namespacedef_,
            name_,
            True,
            pretty_print=pretty_print,
        )
        if pretty_print:
            eol_ = "\n"
        else:
            eol_ = ""
        if self.structure is not None:
            namespaceprefix_ = (
                self.structure_nsprefix_ + ":"
                if (UseCapturedNS_ and self.structure_nsprefix_)
                else ""
            )
            self.structure.export(
                outfile,
                level,
                namespaceprefix_,
                namespacedef_="",
                name_="structure",
                pretty_print=pretty_print,
            )

    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree ``node``; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self._buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self

    def _buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_("basedOn", node)
        if value is not None and "basedOn" not in already_processed:
            already_processed.add("basedOn")
            self.based_on = value
            self.validate_allowedSpaces(self.based_on)  # validate type allowedSpaces
        super(Space, self)._buildAttributes(node, attrs, already_processed)

    def _buildChildren(
        self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None
    ):
        if nodeName_ == "structure":
            obj_ = SpaceStructure.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.structure = obj_
            obj_.original_tagname_ = "structure"
        super(Space, self)._buildChildren(child_, node, nodeName_, True)


# end class Space
**input** s ) and/or **explicitInput** s. Note: often in NeuroML this will be of type **networkWithTemperature** if there are temperature dependent elements ( e. g. ion channels ).""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('type', 'networkTypes', 0, 1, {'use': u'optional'}), - MemberSpec_('temperature', 'Nml2Quantity_temperature', 0, 1, {'use': u'optional'}), - MemberSpec_('spaces', 'Space', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Space', u'name': u'space', u'minOccurs': u'0'}, None), - MemberSpec_('regions', 'Region', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Region', u'name': u'region', u'minOccurs': u'0'}, None), - MemberSpec_('extracellular_properties', 'ExtracellularPropertiesLocal', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ExtracellularPropertiesLocal', u'name': u'extracellularProperties', u'minOccurs': u'0'}, None), - MemberSpec_('populations', 'Population', 1, 0, {u'maxOccurs': u'unbounded', u'type': u'Population', u'name': u'population'}, None), - MemberSpec_('cell_sets', 'CellSet', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'CellSet', u'name': u'cellSet', u'minOccurs': u'0'}, None), - MemberSpec_('synaptic_connections', 'SynapticConnection', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SynapticConnection', u'name': u'synapticConnection', u'minOccurs': u'0'}, None), - MemberSpec_('projections', 'Projection', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Projection', u'name': u'projection', u'minOccurs': u'0'}, None), - MemberSpec_('electrical_projections', 'ElectricalProjection', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ElectricalProjection', u'name': u'electricalProjection', u'minOccurs': u'0'}, None), - MemberSpec_('continuous_projections', 'ContinuousProjection', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ContinuousProjection', u'name': u'continuousProjection', u'minOccurs': u'0'}, None), - MemberSpec_('explicit_inputs', 'ExplicitInput', 1, 1, {u'maxOccurs': u'unbounded', u'type': 
u'ExplicitInput', u'name': u'explicitInput', u'minOccurs': u'0'}, None), - MemberSpec_('input_lists', 'InputList', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'InputList', u'name': u'inputList', u'minOccurs': u'0'}, None), + MemberSpec_("type", "networkTypes", 0, 1, {"use": "optional", "name": "type"}), + MemberSpec_( + "temperature", + "Nml2Quantity_temperature", + 0, + 1, + {"use": "optional", "name": "temperature"}, + ), + MemberSpec_( + "spaces", + "Space", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "space", + "type": "Space", + }, + None, + ), + MemberSpec_( + "regions", + "Region", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "region", + "type": "Region", + }, + None, + ), + MemberSpec_( + "extracellular_properties", + "ExtracellularPropertiesLocal", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "extracellularProperties", + "type": "ExtracellularPropertiesLocal", + }, + None, + ), + MemberSpec_( + "populations", + "Population", + 1, + 0, + {"maxOccurs": "unbounded", "name": "population", "type": "Population"}, + None, + ), + MemberSpec_( + "cell_sets", + "CellSet", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "cellSet", + "type": "CellSet", + }, + None, + ), + MemberSpec_( + "synaptic_connections", + "SynapticConnection", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "synapticConnection", + "type": "SynapticConnection", + }, + None, + ), + MemberSpec_( + "projections", + "Projection", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "projection", + "type": "Projection", + }, + None, + ), + MemberSpec_( + "electrical_projections", + "ElectricalProjection", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "electricalProjection", + "type": "ElectricalProjection", + }, + None, + ), + MemberSpec_( + "continuous_projections", + "ContinuousProjection", + 1, + 1, + { + "maxOccurs": 
"unbounded", + "minOccurs": "0", + "name": "continuousProjection", + "type": "ContinuousProjection", + }, + None, + ), + MemberSpec_( + "explicit_inputs", + "ExplicitInput", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "explicitInput", + "type": "ExplicitInput", + }, + None, + ), + MemberSpec_( + "input_lists", + "InputList", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "inputList", + "type": "InputList", + }, + None, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, type=None, temperature=None, spaces=None, regions=None, extracellular_properties=None, populations=None, cell_sets=None, synaptic_connections=None, projections=None, electrical_projections=None, continuous_projections=None, explicit_inputs=None, input_lists=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + type=None, + temperature=None, + spaces=None, + regions=None, + extracellular_properties=None, + populations=None, + cell_sets=None, + synaptic_connections=None, + projections=None, + electrical_projections=None, + continuous_projections=None, + explicit_inputs=None, + input_lists=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Network, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("Network"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.type = _cast(None, type) + self.type_nsprefix_ = None self.temperature = _cast(None, temperature) + self.temperature_nsprefix_ = None if spaces is None: self.spaces = [] 
else: self.spaces = spaces + self.spaces_nsprefix_ = None if regions is None: self.regions = [] else: self.regions = regions + self.regions_nsprefix_ = None if extracellular_properties is None: self.extracellular_properties = [] else: self.extracellular_properties = extracellular_properties + self.extracellular_properties_nsprefix_ = None if populations is None: self.populations = [] else: self.populations = populations + self.populations_nsprefix_ = None if cell_sets is None: self.cell_sets = [] else: self.cell_sets = cell_sets + self.cell_sets_nsprefix_ = None if synaptic_connections is None: self.synaptic_connections = [] else: self.synaptic_connections = synaptic_connections + self.synaptic_connections_nsprefix_ = None if projections is None: self.projections = [] else: self.projections = projections + self.projections_nsprefix_ = None if electrical_projections is None: self.electrical_projections = [] else: self.electrical_projections = electrical_projections + self.electrical_projections_nsprefix_ = None if continuous_projections is None: self.continuous_projections = [] else: self.continuous_projections = continuous_projections + self.continuous_projections_nsprefix_ = None if explicit_inputs is None: self.explicit_inputs = [] else: self.explicit_inputs = explicit_inputs + self.explicit_inputs_nsprefix_ = None if input_lists is None: self.input_lists = [] else: self.input_lists = input_lists + self.input_lists_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Network) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Network) if subclass is not None: return subclass(*args_, **kwargs_) if Network.subclass: return Network.subclass(*args_, **kwargs_) else: return Network(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_networkTypes(self, value): # Validate type networkTypes, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - value = str(value) - enumerations = ['network', 'networkWithTemperature'] - enumeration_respectee = False - for enum in enumerations: - if value == enum: - enumeration_respectee = True - break - if not enumeration_respectee: - warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on networkTypes' % {"value" : value.encode("utf-8")} ) + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + value = value + enumerations = ["network", "networkWithTemperature"] + if value not in enumerations: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on networkTypes' + % {"value": encode_str_2_3(value), "lineno": lineno} + ) + result = False + def validate_Nml2Quantity_temperature(self, value): # Validate type Nml2Quantity_temperature, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_temperature_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_temperature_patterns_, )) - validate_Nml2Quantity_temperature_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(degC)$']] - def hasContent_(self): - if ( - self.spaces or - self.regions or - self.extracellular_properties or - self.populations or - self.cell_sets or - self.synaptic_connections or - self.projections or - self.electrical_projections or - self.continuous_projections or - self.explicit_inputs or - self.input_lists or - super(Network, self).hasContent_() + self.validate_Nml2Quantity_temperature_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_temperature_patterns_, + ) + ) + + validate_Nml2Quantity_temperature_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(degC))$"] + ] + + def _hasContent(self): + if ( + self.spaces + or self.regions + or self.extracellular_properties + or self.populations + or self.cell_sets + or self.synaptic_connections + or self.projections + or self.electrical_projections + or self.continuous_projections + or self.explicit_inputs + or self.input_lists + or super(Network, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Network', pretty_print=True): - imported_ns_def_ = 
GenerateDSNamespaceDefs_.get('Network') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="Network", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Network") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Network": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Network') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Network', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Network" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Network", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Network'): - super(Network, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Network') - if self.type is not None and 'type' not in already_processed: - already_processed.add('type') - outfile.write(' 
type=%s' % (quote_attrib(self.type), )) - if self.temperature is not None and 'temperature' not in already_processed: - already_processed.add('temperature') - outfile.write(' temperature=%s' % (quote_attrib(self.temperature), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Network', fromsubclass_=False, pretty_print=True): - super(Network, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Network" + ): + super(Network, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Network" + ) + if self.type is not None and "type" not in already_processed: + already_processed.add("type") + outfile.write( + " type=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.type), input_name="type" + ) + ), + ) + ) + if self.temperature is not None and "temperature" not in already_processed: + already_processed.add("temperature") + outfile.write( + " temperature=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.temperature), input_name="temperature" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="Network", + fromsubclass_=False, + pretty_print=True, + ): + super(Network, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for space_ in self.spaces: - space_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='space', pretty_print=pretty_print) + namespaceprefix_ = ( + self.spaces_nsprefix_ + ":" + if (UseCapturedNS_ and 
self.spaces_nsprefix_) + else "" + ) + space_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="space", + pretty_print=pretty_print, + ) for region_ in self.regions: - region_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='region', pretty_print=pretty_print) + namespaceprefix_ = ( + self.regions_nsprefix_ + ":" + if (UseCapturedNS_ and self.regions_nsprefix_) + else "" + ) + region_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="region", + pretty_print=pretty_print, + ) for extracellularProperties_ in self.extracellular_properties: - extracellularProperties_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='extracellularProperties', pretty_print=pretty_print) + namespaceprefix_ = ( + self.extracellular_properties_nsprefix_ + ":" + if (UseCapturedNS_ and self.extracellular_properties_nsprefix_) + else "" + ) + extracellularProperties_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="extracellularProperties", + pretty_print=pretty_print, + ) for population_ in self.populations: - population_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='population', pretty_print=pretty_print) + namespaceprefix_ = ( + self.populations_nsprefix_ + ":" + if (UseCapturedNS_ and self.populations_nsprefix_) + else "" + ) + population_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="population", + pretty_print=pretty_print, + ) for cellSet_ in self.cell_sets: - cellSet_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='cellSet', pretty_print=pretty_print) + namespaceprefix_ = ( + self.cell_sets_nsprefix_ + ":" + if (UseCapturedNS_ and self.cell_sets_nsprefix_) + else "" + ) + cellSet_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="cellSet", + pretty_print=pretty_print, + ) for synapticConnection_ in self.synaptic_connections: - synapticConnection_.export(outfile, level, 
namespaceprefix_, namespacedef_='', name_='synapticConnection', pretty_print=pretty_print) + namespaceprefix_ = ( + self.synaptic_connections_nsprefix_ + ":" + if (UseCapturedNS_ and self.synaptic_connections_nsprefix_) + else "" + ) + synapticConnection_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="synapticConnection", + pretty_print=pretty_print, + ) for projection_ in self.projections: - projection_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='projection', pretty_print=pretty_print) + namespaceprefix_ = ( + self.projections_nsprefix_ + ":" + if (UseCapturedNS_ and self.projections_nsprefix_) + else "" + ) + projection_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="projection", + pretty_print=pretty_print, + ) for electricalProjection_ in self.electrical_projections: - electricalProjection_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='electricalProjection', pretty_print=pretty_print) + namespaceprefix_ = ( + self.electrical_projections_nsprefix_ + ":" + if (UseCapturedNS_ and self.electrical_projections_nsprefix_) + else "" + ) + electricalProjection_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="electricalProjection", + pretty_print=pretty_print, + ) for continuousProjection_ in self.continuous_projections: - continuousProjection_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='continuousProjection', pretty_print=pretty_print) + namespaceprefix_ = ( + self.continuous_projections_nsprefix_ + ":" + if (UseCapturedNS_ and self.continuous_projections_nsprefix_) + else "" + ) + continuousProjection_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="continuousProjection", + pretty_print=pretty_print, + ) for explicitInput_ in self.explicit_inputs: - explicitInput_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='explicitInput', pretty_print=pretty_print) + namespaceprefix_ 
= ( + self.explicit_inputs_nsprefix_ + ":" + if (UseCapturedNS_ and self.explicit_inputs_nsprefix_) + else "" + ) + explicitInput_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="explicitInput", + pretty_print=pretty_print, + ) for inputList_ in self.input_lists: - inputList_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='inputList', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.input_lists_nsprefix_ + ":" + if (UseCapturedNS_ and self.input_lists_nsprefix_) + else "" + ) + inputList_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="inputList", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('type', node) - if value is not None and 'type' not in already_processed: - already_processed.add('type') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") self.type = value - self.validate_networkTypes(self.type) # validate type networkTypes - value = find_attr_value_('temperature', node) - if value is not None and 'temperature' not in already_processed: - already_processed.add('temperature') + self.validate_networkTypes(self.type) # validate type networkTypes + value = find_attr_value_("temperature", node) + if value is 
not None and "temperature" not in already_processed: + already_processed.add("temperature") self.temperature = value - self.validate_Nml2Quantity_temperature(self.temperature) # validate type Nml2Quantity_temperature - super(Network, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'space': + self.validate_Nml2Quantity_temperature( + self.temperature + ) # validate type Nml2Quantity_temperature + super(Network, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "space": obj_ = Space.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.spaces.append(obj_) - obj_.original_tagname_ = 'space' - elif nodeName_ == 'region': + obj_.original_tagname_ = "space" + elif nodeName_ == "region": obj_ = Region.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.regions.append(obj_) - obj_.original_tagname_ = 'region' - elif nodeName_ == 'extracellularProperties': + obj_.original_tagname_ = "region" + elif nodeName_ == "extracellularProperties": obj_ = ExtracellularPropertiesLocal.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.extracellular_properties.append(obj_) - obj_.original_tagname_ = 'extracellularProperties' - elif nodeName_ == 'population': + obj_.original_tagname_ = "extracellularProperties" + elif nodeName_ == "population": obj_ = Population.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.populations.append(obj_) - obj_.original_tagname_ = 'population' - elif nodeName_ == 'cellSet': + obj_.original_tagname_ = "population" + elif nodeName_ == "cellSet": obj_ = CellSet.factory(parent_object_=self) - obj_.build(child_) + 
obj_.build(child_, gds_collector_=gds_collector_) self.cell_sets.append(obj_) - obj_.original_tagname_ = 'cellSet' - elif nodeName_ == 'synapticConnection': + obj_.original_tagname_ = "cellSet" + elif nodeName_ == "synapticConnection": obj_ = SynapticConnection.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.synaptic_connections.append(obj_) - obj_.original_tagname_ = 'synapticConnection' - elif nodeName_ == 'projection': + obj_.original_tagname_ = "synapticConnection" + elif nodeName_ == "projection": obj_ = Projection.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.projections.append(obj_) - obj_.original_tagname_ = 'projection' - elif nodeName_ == 'electricalProjection': + obj_.original_tagname_ = "projection" + elif nodeName_ == "electricalProjection": obj_ = ElectricalProjection.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.electrical_projections.append(obj_) - obj_.original_tagname_ = 'electricalProjection' - elif nodeName_ == 'continuousProjection': + obj_.original_tagname_ = "electricalProjection" + elif nodeName_ == "continuousProjection": obj_ = ContinuousProjection.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.continuous_projections.append(obj_) - obj_.original_tagname_ = 'continuousProjection' - elif nodeName_ == 'explicitInput': + obj_.original_tagname_ = "continuousProjection" + elif nodeName_ == "explicitInput": obj_ = ExplicitInput.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.explicit_inputs.append(obj_) - obj_.original_tagname_ = 'explicitInput' - elif nodeName_ == 'inputList': + obj_.original_tagname_ = "explicitInput" + elif nodeName_ == "inputList": obj_ = InputList.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, 
gds_collector_=gds_collector_) self.input_lists.append(obj_) - obj_.original_tagname_ = 'inputList' - super(Network, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "inputList" + super(Network, self)._buildChildren(child_, node, nodeName_, True) warn_count = 0 - def get_by_id(self,id): + + def get_by_id(self, id): + """Get a component by its ID + + :param id: ID of component to find + :type id: str + :returns: component with specified ID or None if no component with specified ID found + """ all_ids = [] for ms in self.member_data_items_: mlist = self.__getattribute__(ms.name) for m in mlist: - if hasattr(m,"id"): + if hasattr(m, "id"): if m.id == id: return m else: all_ids.append(m.id) from neuroml.loaders import print_ - if self.warn_count<10: - print_("Id "+id+" not found in element. All ids: "+str(sorted(all_ids))) - self.warn_count+=1 - elif self.warn_count==10: + + if self.warn_count < 10: + print_( + "Id " + + id + + " not found in element. All ids: " + + str(sorted(all_ids)) + ) + self.warn_count += 1 + elif self.warn_count == 10: print_(" - Suppressing further warnings about id not found...") return None - def __str__(self): - return "Network "+str(self.id)+" with "+str(len(self.populations))+" population(s)" + return ( + "Network " + + str(self.id) + + " with " + + str(len(self.populations)) + + " population(s)" + ) - def exportHdf5(self, h5file, h5Group): - #print("Exporting Network: "+str(self.id)+" as HDF5") - + """Export to HDF5 file.""" + # print("Exporting Network: "+str(self.id)+" as HDF5") import numpy - netGroup = h5file.create_group(h5Group, 'network') + netGroup = h5file.create_group(h5Group, "network") netGroup._f_setattr("id", self.id) netGroup._f_setattr("notes", self.notes) if self.temperature: netGroup._f_setattr("temperature", self.temperature) - for pop in self.populations: pop.exportHdf5(h5file, netGroup) @@ -8059,4335 +18426,10545 @@ def exportHdf5(self, h5file, h5Group): for il in self.input_lists: 
il.exportHdf5(h5file, netGroup) - # end class Network class TransientPoissonFiringSynapse(Standalone): + """TransientPoissonFiringSynapse -- Poisson spike generator firing at **averageRate** after a **delay** and for a **duration,** connected to single **synapse** that is triggered every time a spike is generated, providing an input current. Similar to ComponentType **poissonFiringSynapse** . + \n + :param averageRate: + :type averageRate: per_time + :param delay: + :type delay: time + :param duration: + :type duration: time + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('average_rate', 'Nml2Quantity_pertime', 0, 0, {'use': u'required'}), - MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('synapse', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('spike_target', 'xs:string', 0, 0, {'use': u'required'}), + MemberSpec_( + "average_rate", + "Nml2Quantity_pertime", + 0, + 0, + {"use": "required", "name": "average_rate"}, + ), + MemberSpec_( + "delay", "Nml2Quantity_time", 0, 0, {"use": "required", "name": "delay"} + ), + MemberSpec_( + "duration", + "Nml2Quantity_time", + 0, + 0, + {"use": "required", "name": "duration"}, + ), + MemberSpec_( + "synapse", "xs:string", 0, 0, {"use": "required", "name": "synapse"} + ), + MemberSpec_( + "spike_target", + "xs:string", + 0, + 0, + {"use": "required", "name": "spike_target"}, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, average_rate=None, delay=None, duration=None, synapse=None, spike_target=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + average_rate=None, + delay=None, + duration=None, + synapse=None, + spike_target=None, + gds_collector_=None, + **kwargs_ + ): + 
self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(TransientPoissonFiringSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("TransientPoissonFiringSynapse"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.average_rate = _cast(None, average_rate) + self.average_rate_nsprefix_ = None self.delay = _cast(None, delay) + self.delay_nsprefix_ = None self.duration = _cast(None, duration) + self.duration_nsprefix_ = None self.synapse = _cast(None, synapse) + self.synapse_nsprefix_ = None self.spike_target = _cast(None, spike_target) + self.spike_target_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, TransientPoissonFiringSynapse) + CurrentSubclassModule_, TransientPoissonFiringSynapse + ) if subclass is not None: return subclass(*args_, **kwargs_) if TransientPoissonFiringSynapse.subclass: return TransientPoissonFiringSynapse.subclass(*args_, **kwargs_) else: return TransientPoissonFiringSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_pertime(self, value): # Validate type Nml2Quantity_pertime, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_pertime_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_pertime_patterns_, )) - validate_Nml2Quantity_pertime_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz)$']] + self.validate_Nml2Quantity_pertime_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_pertime_patterns_, + ) + ) + + validate_Nml2Quantity_pertime_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz))$"] + ] + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] - def hasContent_(self): if ( - super(TransientPoissonFiringSynapse, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_time_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$"] + ] + + def _hasContent(self): + if super(TransientPoissonFiringSynapse, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TransientPoissonFiringSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('TransientPoissonFiringSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="TransientPoissonFiringSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("TransientPoissonFiringSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if ( + self.original_tagname_ is 
not None + and name_ == "TransientPoissonFiringSynapse" + ): name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TransientPoissonFiringSynapse') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TransientPoissonFiringSynapse', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="TransientPoissonFiringSynapse", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="TransientPoissonFiringSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TransientPoissonFiringSynapse'): - super(TransientPoissonFiringSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TransientPoissonFiringSynapse') - if self.average_rate is not None and 'average_rate' not in already_processed: - already_processed.add('average_rate') - outfile.write(' averageRate=%s' % (quote_attrib(self.average_rate), )) - if self.delay is not None and 'delay' not in already_processed: - already_processed.add('delay') - outfile.write(' delay=%s' % (quote_attrib(self.delay), )) - if self.duration is not None and 'duration' not in 
already_processed: - already_processed.add('duration') - outfile.write(' duration=%s' % (quote_attrib(self.duration), )) - if self.synapse is not None and 'synapse' not in already_processed: - already_processed.add('synapse') - outfile.write(' synapse=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.synapse), input_name='synapse')), )) - if self.spike_target is not None and 'spike_target' not in already_processed: - already_processed.add('spike_target') - outfile.write(' spikeTarget=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.spike_target), input_name='spikeTarget')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TransientPoissonFiringSynapse', fromsubclass_=False, pretty_print=True): - super(TransientPoissonFiringSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="TransientPoissonFiringSynapse", + ): + super(TransientPoissonFiringSynapse, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="TransientPoissonFiringSynapse", + ) + if self.average_rate is not None and "average_rate" not in already_processed: + already_processed.add("average_rate") + outfile.write( + " averageRate=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.average_rate), input_name="averageRate" + ) + ), + ) + ) + if self.delay is not None and "delay" not in already_processed: + already_processed.add("delay") + outfile.write( + " delay=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.delay), input_name="delay" + ) + ), + ) + ) + if self.duration is not None 
and "duration" not in already_processed: + already_processed.add("duration") + outfile.write( + " duration=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.duration), input_name="duration" + ) + ), + ) + ) + if self.synapse is not None and "synapse" not in already_processed: + already_processed.add("synapse") + outfile.write( + " synapse=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.synapse), input_name="synapse" + ) + ), + ) + ) + if self.spike_target is not None and "spike_target" not in already_processed: + already_processed.add("spike_target") + outfile.write( + " spikeTarget=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.spike_target), input_name="spikeTarget" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="TransientPoissonFiringSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(TransientPoissonFiringSynapse, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('averageRate', node) - if value is not None and 'averageRate' not in already_processed: - already_processed.add('averageRate') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("averageRate", node) + if value is not None and "averageRate" not in 
already_processed: + already_processed.add("averageRate") self.average_rate = value - self.validate_Nml2Quantity_pertime(self.average_rate) # validate type Nml2Quantity_pertime - value = find_attr_value_('delay', node) - if value is not None and 'delay' not in already_processed: - already_processed.add('delay') + self.validate_Nml2Quantity_pertime( + self.average_rate + ) # validate type Nml2Quantity_pertime + value = find_attr_value_("delay", node) + if value is not None and "delay" not in already_processed: + already_processed.add("delay") self.delay = value - self.validate_Nml2Quantity_time(self.delay) # validate type Nml2Quantity_time - value = find_attr_value_('duration', node) - if value is not None and 'duration' not in already_processed: - already_processed.add('duration') + self.validate_Nml2Quantity_time( + self.delay + ) # validate type Nml2Quantity_time + value = find_attr_value_("duration", node) + if value is not None and "duration" not in already_processed: + already_processed.add("duration") self.duration = value - self.validate_Nml2Quantity_time(self.duration) # validate type Nml2Quantity_time - value = find_attr_value_('synapse', node) - if value is not None and 'synapse' not in already_processed: - already_processed.add('synapse') + self.validate_Nml2Quantity_time( + self.duration + ) # validate type Nml2Quantity_time + value = find_attr_value_("synapse", node) + if value is not None and "synapse" not in already_processed: + already_processed.add("synapse") self.synapse = value - value = find_attr_value_('spikeTarget', node) - if value is not None and 'spikeTarget' not in already_processed: - already_processed.add('spikeTarget') + value = find_attr_value_("spikeTarget", node) + if value is not None and "spikeTarget" not in already_processed: + already_processed.add("spikeTarget") self.spike_target = value - super(TransientPoissonFiringSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, 
fromsubclass_=False): - super(TransientPoissonFiringSynapse, self).buildChildren(child_, node, nodeName_, True) + super(TransientPoissonFiringSynapse, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(TransientPoissonFiringSynapse, self)._buildChildren( + child_, node, nodeName_, True + ) pass + + # end class TransientPoissonFiringSynapse class PoissonFiringSynapse(Standalone): + """PoissonFiringSynapse -- Poisson spike generator firing at **averageRate,** which is connected to single **synapse** that is triggered every time a spike is generated, producing an input current. See also **transientPoissonFiringSynapse** . + \n + :param averageRate: The average rate at which spikes are emitted + :type averageRate: per_time + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('average_rate', 'Nml2Quantity_pertime', 0, 0, {'use': u'required'}), - MemberSpec_('synapse', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('spike_target', 'xs:string', 0, 0, {'use': u'required'}), + MemberSpec_( + "average_rate", + "Nml2Quantity_pertime", + 0, + 0, + {"use": "required", "name": "average_rate"}, + ), + MemberSpec_( + "synapse", "xs:string", 0, 0, {"use": "required", "name": "synapse"} + ), + MemberSpec_( + "spike_target", + "xs:string", + 0, + 0, + {"use": "required", "name": "spike_target"}, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, average_rate=None, synapse=None, spike_target=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + average_rate=None, + synapse=None, + spike_target=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ 
= None - self.parent_object_ = kwargs_.get('parent_object_') - super(PoissonFiringSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("PoissonFiringSynapse"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.average_rate = _cast(None, average_rate) + self.average_rate_nsprefix_ = None self.synapse = _cast(None, synapse) + self.synapse_nsprefix_ = None self.spike_target = _cast(None, spike_target) + self.spike_target_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, PoissonFiringSynapse) + CurrentSubclassModule_, PoissonFiringSynapse + ) if subclass is not None: return subclass(*args_, **kwargs_) if PoissonFiringSynapse.subclass: return PoissonFiringSynapse.subclass(*args_, **kwargs_) else: return PoissonFiringSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_pertime(self, value): # Validate type Nml2Quantity_pertime, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_pertime_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_pertime_patterns_, )) - validate_Nml2Quantity_pertime_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz)$']] - def hasContent_(self): if ( - super(PoissonFiringSynapse, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_pertime_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_pertime_patterns_, + ) + ) + + validate_Nml2Quantity_pertime_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz))$"] + ] + + def _hasContent(self): + if super(PoissonFiringSynapse, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PoissonFiringSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('PoissonFiringSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PoissonFiringSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("PoissonFiringSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and 
name_ == "PoissonFiringSynapse": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PoissonFiringSynapse') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PoissonFiringSynapse', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="PoissonFiringSynapse", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="PoissonFiringSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PoissonFiringSynapse'): - super(PoissonFiringSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PoissonFiringSynapse') - if self.average_rate is not None and 'average_rate' not in already_processed: - already_processed.add('average_rate') - outfile.write(' averageRate=%s' % (quote_attrib(self.average_rate), )) - if self.synapse is not None and 'synapse' not in already_processed: - already_processed.add('synapse') - outfile.write(' synapse=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.synapse), input_name='synapse')), )) - if self.spike_target is not None and 'spike_target' not in already_processed: - 
already_processed.add('spike_target') - outfile.write(' spikeTarget=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.spike_target), input_name='spikeTarget')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PoissonFiringSynapse', fromsubclass_=False, pretty_print=True): - super(PoissonFiringSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="PoissonFiringSynapse", + ): + super(PoissonFiringSynapse, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="PoissonFiringSynapse", + ) + if self.average_rate is not None and "average_rate" not in already_processed: + already_processed.add("average_rate") + outfile.write( + " averageRate=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.average_rate), input_name="averageRate" + ) + ), + ) + ) + if self.synapse is not None and "synapse" not in already_processed: + already_processed.add("synapse") + outfile.write( + " synapse=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.synapse), input_name="synapse" + ) + ), + ) + ) + if self.spike_target is not None and "spike_target" not in already_processed: + already_processed.add("spike_target") + outfile.write( + " spikeTarget=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.spike_target), input_name="spikeTarget" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PoissonFiringSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(PoissonFiringSynapse, 
self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('averageRate', node) - if value is not None and 'averageRate' not in already_processed: - already_processed.add('averageRate') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("averageRate", node) + if value is not None and "averageRate" not in already_processed: + already_processed.add("averageRate") self.average_rate = value - self.validate_Nml2Quantity_pertime(self.average_rate) # validate type Nml2Quantity_pertime - value = find_attr_value_('synapse', node) - if value is not None and 'synapse' not in already_processed: - already_processed.add('synapse') + self.validate_Nml2Quantity_pertime( + self.average_rate + ) # validate type Nml2Quantity_pertime + value = find_attr_value_("synapse", node) + if value is not None and "synapse" not in already_processed: + already_processed.add("synapse") self.synapse = value - value = find_attr_value_('spikeTarget', node) - if value is not None and 'spikeTarget' not in already_processed: - already_processed.add('spikeTarget') + value = find_attr_value_("spikeTarget", node) + if value is not None and "spikeTarget" not in already_processed: + already_processed.add("spikeTarget") self.spike_target = value - super(PoissonFiringSynapse, self).buildAttributes(node, attrs, already_processed) - def 
buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(PoissonFiringSynapse, self).buildChildren(child_, node, nodeName_, True) + super(PoissonFiringSynapse, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(PoissonFiringSynapse, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class PoissonFiringSynapse class SpikeGeneratorPoisson(Standalone): + """SpikeGeneratorPoisson -- Generator of spikes whose ISI is distributed according to an exponential PDF with scale: 1 / **averageRate** + \n + :param averageRate: The average rate at which spikes are emitted + :type averageRate: per_time + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('average_rate', 'Nml2Quantity_pertime', 0, 0, {'use': u'required'}), + MemberSpec_( + "average_rate", + "Nml2Quantity_pertime", + 0, + 0, + {"use": "required", "name": "average_rate"}, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, average_rate=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + average_rate=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(SpikeGeneratorPoisson, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("SpikeGeneratorPoisson"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + extensiontype_, + **kwargs_ + ) self.average_rate = 
_cast(None, average_rate) + self.average_rate_nsprefix_ = None self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, SpikeGeneratorPoisson) + CurrentSubclassModule_, SpikeGeneratorPoisson + ) if subclass is not None: return subclass(*args_, **kwargs_) if SpikeGeneratorPoisson.subclass: return SpikeGeneratorPoisson.subclass(*args_, **kwargs_) else: return SpikeGeneratorPoisson(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_pertime(self, value): # Validate type Nml2Quantity_pertime, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_pertime_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_pertime_patterns_, )) - validate_Nml2Quantity_pertime_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz)$']] - def hasContent_(self): if ( - super(SpikeGeneratorPoisson, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_pertime_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_pertime_patterns_, + ) + ) + + validate_Nml2Quantity_pertime_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz))$"] + ] + + def _hasContent(self): + if super(SpikeGeneratorPoisson, self)._hasContent(): return True 
else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeGeneratorPoisson', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SpikeGeneratorPoisson') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpikeGeneratorPoisson", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SpikeGeneratorPoisson") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "SpikeGeneratorPoisson": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGeneratorPoisson') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpikeGeneratorPoisson', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="SpikeGeneratorPoisson", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SpikeGeneratorPoisson", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', 
name_='SpikeGeneratorPoisson'): - super(SpikeGeneratorPoisson, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGeneratorPoisson') - if self.average_rate is not None and 'average_rate' not in already_processed: - already_processed.add('average_rate') - outfile.write(' averageRate=%s' % (quote_attrib(self.average_rate), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="SpikeGeneratorPoisson", + ): + super(SpikeGeneratorPoisson, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="SpikeGeneratorPoisson", + ) + if self.average_rate is not None and "average_rate" not in already_processed: + already_processed.add("average_rate") + outfile.write( + " averageRate=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.average_rate), input_name="averageRate" + ) + ), + ) + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeGeneratorPoisson', fromsubclass_=False, pretty_print=True): - super(SpikeGeneratorPoisson, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, 
self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpikeGeneratorPoisson", + fromsubclass_=False, + pretty_print=True, + ): + super(SpikeGeneratorPoisson, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('averageRate', node) - if value is not None and 'averageRate' not in already_processed: - already_processed.add('averageRate') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("averageRate", node) + if value is not None and "averageRate" not in already_processed: + already_processed.add("averageRate") self.average_rate = value - self.validate_Nml2Quantity_pertime(self.average_rate) # validate type Nml2Quantity_pertime - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_Nml2Quantity_pertime( + self.average_rate + ) # validate type Nml2Quantity_pertime + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(SpikeGeneratorPoisson, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, 
node, nodeName_, fromsubclass_=False): - super(SpikeGeneratorPoisson, self).buildChildren(child_, node, nodeName_, True) + super(SpikeGeneratorPoisson, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(SpikeGeneratorPoisson, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class SpikeGeneratorPoisson class SpikeGeneratorRandom(Standalone): + """SpikeGeneratorRandom -- Generator of spikes with a random interspike interval of at least **minISI** and at most **maxISI** + \n + :param maxISI: Maximum interspike interval + :type maxISI: time + :param minISI: Minimum interspike interval + :type minISI: time + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('max_isi', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('min_isi', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_( + "max_isi", "Nml2Quantity_time", 0, 0, {"use": "required", "name": "max_isi"} + ), + MemberSpec_( + "min_isi", "Nml2Quantity_time", 0, 0, {"use": "required", "name": "min_isi"} + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, max_isi=None, min_isi=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + max_isi=None, + min_isi=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(SpikeGeneratorRandom, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("SpikeGeneratorRandom"), self).__init__( + neuro_lex_id, id, metaid, 
notes, properties, annotation, **kwargs_ + ) self.max_isi = _cast(None, max_isi) + self.max_isi_nsprefix_ = None self.min_isi = _cast(None, min_isi) + self.min_isi_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, SpikeGeneratorRandom) + CurrentSubclassModule_, SpikeGeneratorRandom + ) if subclass is not None: return subclass(*args_, **kwargs_) if SpikeGeneratorRandom.subclass: return SpikeGeneratorRandom.subclass(*args_, **kwargs_) else: return SpikeGeneratorRandom(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] - def hasContent_(self): if ( - super(SpikeGeneratorRandom, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_time_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$"] + ] + + def _hasContent(self): + if super(SpikeGeneratorRandom, 
self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeGeneratorRandom', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SpikeGeneratorRandom') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpikeGeneratorRandom", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SpikeGeneratorRandom") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "SpikeGeneratorRandom": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGeneratorRandom') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpikeGeneratorRandom', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="SpikeGeneratorRandom", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SpikeGeneratorRandom", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, 
namespaceprefix_='', name_='SpikeGeneratorRandom'): - super(SpikeGeneratorRandom, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGeneratorRandom') - if self.max_isi is not None and 'max_isi' not in already_processed: - already_processed.add('max_isi') - outfile.write(' maxISI=%s' % (quote_attrib(self.max_isi), )) - if self.min_isi is not None and 'min_isi' not in already_processed: - already_processed.add('min_isi') - outfile.write(' minISI=%s' % (quote_attrib(self.min_isi), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeGeneratorRandom', fromsubclass_=False, pretty_print=True): - super(SpikeGeneratorRandom, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="SpikeGeneratorRandom", + ): + super(SpikeGeneratorRandom, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="SpikeGeneratorRandom", + ) + if self.max_isi is not None and "max_isi" not in already_processed: + already_processed.add("max_isi") + outfile.write( + " maxISI=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.max_isi), input_name="maxISI" + ) + ), + ) + ) + if self.min_isi is not None and "min_isi" not in already_processed: + already_processed.add("min_isi") + outfile.write( + " minISI=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.min_isi), input_name="minISI" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpikeGeneratorRandom", + fromsubclass_=False, + pretty_print=True, + ): + 
super(SpikeGeneratorRandom, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('maxISI', node) - if value is not None and 'maxISI' not in already_processed: - already_processed.add('maxISI') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("maxISI", node) + if value is not None and "maxISI" not in already_processed: + already_processed.add("maxISI") self.max_isi = value - self.validate_Nml2Quantity_time(self.max_isi) # validate type Nml2Quantity_time - value = find_attr_value_('minISI', node) - if value is not None and 'minISI' not in already_processed: - already_processed.add('minISI') + self.validate_Nml2Quantity_time( + self.max_isi + ) # validate type Nml2Quantity_time + value = find_attr_value_("minISI", node) + if value is not None and "minISI" not in already_processed: + already_processed.add("minISI") self.min_isi = value - self.validate_Nml2Quantity_time(self.min_isi) # validate type Nml2Quantity_time - super(SpikeGeneratorRandom, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(SpikeGeneratorRandom, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_time( + self.min_isi + ) # validate type Nml2Quantity_time + super(SpikeGeneratorRandom, self)._buildAttributes( + node, 
attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(SpikeGeneratorRandom, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class SpikeGeneratorRandom class SpikeGenerator(Standalone): + """SpikeGenerator -- Simple generator of spikes at a regular interval set by **period** + \n + :param period: Time between spikes. The first spike will be emitted after this time. + :type period: time + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('period', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_( + "period", "Nml2Quantity_time", 0, 0, {"use": "required", "name": "period"} + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, period=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + period=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(SpikeGenerator, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("SpikeGenerator"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.period = _cast(None, period) + self.period_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, SpikeGenerator) + subclass = getSubclassFromModule_(CurrentSubclassModule_, SpikeGenerator) if subclass is not None: return subclass(*args_, **kwargs_) if SpikeGenerator.subclass: return SpikeGenerator.subclass(*args_, **kwargs_) 
else: return SpikeGenerator(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] - def hasContent_(self): if ( - super(SpikeGenerator, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_time_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$"] + ] + + def _hasContent(self): + if super(SpikeGenerator, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeGenerator', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SpikeGenerator') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpikeGenerator", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SpikeGenerator") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" 
else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "SpikeGenerator": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGenerator') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpikeGenerator', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SpikeGenerator" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SpikeGenerator", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpikeGenerator'): - super(SpikeGenerator, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGenerator') - if self.period is not None and 'period' not in already_processed: - already_processed.add('period') - outfile.write(' period=%s' % (quote_attrib(self.period), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeGenerator', fromsubclass_=False, pretty_print=True): - super(SpikeGenerator, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): 
- already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="SpikeGenerator", + ): + super(SpikeGenerator, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SpikeGenerator" + ) + if self.period is not None and "period" not in already_processed: + already_processed.add("period") + outfile.write( + " period=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.period), input_name="period" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpikeGenerator", + fromsubclass_=False, + pretty_print=True, + ): + super(SpikeGenerator, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('period', node) - if value is not None and 'period' not in already_processed: - already_processed.add('period') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("period", node) + if value is not None and "period" not in already_processed: + already_processed.add("period") self.period = value - self.validate_Nml2Quantity_time(self.period) # validate type 
Nml2Quantity_time - super(SpikeGenerator, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(SpikeGenerator, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_time( + self.period + ) # validate type Nml2Quantity_time + super(SpikeGenerator, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(SpikeGenerator, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class SpikeGenerator class TimedSynapticInput(Standalone): + """TimedSynapticInput -- Spike array connected to a single **synapse,** producing a current triggered by each **spike** in the array.""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('synapse', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('spike_target', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('spikes', 'Spike', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Spike', u'name': u'spike', u'minOccurs': u'0'}, None), + MemberSpec_("synapse", "NmlId", 0, 0, {"use": "required", "name": "synapse"}), + MemberSpec_( + "spike_target", + "xs:string", + 0, + 0, + {"use": "required", "name": "spike_target"}, + ), + MemberSpec_( + "spikes", + "Spike", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "spike", + "type": "Spike", + }, + None, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, synapse=None, spike_target=None, spikes=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + synapse=None, + spike_target=None, + spikes=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None 
self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(TimedSynapticInput, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("TimedSynapticInput"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.synapse = _cast(None, synapse) + self.synapse_nsprefix_ = None self.spike_target = _cast(None, spike_target) + self.spike_target_nsprefix_ = None if spikes is None: self.spikes = [] else: self.spikes = spikes + self.spikes_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, TimedSynapticInput) + CurrentSubclassModule_, TimedSynapticInput + ) if subclass is not None: return subclass(*args_, **kwargs_) if TimedSynapticInput.subclass: return TimedSynapticInput.subclass(*args_, **kwargs_) else: return TimedSynapticInput(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): if ( - self.spikes or - super(TimedSynapticInput, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + + def _hasContent(self): + if self.spikes or super(TimedSynapticInput, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TimedSynapticInput', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('TimedSynapticInput') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="TimedSynapticInput", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("TimedSynapticInput") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "TimedSynapticInput": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: 
+ namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TimedSynapticInput') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TimedSynapticInput', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="TimedSynapticInput", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="TimedSynapticInput", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TimedSynapticInput'): - super(TimedSynapticInput, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TimedSynapticInput') - if self.synapse is not None and 'synapse' not in already_processed: - already_processed.add('synapse') - outfile.write(' synapse=%s' % (quote_attrib(self.synapse), )) - if self.spike_target is not None and 'spike_target' not in already_processed: - already_processed.add('spike_target') - outfile.write(' spikeTarget=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.spike_target), input_name='spikeTarget')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TimedSynapticInput', fromsubclass_=False, pretty_print=True): - super(TimedSynapticInput, 
self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="TimedSynapticInput", + ): + super(TimedSynapticInput, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="TimedSynapticInput", + ) + if self.synapse is not None and "synapse" not in already_processed: + already_processed.add("synapse") + outfile.write( + " synapse=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.synapse), input_name="synapse" + ) + ), + ) + ) + if self.spike_target is not None and "spike_target" not in already_processed: + already_processed.add("spike_target") + outfile.write( + " spikeTarget=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.spike_target), input_name="spikeTarget" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="TimedSynapticInput", + fromsubclass_=False, + pretty_print=True, + ): + super(TimedSynapticInput, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for spike_ in self.spikes: - spike_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='spike', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.spikes_nsprefix_ + ":" + if (UseCapturedNS_ and self.spikes_nsprefix_) + else "" + ) + spike_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="spike", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + 
self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('synapse', node) - if value is not None and 'synapse' not in already_processed: - already_processed.add('synapse') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("synapse", node) + if value is not None and "synapse" not in already_processed: + already_processed.add("synapse") self.synapse = value - self.validate_NmlId(self.synapse) # validate type NmlId - value = find_attr_value_('spikeTarget', node) - if value is not None and 'spikeTarget' not in already_processed: - already_processed.add('spikeTarget') + self.validate_NmlId(self.synapse) # validate type NmlId + value = find_attr_value_("spikeTarget", node) + if value is not None and "spikeTarget" not in already_processed: + already_processed.add("spikeTarget") self.spike_target = value - super(TimedSynapticInput, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'spike': + super(TimedSynapticInput, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "spike": obj_ = Spike.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.spikes.append(obj_) - obj_.original_tagname_ = 'spike' - super(TimedSynapticInput, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "spike" + 
super(TimedSynapticInput, self)._buildChildren(child_, node, nodeName_, True) + + # end class TimedSynapticInput class SpikeArray(Standalone): + """SpikeArray -- Set of spike ComponentTypes, each emitting one spike at a certain time. Can be used to feed a predetermined spike train into a cell""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('spikes', 'Spike', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Spike', u'name': u'spike', u'minOccurs': u'0'}, None), + MemberSpec_( + "spikes", + "Spike", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "spike", + "type": "Spike", + }, + None, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, spikes=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + spikes=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(SpikeArray, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("SpikeArray"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) if spikes is None: self.spikes = [] else: self.spikes = spikes + self.spikes_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, SpikeArray) + subclass = getSubclassFromModule_(CurrentSubclassModule_, SpikeArray) if subclass is not None: return subclass(*args_, **kwargs_) if SpikeArray.subclass: return SpikeArray.subclass(*args_, **kwargs_) else: return SpikeArray(*args_, **kwargs_) + factory = staticmethod(factory) - def 
hasContent_(self): - if ( - self.spikes or - super(SpikeArray, self).hasContent_() - ): + + def _hasContent(self): + if self.spikes or super(SpikeArray, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeArray', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SpikeArray') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="SpikeArray", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SpikeArray") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "SpikeArray": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeArray') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpikeArray', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SpikeArray" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SpikeArray", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - 
else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpikeArray'): - super(SpikeArray, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeArray') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeArray', fromsubclass_=False, pretty_print=True): - super(SpikeArray, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="SpikeArray" + ): + super(SpikeArray, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SpikeArray" + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="SpikeArray", + fromsubclass_=False, + pretty_print=True, + ): + super(SpikeArray, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for spike_ in self.spikes: - spike_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='spike', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.spikes_nsprefix_ + ":" + if (UseCapturedNS_ and self.spikes_nsprefix_) + else "" + ) + spike_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="spike", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + 
self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(SpikeArray, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'spike': + + def _buildAttributes(self, node, attrs, already_processed): + super(SpikeArray, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "spike": obj_ = Spike.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.spikes.append(obj_) - obj_.original_tagname_ = 'spike' - super(SpikeArray, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "spike" + super(SpikeArray, self)._buildChildren(child_, node, nodeName_, True) + + # end class SpikeArray class Spike(BaseNonNegativeIntegerId): + """Spike -- Emits a single spike at the specified **time** + \n + :param time: Time at which to emit one spike event + :type time: time + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('time', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_( + "time", "Nml2Quantity_time", 0, 0, {"use": "required", "name": "time"} + ), ] subclass = None superclass = BaseNonNegativeIntegerId - def __init__(self, neuro_lex_id=None, id=None, time=None, **kwargs_): + + def __init__( + self, neuro_lex_id=None, id=None, time=None, gds_collector_=None, **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Spike, self).__init__(neuro_lex_id, 
id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("Spike"), self).__init__(neuro_lex_id, id, **kwargs_) self.time = _cast(None, time) + self.time_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Spike) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Spike) if subclass is not None: return subclass(*args_, **kwargs_) if Spike.subclass: return Spike.subclass(*args_, **kwargs_) else: return Spike(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] - def hasContent_(self): if ( - super(Spike, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_time_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$"] + ] + + def _hasContent(self): + if super(Spike, self)._hasContent(): return 
True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Spike', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Spike') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Spike", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Spike") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Spike": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Spike') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Spike', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Spike'): - super(Spike, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Spike') - if self.time is not None and 'time' not in already_processed: - already_processed.add('time') - outfile.write(' time=%s' % (quote_attrib(self.time), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Spike', fromsubclass_=False, pretty_print=True): - super(Spike, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + 
) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Spike" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Spike", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Spike" + ): + super(Spike, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Spike" + ) + if self.time is not None and "time" not in already_processed: + already_processed.add("time") + outfile.write( + " time=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.time), input_name="time" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Spike", + fromsubclass_=False, + pretty_print=True, + ): + super(Spike, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('time', node) - if value is not None and 'time' not in already_processed: - already_processed.add('time') + + def _buildAttributes(self, node, attrs, 
already_processed): + value = find_attr_value_("time", node) + if value is not None and "time" not in already_processed: + already_processed.add("time") self.time = value - self.validate_Nml2Quantity_time(self.time) # validate type Nml2Quantity_time - super(Spike, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(Spike, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_time( + self.time + ) # validate type Nml2Quantity_time + super(Spike, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(Spike, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class Spike class VoltageClampTriple(Standalone): + """VoltageClampTriple -- Voltage clamp with 3 clamp levels. Applies a variable current **i** ( through **simpleSeriesResistance** ) to try to keep parent cell at **conditioningVoltage** until time **delay,** **testingVoltage** until **delay** + **duration,** and **returnVoltage** afterwards. Only enabled if **active** = 1. + \n + :param active: Whether the voltage clamp is active ( 1 ) or inactive ( 0 ). + :type active: none + :param delay: Delay before switching from conditioningVoltage to testingVoltage. + :type delay: time + :param duration: Duration to hold at testingVoltage. 
+ :type duration: time + :param conditioningVoltage: Target voltage before time delay + :type conditioningVoltage: voltage + :param testingVoltage: Target voltage between times delay and delay + duration + :type testingVoltage: voltage + :param returnVoltage: Target voltage after time duration + :type returnVoltage: voltage + :param simpleSeriesResistance: Current will be calculated by the difference in voltage between the target and parent, divided by this value + :type simpleSeriesResistance: resistance + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('active', 'ZeroOrOne', 0, 0, {'use': u'required'}), - MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('conditioning_voltage', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('testing_voltage', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('return_voltage', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('simple_series_resistance', 'Nml2Quantity_resistance', 0, 0, {'use': u'required'}), + MemberSpec_("active", "ZeroOrOne", 0, 0, {"use": "required", "name": "active"}), + MemberSpec_( + "delay", "Nml2Quantity_time", 0, 0, {"use": "required", "name": "delay"} + ), + MemberSpec_( + "duration", + "Nml2Quantity_time", + 0, + 0, + {"use": "required", "name": "duration"}, + ), + MemberSpec_( + "conditioning_voltage", + "Nml2Quantity_voltage", + 0, + 0, + {"use": "required", "name": "conditioning_voltage"}, + ), + MemberSpec_( + "testing_voltage", + "Nml2Quantity_voltage", + 0, + 0, + {"use": "required", "name": "testing_voltage"}, + ), + MemberSpec_( + "return_voltage", + "Nml2Quantity_voltage", + 0, + 0, + {"use": "required", "name": "return_voltage"}, + ), + MemberSpec_( + "simple_series_resistance", + "Nml2Quantity_resistance", + 0, + 0, + {"use": "required", "name": "simple_series_resistance"}, + ), ] subclass = None 
superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, active=None, delay=None, duration=None, conditioning_voltage=None, testing_voltage=None, return_voltage=None, simple_series_resistance=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + active=None, + delay=None, + duration=None, + conditioning_voltage=None, + testing_voltage=None, + return_voltage=None, + simple_series_resistance=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(VoltageClampTriple, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("VoltageClampTriple"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.active = _cast(float, active) + self.active_nsprefix_ = None self.delay = _cast(None, delay) + self.delay_nsprefix_ = None self.duration = _cast(None, duration) + self.duration_nsprefix_ = None self.conditioning_voltage = _cast(None, conditioning_voltage) + self.conditioning_voltage_nsprefix_ = None self.testing_voltage = _cast(None, testing_voltage) + self.testing_voltage_nsprefix_ = None self.return_voltage = _cast(None, return_voltage) + self.return_voltage_nsprefix_ = None self.simple_series_resistance = _cast(None, simple_series_resistance) + self.simple_series_resistance_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, VoltageClampTriple) + CurrentSubclassModule_, VoltageClampTriple + ) if subclass is not None: return subclass(*args_, **kwargs_) if VoltageClampTriple.subclass: return 
VoltageClampTriple.subclass(*args_, **kwargs_) else: return VoltageClampTriple(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_ZeroOrOne(self, value): # Validate type ZeroOrOne, a restriction on xs:double. - if value is not None and Validate_simpletypes_: - value = str(value) - enumerations = ['0', '1'] - enumeration_respectee = False - for enum in enumerations: - if value == enum: - enumeration_respectee = True - break - if not enumeration_respectee: - warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on ZeroOrOne' % {"value" : value.encode("utf-8")} ) + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, float): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (float)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + value = value + enumerations = [0.0, 1.0] + if value not in enumerations: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ZeroOrOne' + % {"value": encode_str_2_3(value), "lineno": lineno} + ) + result = False + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$"] + ] + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.validate_Nml2Quantity_voltage_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$"] + ] + def validate_Nml2Quantity_resistance(self, value): # Validate type Nml2Quantity_resistance, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_resistance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_resistance_patterns_, )) - validate_Nml2Quantity_resistance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(ohm|kohm|Mohm)$']] - def hasContent_(self): if ( - super(VoltageClampTriple, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_resistance_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_resistance_patterns_, + ) + ) + + validate_Nml2Quantity_resistance_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(ohm|kohm|Mohm))$"] + ] + + def _hasContent(self): + if super(VoltageClampTriple, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='VoltageClampTriple', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('VoltageClampTriple') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="VoltageClampTriple", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("VoltageClampTriple") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and 
name_ == "VoltageClampTriple": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='VoltageClampTriple') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='VoltageClampTriple', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="VoltageClampTriple", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="VoltageClampTriple", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='VoltageClampTriple'): - super(VoltageClampTriple, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='VoltageClampTriple') - if self.active is not None and 'active' not in already_processed: - already_processed.add('active') - outfile.write(' active=%s' % (quote_attrib(self.active), )) - if self.delay is not None and 'delay' not in already_processed: - already_processed.add('delay') - outfile.write(' delay=%s' % (quote_attrib(self.delay), )) - if self.duration is not None and 'duration' not in already_processed: - already_processed.add('duration') - outfile.write(' duration=%s' % (quote_attrib(self.duration), )) - if self.conditioning_voltage 
is not None and 'conditioning_voltage' not in already_processed: - already_processed.add('conditioning_voltage') - outfile.write(' conditioningVoltage=%s' % (quote_attrib(self.conditioning_voltage), )) - if self.testing_voltage is not None and 'testing_voltage' not in already_processed: - already_processed.add('testing_voltage') - outfile.write(' testingVoltage=%s' % (quote_attrib(self.testing_voltage), )) - if self.return_voltage is not None and 'return_voltage' not in already_processed: - already_processed.add('return_voltage') - outfile.write(' returnVoltage=%s' % (quote_attrib(self.return_voltage), )) - if self.simple_series_resistance is not None and 'simple_series_resistance' not in already_processed: - already_processed.add('simple_series_resistance') - outfile.write(' simpleSeriesResistance=%s' % (quote_attrib(self.simple_series_resistance), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='VoltageClampTriple', fromsubclass_=False, pretty_print=True): - super(VoltageClampTriple, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="VoltageClampTriple", + ): + super(VoltageClampTriple, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="VoltageClampTriple", + ) + if self.active is not None and "active" not in already_processed: + already_processed.add("active") + outfile.write( + ' active="%s"' + % self.gds_format_double(self.active, input_name="active") + ) + if self.delay is not None and "delay" not in already_processed: + already_processed.add("delay") + outfile.write( + " delay=%s" + % ( + self.gds_encode( + 
self.gds_format_string( + quote_attrib(self.delay), input_name="delay" + ) + ), + ) + ) + if self.duration is not None and "duration" not in already_processed: + already_processed.add("duration") + outfile.write( + " duration=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.duration), input_name="duration" + ) + ), + ) + ) + if ( + self.conditioning_voltage is not None + and "conditioning_voltage" not in already_processed + ): + already_processed.add("conditioning_voltage") + outfile.write( + " conditioningVoltage=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.conditioning_voltage), + input_name="conditioningVoltage", + ) + ), + ) + ) + if ( + self.testing_voltage is not None + and "testing_voltage" not in already_processed + ): + already_processed.add("testing_voltage") + outfile.write( + " testingVoltage=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.testing_voltage), + input_name="testingVoltage", + ) + ), + ) + ) + if ( + self.return_voltage is not None + and "return_voltage" not in already_processed + ): + already_processed.add("return_voltage") + outfile.write( + " returnVoltage=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.return_voltage), + input_name="returnVoltage", + ) + ), + ) + ) + if ( + self.simple_series_resistance is not None + and "simple_series_resistance" not in already_processed + ): + already_processed.add("simple_series_resistance") + outfile.write( + " simpleSeriesResistance=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.simple_series_resistance), + input_name="simpleSeriesResistance", + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="VoltageClampTriple", + fromsubclass_=False, + pretty_print=True, + ): + super(VoltageClampTriple, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + 
pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('active', node) - if value is not None and 'active' not in already_processed: - already_processed.add('active') - try: - self.active = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (active): %s' % exp) - self.validate_ZeroOrOne(self.active) # validate type ZeroOrOne - value = find_attr_value_('delay', node) - if value is not None and 'delay' not in already_processed: - already_processed.add('delay') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("active", node) + if value is not None and "active" not in already_processed: + already_processed.add("active") + value = self.gds_parse_double(value, node, "active") + self.active = value + self.validate_ZeroOrOne(self.active) # validate type ZeroOrOne + value = find_attr_value_("delay", node) + if value is not None and "delay" not in already_processed: + already_processed.add("delay") self.delay = value - self.validate_Nml2Quantity_time(self.delay) # validate type Nml2Quantity_time - value = find_attr_value_('duration', node) - if value is not None and 'duration' not in already_processed: - already_processed.add('duration') + self.validate_Nml2Quantity_time( + self.delay + ) # validate type Nml2Quantity_time + value = find_attr_value_("duration", node) + if value is not None and "duration" not in already_processed: + already_processed.add("duration") 
self.duration = value - self.validate_Nml2Quantity_time(self.duration) # validate type Nml2Quantity_time - value = find_attr_value_('conditioningVoltage', node) - if value is not None and 'conditioningVoltage' not in already_processed: - already_processed.add('conditioningVoltage') + self.validate_Nml2Quantity_time( + self.duration + ) # validate type Nml2Quantity_time + value = find_attr_value_("conditioningVoltage", node) + if value is not None and "conditioningVoltage" not in already_processed: + already_processed.add("conditioningVoltage") self.conditioning_voltage = value - self.validate_Nml2Quantity_voltage(self.conditioning_voltage) # validate type Nml2Quantity_voltage - value = find_attr_value_('testingVoltage', node) - if value is not None and 'testingVoltage' not in already_processed: - already_processed.add('testingVoltage') + self.validate_Nml2Quantity_voltage( + self.conditioning_voltage + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("testingVoltage", node) + if value is not None and "testingVoltage" not in already_processed: + already_processed.add("testingVoltage") self.testing_voltage = value - self.validate_Nml2Quantity_voltage(self.testing_voltage) # validate type Nml2Quantity_voltage - value = find_attr_value_('returnVoltage', node) - if value is not None and 'returnVoltage' not in already_processed: - already_processed.add('returnVoltage') + self.validate_Nml2Quantity_voltage( + self.testing_voltage + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("returnVoltage", node) + if value is not None and "returnVoltage" not in already_processed: + already_processed.add("returnVoltage") self.return_voltage = value - self.validate_Nml2Quantity_voltage(self.return_voltage) # validate type Nml2Quantity_voltage - value = find_attr_value_('simpleSeriesResistance', node) - if value is not None and 'simpleSeriesResistance' not in already_processed: - already_processed.add('simpleSeriesResistance') + 
self.validate_Nml2Quantity_voltage( + self.return_voltage + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("simpleSeriesResistance", node) + if value is not None and "simpleSeriesResistance" not in already_processed: + already_processed.add("simpleSeriesResistance") self.simple_series_resistance = value - self.validate_Nml2Quantity_resistance(self.simple_series_resistance) # validate type Nml2Quantity_resistance - super(VoltageClampTriple, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(VoltageClampTriple, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_resistance( + self.simple_series_resistance + ) # validate type Nml2Quantity_resistance + super(VoltageClampTriple, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(VoltageClampTriple, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class VoltageClampTriple class VoltageClamp(Standalone): + """VoltageClamp -- Voltage clamp. Applies a variable current **i** to try to keep parent at **targetVoltage.** Not yet fully tested!!! Consider using voltageClampTriple!! + \n + :param delay: Delay before change in current. Current is zero prior to this. + :type delay: time + :param duration: Duration for attempting to keep parent at targetVoltage. Current is zero after delay + duration. 
+ :type duration: time + :param targetVoltage: Current will be applied to try to get parent to this target voltage + :type targetVoltage: voltage + :param simpleSeriesResistance: Current will be calculated by the difference in voltage between the target and parent, divided by this value + :type simpleSeriesResistance: resistance + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('target_voltage', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('simple_series_resistance', 'Nml2Quantity_resistance', 0, 0, {'use': u'required'}), + MemberSpec_( + "delay", "Nml2Quantity_time", 0, 0, {"use": "required", "name": "delay"} + ), + MemberSpec_( + "duration", + "Nml2Quantity_time", + 0, + 0, + {"use": "required", "name": "duration"}, + ), + MemberSpec_( + "target_voltage", + "Nml2Quantity_voltage", + 0, + 0, + {"use": "required", "name": "target_voltage"}, + ), + MemberSpec_( + "simple_series_resistance", + "Nml2Quantity_resistance", + 0, + 0, + {"use": "required", "name": "simple_series_resistance"}, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, delay=None, duration=None, target_voltage=None, simple_series_resistance=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + delay=None, + duration=None, + target_voltage=None, + simple_series_resistance=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(VoltageClamp, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = 
kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("VoltageClamp"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.delay = _cast(None, delay) + self.delay_nsprefix_ = None self.duration = _cast(None, duration) + self.duration_nsprefix_ = None self.target_voltage = _cast(None, target_voltage) + self.target_voltage_nsprefix_ = None self.simple_series_resistance = _cast(None, simple_series_resistance) + self.simple_series_resistance_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, VoltageClamp) + subclass = getSubclassFromModule_(CurrentSubclassModule_, VoltageClamp) if subclass is not None: return subclass(*args_, **kwargs_) if VoltageClamp.subclass: return VoltageClamp.subclass(*args_, **kwargs_) else: return VoltageClamp(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$"] + ] + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.validate_Nml2Quantity_voltage_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$"] + ] + def validate_Nml2Quantity_resistance(self, value): # Validate type Nml2Quantity_resistance, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_resistance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_resistance_patterns_, )) - validate_Nml2Quantity_resistance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(ohm|kohm|Mohm)$']] - def hasContent_(self): if ( - super(VoltageClamp, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_resistance_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_resistance_patterns_, + ) + ) + + validate_Nml2Quantity_resistance_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(ohm|kohm|Mohm))$"] + ] + + def _hasContent(self): + if super(VoltageClamp, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='VoltageClamp', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('VoltageClamp') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="VoltageClamp", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("VoltageClamp") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "VoltageClamp": name_ = 
self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='VoltageClamp') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='VoltageClamp', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="VoltageClamp" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="VoltageClamp", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='VoltageClamp'): - super(VoltageClamp, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='VoltageClamp') - if self.delay is not None and 'delay' not in already_processed: - already_processed.add('delay') - outfile.write(' delay=%s' % (quote_attrib(self.delay), )) - if self.duration is not None and 'duration' not in already_processed: - already_processed.add('duration') - outfile.write(' duration=%s' % (quote_attrib(self.duration), )) - if self.target_voltage is not None and 'target_voltage' not in already_processed: - already_processed.add('target_voltage') - outfile.write(' targetVoltage=%s' % (quote_attrib(self.target_voltage), )) - if self.simple_series_resistance is not None and 'simple_series_resistance' not 
in already_processed: - already_processed.add('simple_series_resistance') - outfile.write(' simpleSeriesResistance=%s' % (quote_attrib(self.simple_series_resistance), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='VoltageClamp', fromsubclass_=False, pretty_print=True): - super(VoltageClamp, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="VoltageClamp", + ): + super(VoltageClamp, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="VoltageClamp" + ) + if self.delay is not None and "delay" not in already_processed: + already_processed.add("delay") + outfile.write( + " delay=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.delay), input_name="delay" + ) + ), + ) + ) + if self.duration is not None and "duration" not in already_processed: + already_processed.add("duration") + outfile.write( + " duration=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.duration), input_name="duration" + ) + ), + ) + ) + if ( + self.target_voltage is not None + and "target_voltage" not in already_processed + ): + already_processed.add("target_voltage") + outfile.write( + " targetVoltage=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.target_voltage), + input_name="targetVoltage", + ) + ), + ) + ) + if ( + self.simple_series_resistance is not None + and "simple_series_resistance" not in already_processed + ): + already_processed.add("simple_series_resistance") + outfile.write( + " simpleSeriesResistance=%s" + % ( + self.gds_encode( + self.gds_format_string( + 
quote_attrib(self.simple_series_resistance), + input_name="simpleSeriesResistance", + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="VoltageClamp", + fromsubclass_=False, + pretty_print=True, + ): + super(VoltageClamp, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('delay', node) - if value is not None and 'delay' not in already_processed: - already_processed.add('delay') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("delay", node) + if value is not None and "delay" not in already_processed: + already_processed.add("delay") self.delay = value - self.validate_Nml2Quantity_time(self.delay) # validate type Nml2Quantity_time - value = find_attr_value_('duration', node) - if value is not None and 'duration' not in already_processed: - already_processed.add('duration') + self.validate_Nml2Quantity_time( + self.delay + ) # validate type Nml2Quantity_time + value = find_attr_value_("duration", node) + if value is not None and "duration" not in already_processed: + already_processed.add("duration") self.duration = value - self.validate_Nml2Quantity_time(self.duration) # validate type Nml2Quantity_time - value = find_attr_value_('targetVoltage', node) - if value is not None and 'targetVoltage' not in already_processed: - 
already_processed.add('targetVoltage') + self.validate_Nml2Quantity_time( + self.duration + ) # validate type Nml2Quantity_time + value = find_attr_value_("targetVoltage", node) + if value is not None and "targetVoltage" not in already_processed: + already_processed.add("targetVoltage") self.target_voltage = value - self.validate_Nml2Quantity_voltage(self.target_voltage) # validate type Nml2Quantity_voltage - value = find_attr_value_('simpleSeriesResistance', node) - if value is not None and 'simpleSeriesResistance' not in already_processed: - already_processed.add('simpleSeriesResistance') + self.validate_Nml2Quantity_voltage( + self.target_voltage + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("simpleSeriesResistance", node) + if value is not None and "simpleSeriesResistance" not in already_processed: + already_processed.add("simpleSeriesResistance") self.simple_series_resistance = value - self.validate_Nml2Quantity_resistance(self.simple_series_resistance) # validate type Nml2Quantity_resistance - super(VoltageClamp, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(VoltageClamp, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_resistance( + self.simple_series_resistance + ) # validate type Nml2Quantity_resistance + super(VoltageClamp, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(VoltageClamp, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class VoltageClamp class CompoundInputDL(Standalone): + """CompoundInputDL -- Generates a current which is the sum of all its child **basePointCurrentDL** elements, e. g. 
can be a combination of **pulseGeneratorDL** , **sineGeneratorDL** elements producing a single **i.** Scaled by **weight,** if set""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('pulse_generator_dls', 'PulseGeneratorDL', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'PulseGeneratorDL', u'name': u'pulseGeneratorDL', u'minOccurs': u'0'}, None), - MemberSpec_('sine_generator_dls', 'SineGeneratorDL', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SineGeneratorDL', u'name': u'sineGeneratorDL', u'minOccurs': u'0'}, None), - MemberSpec_('ramp_generator_dls', 'RampGeneratorDL', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'RampGeneratorDL', u'name': u'rampGeneratorDL', u'minOccurs': u'0'}, None), + MemberSpec_( + "pulse_generator_dls", + "PulseGeneratorDL", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "pulseGeneratorDL", + "type": "PulseGeneratorDL", + }, + None, + ), + MemberSpec_( + "sine_generator_dls", + "SineGeneratorDL", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "sineGeneratorDL", + "type": "SineGeneratorDL", + }, + None, + ), + MemberSpec_( + "ramp_generator_dls", + "RampGeneratorDL", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "rampGeneratorDL", + "type": "RampGeneratorDL", + }, + None, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, pulse_generator_dls=None, sine_generator_dls=None, ramp_generator_dls=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + pulse_generator_dls=None, + sine_generator_dls=None, + ramp_generator_dls=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(CompoundInputDL, 
self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("CompoundInputDL"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) if pulse_generator_dls is None: self.pulse_generator_dls = [] else: self.pulse_generator_dls = pulse_generator_dls + self.pulse_generator_dls_nsprefix_ = None if sine_generator_dls is None: self.sine_generator_dls = [] else: self.sine_generator_dls = sine_generator_dls + self.sine_generator_dls_nsprefix_ = None if ramp_generator_dls is None: self.ramp_generator_dls = [] else: self.ramp_generator_dls = ramp_generator_dls + self.ramp_generator_dls_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, CompoundInputDL) + subclass = getSubclassFromModule_(CurrentSubclassModule_, CompoundInputDL) if subclass is not None: return subclass(*args_, **kwargs_) if CompoundInputDL.subclass: return CompoundInputDL.subclass(*args_, **kwargs_) else: return CompoundInputDL(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): + + def _hasContent(self): if ( - self.pulse_generator_dls or - self.sine_generator_dls or - self.ramp_generator_dls or - super(CompoundInputDL, self).hasContent_() + self.pulse_generator_dls + or self.sine_generator_dls + or self.ramp_generator_dls + or super(CompoundInputDL, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CompoundInputDL', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('CompoundInputDL') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="CompoundInputDL", + pretty_print=True, + ): + imported_ns_def_ = 
GenerateDSNamespaceDefs_.get("CompoundInputDL") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "CompoundInputDL": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CompoundInputDL') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CompoundInputDL', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="CompoundInputDL" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="CompoundInputDL", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CompoundInputDL'): - super(CompoundInputDL, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CompoundInputDL') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CompoundInputDL', fromsubclass_=False, pretty_print=True): - super(CompoundInputDL, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + 
outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="CompoundInputDL", + ): + super(CompoundInputDL, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="CompoundInputDL" + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="CompoundInputDL", + fromsubclass_=False, + pretty_print=True, + ): + super(CompoundInputDL, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for pulseGeneratorDL_ in self.pulse_generator_dls: - pulseGeneratorDL_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='pulseGeneratorDL', pretty_print=pretty_print) + namespaceprefix_ = ( + self.pulse_generator_dls_nsprefix_ + ":" + if (UseCapturedNS_ and self.pulse_generator_dls_nsprefix_) + else "" + ) + pulseGeneratorDL_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="pulseGeneratorDL", + pretty_print=pretty_print, + ) for sineGeneratorDL_ in self.sine_generator_dls: - sineGeneratorDL_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='sineGeneratorDL', pretty_print=pretty_print) + namespaceprefix_ = ( + self.sine_generator_dls_nsprefix_ + ":" + if (UseCapturedNS_ and self.sine_generator_dls_nsprefix_) + else "" + ) + sineGeneratorDL_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="sineGeneratorDL", + pretty_print=pretty_print, + ) for rampGeneratorDL_ in self.ramp_generator_dls: - rampGeneratorDL_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='rampGeneratorDL', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + 
self.ramp_generator_dls_nsprefix_ + ":" + if (UseCapturedNS_ and self.ramp_generator_dls_nsprefix_) + else "" + ) + rampGeneratorDL_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="rampGeneratorDL", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(CompoundInputDL, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'pulseGeneratorDL': + + def _buildAttributes(self, node, attrs, already_processed): + super(CompoundInputDL, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "pulseGeneratorDL": obj_ = PulseGeneratorDL.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.pulse_generator_dls.append(obj_) - obj_.original_tagname_ = 'pulseGeneratorDL' - elif nodeName_ == 'sineGeneratorDL': + obj_.original_tagname_ = "pulseGeneratorDL" + elif nodeName_ == "sineGeneratorDL": obj_ = SineGeneratorDL.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.sine_generator_dls.append(obj_) - obj_.original_tagname_ = 'sineGeneratorDL' - elif nodeName_ == 'rampGeneratorDL': + obj_.original_tagname_ = "sineGeneratorDL" + elif nodeName_ == "rampGeneratorDL": 
obj_ = RampGeneratorDL.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.ramp_generator_dls.append(obj_) - obj_.original_tagname_ = 'rampGeneratorDL' - super(CompoundInputDL, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "rampGeneratorDL" + super(CompoundInputDL, self)._buildChildren(child_, node, nodeName_, True) + + # end class CompoundInputDL class CompoundInput(Standalone): + """CompoundInput -- Generates a current which is the sum of all its child **basePointCurrent** element, e. g. can be a combination of **pulseGenerator** , **sineGenerator** elements producing a single **i.** Scaled by **weight,** if set""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('pulse_generators', 'PulseGenerator', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'PulseGenerator', u'name': u'pulseGenerator', u'minOccurs': u'0'}, None), - MemberSpec_('sine_generators', 'SineGenerator', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SineGenerator', u'name': u'sineGenerator', u'minOccurs': u'0'}, None), - MemberSpec_('ramp_generators', 'RampGenerator', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'RampGenerator', u'name': u'rampGenerator', u'minOccurs': u'0'}, None), + MemberSpec_( + "pulse_generators", + "PulseGenerator", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "pulseGenerator", + "type": "PulseGenerator", + }, + None, + ), + MemberSpec_( + "sine_generators", + "SineGenerator", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "sineGenerator", + "type": "SineGenerator", + }, + None, + ), + MemberSpec_( + "ramp_generators", + "RampGenerator", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "rampGenerator", + "type": "RampGenerator", + }, + None, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, 
annotation=None, pulse_generators=None, sine_generators=None, ramp_generators=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + pulse_generators=None, + sine_generators=None, + ramp_generators=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(CompoundInput, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("CompoundInput"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) if pulse_generators is None: self.pulse_generators = [] else: self.pulse_generators = pulse_generators + self.pulse_generators_nsprefix_ = None if sine_generators is None: self.sine_generators = [] else: self.sine_generators = sine_generators + self.sine_generators_nsprefix_ = None if ramp_generators is None: self.ramp_generators = [] else: self.ramp_generators = ramp_generators + self.ramp_generators_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, CompoundInput) + subclass = getSubclassFromModule_(CurrentSubclassModule_, CompoundInput) if subclass is not None: return subclass(*args_, **kwargs_) if CompoundInput.subclass: return CompoundInput.subclass(*args_, **kwargs_) else: return CompoundInput(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): + + def _hasContent(self): if ( - self.pulse_generators or - self.sine_generators or - self.ramp_generators or - super(CompoundInput, self).hasContent_() + self.pulse_generators + or self.sine_generators + or self.ramp_generators + or super(CompoundInput, self)._hasContent() ): return True else: return 
False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CompoundInput', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('CompoundInput') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="CompoundInput", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("CompoundInput") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "CompoundInput": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CompoundInput') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CompoundInput', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="CompoundInput" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="CompoundInput", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CompoundInput'): - super(CompoundInput, 
self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CompoundInput') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CompoundInput', fromsubclass_=False, pretty_print=True): - super(CompoundInput, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="CompoundInput", + ): + super(CompoundInput, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="CompoundInput" + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="CompoundInput", + fromsubclass_=False, + pretty_print=True, + ): + super(CompoundInput, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for pulseGenerator_ in self.pulse_generators: - pulseGenerator_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='pulseGenerator', pretty_print=pretty_print) + namespaceprefix_ = ( + self.pulse_generators_nsprefix_ + ":" + if (UseCapturedNS_ and self.pulse_generators_nsprefix_) + else "" + ) + pulseGenerator_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="pulseGenerator", + pretty_print=pretty_print, + ) for sineGenerator_ in self.sine_generators: - sineGenerator_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='sineGenerator', pretty_print=pretty_print) + namespaceprefix_ = ( + self.sine_generators_nsprefix_ + ":" + if (UseCapturedNS_ and self.sine_generators_nsprefix_) + else "" + ) + sineGenerator_.export( + outfile, + level, + namespaceprefix_, + 
namespacedef_="", + name_="sineGenerator", + pretty_print=pretty_print, + ) for rampGenerator_ in self.ramp_generators: - rampGenerator_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='rampGenerator', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.ramp_generators_nsprefix_ + ":" + if (UseCapturedNS_ and self.ramp_generators_nsprefix_) + else "" + ) + rampGenerator_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="rampGenerator", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(CompoundInput, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'pulseGenerator': + + def _buildAttributes(self, node, attrs, already_processed): + super(CompoundInput, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "pulseGenerator": obj_ = PulseGenerator.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.pulse_generators.append(obj_) - obj_.original_tagname_ = 'pulseGenerator' - elif nodeName_ == 'sineGenerator': + obj_.original_tagname_ = "pulseGenerator" + elif nodeName_ == "sineGenerator": obj_ = SineGenerator.factory(parent_object_=self) - 
obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.sine_generators.append(obj_) - obj_.original_tagname_ = 'sineGenerator' - elif nodeName_ == 'rampGenerator': + obj_.original_tagname_ = "sineGenerator" + elif nodeName_ == "rampGenerator": obj_ = RampGenerator.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.ramp_generators.append(obj_) - obj_.original_tagname_ = 'rampGenerator' - super(CompoundInput, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "rampGenerator" + super(CompoundInput, self)._buildChildren(child_, node, nodeName_, True) + + # end class CompoundInput class RampGeneratorDL(Standalone): + """RampGeneratorDL -- Dimensionless equivalent of **rampGenerator** . Generates a ramping current after a time **delay,** for a fixed **duration.** During this time the dimensionless current steadily changes from **startAmplitude** to **finishAmplitude.** Scaled by **weight,** if set + \n + :param delay: Delay before change in current. Current is baselineAmplitude prior to this. + :type delay: time + :param duration: Duration for holding current at amplitude. Current is baselineAmplitude after delay + duration. 
+ :type duration: time + :param startAmplitude: Amplitude of linearly varying current at time delay + :type startAmplitude: none + :param finishAmplitude: Amplitude of linearly varying current at time delay + duration + :type finishAmplitude: none + :param baselineAmplitude: Amplitude of current before time delay, and after time delay + duration + :type baselineAmplitude: none + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('start_amplitude', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('finish_amplitude', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('baseline_amplitude', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), + MemberSpec_( + "delay", "Nml2Quantity_time", 0, 0, {"use": "required", "name": "delay"} + ), + MemberSpec_( + "duration", + "Nml2Quantity_time", + 0, + 0, + {"use": "required", "name": "duration"}, + ), + MemberSpec_( + "start_amplitude", + "Nml2Quantity_none", + 0, + 0, + {"use": "required", "name": "start_amplitude"}, + ), + MemberSpec_( + "finish_amplitude", + "Nml2Quantity_none", + 0, + 0, + {"use": "required", "name": "finish_amplitude"}, + ), + MemberSpec_( + "baseline_amplitude", + "Nml2Quantity_none", + 0, + 0, + {"use": "required", "name": "baseline_amplitude"}, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, delay=None, duration=None, start_amplitude=None, finish_amplitude=None, baseline_amplitude=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + delay=None, + duration=None, + start_amplitude=None, + finish_amplitude=None, + baseline_amplitude=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = 
gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(RampGeneratorDL, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("RampGeneratorDL"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.delay = _cast(None, delay) + self.delay_nsprefix_ = None self.duration = _cast(None, duration) + self.duration_nsprefix_ = None self.start_amplitude = _cast(None, start_amplitude) + self.start_amplitude_nsprefix_ = None self.finish_amplitude = _cast(None, finish_amplitude) + self.finish_amplitude_nsprefix_ = None self.baseline_amplitude = _cast(None, baseline_amplitude) + self.baseline_amplitude_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, RampGeneratorDL) + subclass = getSubclassFromModule_(CurrentSubclassModule_, RampGeneratorDL) if subclass is not None: return subclass(*args_, **kwargs_) if RampGeneratorDL.subclass: return RampGeneratorDL.subclass(*args_, **kwargs_) else: return RampGeneratorDL(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$"] + ] + def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] - def hasContent_(self): if ( - super(RampGeneratorDL, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_none_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_none_patterns_, + ) + ) + + validate_Nml2Quantity_none_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?)$"] + ] + + def _hasContent(self): + if super(RampGeneratorDL, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RampGeneratorDL', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('RampGeneratorDL') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="RampGeneratorDL", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("RampGeneratorDL") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "RampGeneratorDL": name_ = self.original_tagname_ + if UseCapturedNS_ and 
self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RampGeneratorDL') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RampGeneratorDL', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="RampGeneratorDL" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="RampGeneratorDL", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RampGeneratorDL'): - super(RampGeneratorDL, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RampGeneratorDL') - if self.delay is not None and 'delay' not in already_processed: - already_processed.add('delay') - outfile.write(' delay=%s' % (quote_attrib(self.delay), )) - if self.duration is not None and 'duration' not in already_processed: - already_processed.add('duration') - outfile.write(' duration=%s' % (quote_attrib(self.duration), )) - if self.start_amplitude is not None and 'start_amplitude' not in already_processed: - already_processed.add('start_amplitude') - outfile.write(' startAmplitude=%s' % (quote_attrib(self.start_amplitude), )) - if self.finish_amplitude is not None and 'finish_amplitude' not in already_processed: - 
already_processed.add('finish_amplitude') - outfile.write(' finishAmplitude=%s' % (quote_attrib(self.finish_amplitude), )) - if self.baseline_amplitude is not None and 'baseline_amplitude' not in already_processed: - already_processed.add('baseline_amplitude') - outfile.write(' baselineAmplitude=%s' % (quote_attrib(self.baseline_amplitude), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RampGeneratorDL', fromsubclass_=False, pretty_print=True): - super(RampGeneratorDL, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="RampGeneratorDL", + ): + super(RampGeneratorDL, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="RampGeneratorDL" + ) + if self.delay is not None and "delay" not in already_processed: + already_processed.add("delay") + outfile.write( + " delay=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.delay), input_name="delay" + ) + ), + ) + ) + if self.duration is not None and "duration" not in already_processed: + already_processed.add("duration") + outfile.write( + " duration=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.duration), input_name="duration" + ) + ), + ) + ) + if ( + self.start_amplitude is not None + and "start_amplitude" not in already_processed + ): + already_processed.add("start_amplitude") + outfile.write( + " startAmplitude=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.start_amplitude), + input_name="startAmplitude", + ) + ), + ) + ) + if ( + self.finish_amplitude is not None + and "finish_amplitude" not in 
already_processed + ): + already_processed.add("finish_amplitude") + outfile.write( + " finishAmplitude=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.finish_amplitude), + input_name="finishAmplitude", + ) + ), + ) + ) + if ( + self.baseline_amplitude is not None + and "baseline_amplitude" not in already_processed + ): + already_processed.add("baseline_amplitude") + outfile.write( + " baselineAmplitude=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.baseline_amplitude), + input_name="baselineAmplitude", + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="RampGeneratorDL", + fromsubclass_=False, + pretty_print=True, + ): + super(RampGeneratorDL, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('delay', node) - if value is not None and 'delay' not in already_processed: - already_processed.add('delay') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("delay", node) + if value is not None and "delay" not in already_processed: + already_processed.add("delay") self.delay = value - self.validate_Nml2Quantity_time(self.delay) # validate type Nml2Quantity_time - value = find_attr_value_('duration', node) - if value is not None and 'duration' not in already_processed: - 
already_processed.add('duration') + self.validate_Nml2Quantity_time( + self.delay + ) # validate type Nml2Quantity_time + value = find_attr_value_("duration", node) + if value is not None and "duration" not in already_processed: + already_processed.add("duration") self.duration = value - self.validate_Nml2Quantity_time(self.duration) # validate type Nml2Quantity_time - value = find_attr_value_('startAmplitude', node) - if value is not None and 'startAmplitude' not in already_processed: - already_processed.add('startAmplitude') + self.validate_Nml2Quantity_time( + self.duration + ) # validate type Nml2Quantity_time + value = find_attr_value_("startAmplitude", node) + if value is not None and "startAmplitude" not in already_processed: + already_processed.add("startAmplitude") self.start_amplitude = value - self.validate_Nml2Quantity_none(self.start_amplitude) # validate type Nml2Quantity_none - value = find_attr_value_('finishAmplitude', node) - if value is not None and 'finishAmplitude' not in already_processed: - already_processed.add('finishAmplitude') + self.validate_Nml2Quantity_none( + self.start_amplitude + ) # validate type Nml2Quantity_none + value = find_attr_value_("finishAmplitude", node) + if value is not None and "finishAmplitude" not in already_processed: + already_processed.add("finishAmplitude") self.finish_amplitude = value - self.validate_Nml2Quantity_none(self.finish_amplitude) # validate type Nml2Quantity_none - value = find_attr_value_('baselineAmplitude', node) - if value is not None and 'baselineAmplitude' not in already_processed: - already_processed.add('baselineAmplitude') + self.validate_Nml2Quantity_none( + self.finish_amplitude + ) # validate type Nml2Quantity_none + value = find_attr_value_("baselineAmplitude", node) + if value is not None and "baselineAmplitude" not in already_processed: + already_processed.add("baselineAmplitude") self.baseline_amplitude = value - self.validate_Nml2Quantity_none(self.baseline_amplitude) # validate 
type Nml2Quantity_none - super(RampGeneratorDL, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(RampGeneratorDL, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_none( + self.baseline_amplitude + ) # validate type Nml2Quantity_none + super(RampGeneratorDL, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(RampGeneratorDL, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class RampGeneratorDL class RampGenerator(Standalone): + """RampGenerator -- Generates a ramping current after a time **delay,** for a fixed **duration.** During this time the current steadily changes from **startAmplitude** to **finishAmplitude.** Scaled by **weight,** if set + \n + :param delay: Delay before change in current. Current is baselineAmplitude prior to this. + :type delay: time + :param duration: Duration for holding current at amplitude. Current is baselineAmplitude after delay + duration. 
+ :type duration: time + :param startAmplitude: Amplitude of linearly varying current at time delay + :type startAmplitude: current + :param finishAmplitude: Amplitude of linearly varying current at time delay + duration + :type finishAmplitude: current + :param baselineAmplitude: Amplitude of current before time delay, and after time delay + duration + :type baselineAmplitude: current + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('start_amplitude', 'Nml2Quantity_current', 0, 0, {'use': u'required'}), - MemberSpec_('finish_amplitude', 'Nml2Quantity_current', 0, 0, {'use': u'required'}), - MemberSpec_('baseline_amplitude', 'Nml2Quantity_current', 0, 0, {'use': u'required'}), + MemberSpec_( + "delay", "Nml2Quantity_time", 0, 0, {"use": "required", "name": "delay"} + ), + MemberSpec_( + "duration", + "Nml2Quantity_time", + 0, + 0, + {"use": "required", "name": "duration"}, + ), + MemberSpec_( + "start_amplitude", + "Nml2Quantity_current", + 0, + 0, + {"use": "required", "name": "start_amplitude"}, + ), + MemberSpec_( + "finish_amplitude", + "Nml2Quantity_current", + 0, + 0, + {"use": "required", "name": "finish_amplitude"}, + ), + MemberSpec_( + "baseline_amplitude", + "Nml2Quantity_current", + 0, + 0, + {"use": "required", "name": "baseline_amplitude"}, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, delay=None, duration=None, start_amplitude=None, finish_amplitude=None, baseline_amplitude=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + delay=None, + duration=None, + start_amplitude=None, + finish_amplitude=None, + baseline_amplitude=None, + gds_collector_=None, + **kwargs_ + ): + 
self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(RampGenerator, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("RampGenerator"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.delay = _cast(None, delay) + self.delay_nsprefix_ = None self.duration = _cast(None, duration) + self.duration_nsprefix_ = None self.start_amplitude = _cast(None, start_amplitude) + self.start_amplitude_nsprefix_ = None self.finish_amplitude = _cast(None, finish_amplitude) + self.finish_amplitude_nsprefix_ = None self.baseline_amplitude = _cast(None, baseline_amplitude) + self.baseline_amplitude_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, RampGenerator) + subclass = getSubclassFromModule_(CurrentSubclassModule_, RampGenerator) if subclass is not None: return subclass(*args_, **kwargs_) if RampGenerator.subclass: return RampGenerator.subclass(*args_, **kwargs_) else: return RampGenerator(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$"] + ] + def validate_Nml2Quantity_current(self, value): # Validate type Nml2Quantity_current, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_current_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_current_patterns_, )) - validate_Nml2Quantity_current_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA)$']] - def hasContent_(self): if ( - super(RampGenerator, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_current_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_current_patterns_, + ) + ) + + validate_Nml2Quantity_current_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA))$"] + ] + + def _hasContent(self): + if super(RampGenerator, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RampGenerator', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('RampGenerator') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="RampGenerator", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("RampGenerator") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "RampGenerator": name_ = self.original_tagname_ 
+ if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RampGenerator') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RampGenerator', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="RampGenerator" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="RampGenerator", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RampGenerator'): - super(RampGenerator, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RampGenerator') - if self.delay is not None and 'delay' not in already_processed: - already_processed.add('delay') - outfile.write(' delay=%s' % (quote_attrib(self.delay), )) - if self.duration is not None and 'duration' not in already_processed: - already_processed.add('duration') - outfile.write(' duration=%s' % (quote_attrib(self.duration), )) - if self.start_amplitude is not None and 'start_amplitude' not in already_processed: - already_processed.add('start_amplitude') - outfile.write(' startAmplitude=%s' % (quote_attrib(self.start_amplitude), )) - if self.finish_amplitude is not None and 'finish_amplitude' not in already_processed: - 
already_processed.add('finish_amplitude') - outfile.write(' finishAmplitude=%s' % (quote_attrib(self.finish_amplitude), )) - if self.baseline_amplitude is not None and 'baseline_amplitude' not in already_processed: - already_processed.add('baseline_amplitude') - outfile.write(' baselineAmplitude=%s' % (quote_attrib(self.baseline_amplitude), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RampGenerator', fromsubclass_=False, pretty_print=True): - super(RampGenerator, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="RampGenerator", + ): + super(RampGenerator, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="RampGenerator" + ) + if self.delay is not None and "delay" not in already_processed: + already_processed.add("delay") + outfile.write( + " delay=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.delay), input_name="delay" + ) + ), + ) + ) + if self.duration is not None and "duration" not in already_processed: + already_processed.add("duration") + outfile.write( + " duration=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.duration), input_name="duration" + ) + ), + ) + ) + if ( + self.start_amplitude is not None + and "start_amplitude" not in already_processed + ): + already_processed.add("start_amplitude") + outfile.write( + " startAmplitude=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.start_amplitude), + input_name="startAmplitude", + ) + ), + ) + ) + if ( + self.finish_amplitude is not None + and "finish_amplitude" not in already_processed + 
): + already_processed.add("finish_amplitude") + outfile.write( + " finishAmplitude=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.finish_amplitude), + input_name="finishAmplitude", + ) + ), + ) + ) + if ( + self.baseline_amplitude is not None + and "baseline_amplitude" not in already_processed + ): + already_processed.add("baseline_amplitude") + outfile.write( + " baselineAmplitude=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.baseline_amplitude), + input_name="baselineAmplitude", + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="RampGenerator", + fromsubclass_=False, + pretty_print=True, + ): + super(RampGenerator, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('delay', node) - if value is not None and 'delay' not in already_processed: - already_processed.add('delay') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("delay", node) + if value is not None and "delay" not in already_processed: + already_processed.add("delay") self.delay = value - self.validate_Nml2Quantity_time(self.delay) # validate type Nml2Quantity_time - value = find_attr_value_('duration', node) - if value is not None and 'duration' not in already_processed: - 
already_processed.add('duration') + self.validate_Nml2Quantity_time( + self.delay + ) # validate type Nml2Quantity_time + value = find_attr_value_("duration", node) + if value is not None and "duration" not in already_processed: + already_processed.add("duration") self.duration = value - self.validate_Nml2Quantity_time(self.duration) # validate type Nml2Quantity_time - value = find_attr_value_('startAmplitude', node) - if value is not None and 'startAmplitude' not in already_processed: - already_processed.add('startAmplitude') + self.validate_Nml2Quantity_time( + self.duration + ) # validate type Nml2Quantity_time + value = find_attr_value_("startAmplitude", node) + if value is not None and "startAmplitude" not in already_processed: + already_processed.add("startAmplitude") self.start_amplitude = value - self.validate_Nml2Quantity_current(self.start_amplitude) # validate type Nml2Quantity_current - value = find_attr_value_('finishAmplitude', node) - if value is not None and 'finishAmplitude' not in already_processed: - already_processed.add('finishAmplitude') + self.validate_Nml2Quantity_current( + self.start_amplitude + ) # validate type Nml2Quantity_current + value = find_attr_value_("finishAmplitude", node) + if value is not None and "finishAmplitude" not in already_processed: + already_processed.add("finishAmplitude") self.finish_amplitude = value - self.validate_Nml2Quantity_current(self.finish_amplitude) # validate type Nml2Quantity_current - value = find_attr_value_('baselineAmplitude', node) - if value is not None and 'baselineAmplitude' not in already_processed: - already_processed.add('baselineAmplitude') + self.validate_Nml2Quantity_current( + self.finish_amplitude + ) # validate type Nml2Quantity_current + value = find_attr_value_("baselineAmplitude", node) + if value is not None and "baselineAmplitude" not in already_processed: + already_processed.add("baselineAmplitude") self.baseline_amplitude = value - 
self.validate_Nml2Quantity_current(self.baseline_amplitude) # validate type Nml2Quantity_current - super(RampGenerator, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(RampGenerator, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_current( + self.baseline_amplitude + ) # validate type Nml2Quantity_current + super(RampGenerator, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(RampGenerator, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class RampGenerator class SineGeneratorDL(Standalone): + """SineGeneratorDL -- Dimensionless equivalent of **sineGenerator** . Generates a sinusoidally varying current after a time **delay,** for a fixed **duration.** The **period** and maximum **amplitude** of the current can be set as well as the **phase** at which to start. Scaled by **weight,** if set + \n + :param phase: Phase ( between 0 and 2*pi ) at which to start the varying current ( i. e. at time given by delay ) + :type phase: none + :param delay: Delay before change in current. Current is zero prior to this. + :type delay: time + :param duration: Duration for holding current at amplitude. Current is zero after delay + duration. 
+ :type duration: time + :param amplitude: Maximum amplitude of current + :type amplitude: none + :param period: Time period of oscillation + :type period: time + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('phase', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('amplitude', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('period', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_( + "delay", "Nml2Quantity_time", 0, 0, {"use": "required", "name": "delay"} + ), + MemberSpec_( + "phase", "Nml2Quantity_none", 0, 0, {"use": "required", "name": "phase"} + ), + MemberSpec_( + "duration", + "Nml2Quantity_time", + 0, + 0, + {"use": "required", "name": "duration"}, + ), + MemberSpec_( + "amplitude", + "Nml2Quantity_none", + 0, + 0, + {"use": "required", "name": "amplitude"}, + ), + MemberSpec_( + "period", "Nml2Quantity_time", 0, 0, {"use": "required", "name": "period"} + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, delay=None, phase=None, duration=None, amplitude=None, period=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + delay=None, + phase=None, + duration=None, + amplitude=None, + period=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(SineGeneratorDL, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("SineGeneratorDL"), self).__init__( + 
neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.delay = _cast(None, delay) + self.delay_nsprefix_ = None self.phase = _cast(None, phase) + self.phase_nsprefix_ = None self.duration = _cast(None, duration) + self.duration_nsprefix_ = None self.amplitude = _cast(None, amplitude) + self.amplitude_nsprefix_ = None self.period = _cast(None, period) + self.period_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, SineGeneratorDL) + subclass = getSubclassFromModule_(CurrentSubclassModule_, SineGeneratorDL) if subclass is not None: return subclass(*args_, **kwargs_) if SineGeneratorDL.subclass: return SineGeneratorDL.subclass(*args_, **kwargs_) else: return SineGeneratorDL(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + 
["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$"] + ] + def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] - def hasContent_(self): if ( - super(SineGeneratorDL, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_none_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_none_patterns_, + ) + ) + + validate_Nml2Quantity_none_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?)$"] + ] + + def _hasContent(self): + if super(SineGeneratorDL, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SineGeneratorDL', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SineGeneratorDL') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SineGeneratorDL", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SineGeneratorDL") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if 
self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "SineGeneratorDL": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SineGeneratorDL') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SineGeneratorDL', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SineGeneratorDL" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SineGeneratorDL", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SineGeneratorDL'): - super(SineGeneratorDL, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SineGeneratorDL') - if self.delay is not None and 'delay' not in already_processed: - already_processed.add('delay') - outfile.write(' delay=%s' % (quote_attrib(self.delay), )) - if self.phase is not None and 'phase' not in already_processed: - already_processed.add('phase') - outfile.write(' phase=%s' % (quote_attrib(self.phase), )) - if self.duration is not None and 'duration' not in already_processed: - already_processed.add('duration') - outfile.write(' duration=%s' % 
(quote_attrib(self.duration), )) - if self.amplitude is not None and 'amplitude' not in already_processed: - already_processed.add('amplitude') - outfile.write(' amplitude=%s' % (quote_attrib(self.amplitude), )) - if self.period is not None and 'period' not in already_processed: - already_processed.add('period') - outfile.write(' period=%s' % (quote_attrib(self.period), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SineGeneratorDL', fromsubclass_=False, pretty_print=True): - super(SineGeneratorDL, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="SineGeneratorDL", + ): + super(SineGeneratorDL, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SineGeneratorDL" + ) + if self.delay is not None and "delay" not in already_processed: + already_processed.add("delay") + outfile.write( + " delay=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.delay), input_name="delay" + ) + ), + ) + ) + if self.phase is not None and "phase" not in already_processed: + already_processed.add("phase") + outfile.write( + " phase=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.phase), input_name="phase" + ) + ), + ) + ) + if self.duration is not None and "duration" not in already_processed: + already_processed.add("duration") + outfile.write( + " duration=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.duration), input_name="duration" + ) + ), + ) + ) + if self.amplitude is not None and "amplitude" not in already_processed: + already_processed.add("amplitude") + 
outfile.write( + " amplitude=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.amplitude), input_name="amplitude" + ) + ), + ) + ) + if self.period is not None and "period" not in already_processed: + already_processed.add("period") + outfile.write( + " period=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.period), input_name="period" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SineGeneratorDL", + fromsubclass_=False, + pretty_print=True, + ): + super(SineGeneratorDL, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('delay', node) - if value is not None and 'delay' not in already_processed: - already_processed.add('delay') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("delay", node) + if value is not None and "delay" not in already_processed: + already_processed.add("delay") self.delay = value - self.validate_Nml2Quantity_time(self.delay) # validate type Nml2Quantity_time - value = find_attr_value_('phase', node) - if value is not None and 'phase' not in already_processed: - already_processed.add('phase') + self.validate_Nml2Quantity_time( + self.delay + ) # validate type Nml2Quantity_time + value = find_attr_value_("phase", node) + if value is not None and 
"phase" not in already_processed: + already_processed.add("phase") self.phase = value - self.validate_Nml2Quantity_none(self.phase) # validate type Nml2Quantity_none - value = find_attr_value_('duration', node) - if value is not None and 'duration' not in already_processed: - already_processed.add('duration') + self.validate_Nml2Quantity_none( + self.phase + ) # validate type Nml2Quantity_none + value = find_attr_value_("duration", node) + if value is not None and "duration" not in already_processed: + already_processed.add("duration") self.duration = value - self.validate_Nml2Quantity_time(self.duration) # validate type Nml2Quantity_time - value = find_attr_value_('amplitude', node) - if value is not None and 'amplitude' not in already_processed: - already_processed.add('amplitude') + self.validate_Nml2Quantity_time( + self.duration + ) # validate type Nml2Quantity_time + value = find_attr_value_("amplitude", node) + if value is not None and "amplitude" not in already_processed: + already_processed.add("amplitude") self.amplitude = value - self.validate_Nml2Quantity_none(self.amplitude) # validate type Nml2Quantity_none - value = find_attr_value_('period', node) - if value is not None and 'period' not in already_processed: - already_processed.add('period') + self.validate_Nml2Quantity_none( + self.amplitude + ) # validate type Nml2Quantity_none + value = find_attr_value_("period", node) + if value is not None and "period" not in already_processed: + already_processed.add("period") self.period = value - self.validate_Nml2Quantity_time(self.period) # validate type Nml2Quantity_time - super(SineGeneratorDL, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(SineGeneratorDL, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_time( + self.period + ) # validate type Nml2Quantity_time + super(SineGeneratorDL, self)._buildAttributes(node, attrs, 
already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(SineGeneratorDL, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class SineGeneratorDL class SineGenerator(Standalone): + """SineGenerator -- Generates a sinusoidally varying current after a time **delay,** for a fixed **duration.** The **period** and maximum **amplitude** of the current can be set as well as the **phase** at which to start. Scaled by **weight,** if set + \n + :param phase: Phase ( between 0 and 2*pi ) at which to start the varying current ( i. e. at time given by delay ) + :type phase: none + :param delay: Delay before change in current. Current is zero prior to this. + :type delay: time + :param duration: Duration for holding current at amplitude. Current is zero after delay + duration. + :type duration: time + :param amplitude: Maximum amplitude of current + :type amplitude: current + :param period: Time period of oscillation + :type period: time + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('phase', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('amplitude', 'Nml2Quantity_current', 0, 0, {'use': u'required'}), - MemberSpec_('period', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_( + "delay", "Nml2Quantity_time", 0, 0, {"use": "required", "name": "delay"} + ), + MemberSpec_( + "phase", "Nml2Quantity_none", 0, 0, {"use": "required", "name": "phase"} + ), + MemberSpec_( + "duration", + "Nml2Quantity_time", + 0, + 0, + {"use": "required", "name": "duration"}, + ), + MemberSpec_( + "amplitude", + "Nml2Quantity_current", + 0, + 0, + {"use": "required", "name": "amplitude"}, + ), + MemberSpec_( + "period", "Nml2Quantity_time", 0, 0, {"use": "required", "name": "period"} + ), ] subclass = 
None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, delay=None, phase=None, duration=None, amplitude=None, period=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + delay=None, + phase=None, + duration=None, + amplitude=None, + period=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(SineGenerator, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("SineGenerator"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.delay = _cast(None, delay) + self.delay_nsprefix_ = None self.phase = _cast(None, phase) + self.phase_nsprefix_ = None self.duration = _cast(None, duration) + self.duration_nsprefix_ = None self.amplitude = _cast(None, amplitude) + self.amplitude_nsprefix_ = None self.period = _cast(None, period) + self.period_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, SineGenerator) + subclass = getSubclassFromModule_(CurrentSubclassModule_, SineGenerator) if subclass is not None: return subclass(*args_, **kwargs_) if SineGenerator.subclass: return SineGenerator.subclass(*args_, **kwargs_) else: return SineGenerator(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$"] + ] + def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] + self.validate_Nml2Quantity_none_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_none_patterns_, + ) + ) + + validate_Nml2Quantity_none_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?)$"] + ] + def validate_Nml2Quantity_current(self, value): # Validate type Nml2Quantity_current, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_current_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_current_patterns_, )) - validate_Nml2Quantity_current_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA)$']] - def hasContent_(self): if ( - super(SineGenerator, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_current_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_current_patterns_, + ) + ) + + validate_Nml2Quantity_current_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA))$"] + ] + + def _hasContent(self): + if super(SineGenerator, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SineGenerator', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SineGenerator') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SineGenerator", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SineGenerator") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "SineGenerator": name_ = self.original_tagname_ 
+ if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SineGenerator') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SineGenerator', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SineGenerator" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SineGenerator", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SineGenerator'): - super(SineGenerator, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SineGenerator') - if self.delay is not None and 'delay' not in already_processed: - already_processed.add('delay') - outfile.write(' delay=%s' % (quote_attrib(self.delay), )) - if self.phase is not None and 'phase' not in already_processed: - already_processed.add('phase') - outfile.write(' phase=%s' % (quote_attrib(self.phase), )) - if self.duration is not None and 'duration' not in already_processed: - already_processed.add('duration') - outfile.write(' duration=%s' % (quote_attrib(self.duration), )) - if self.amplitude is not None and 'amplitude' not in already_processed: - already_processed.add('amplitude') - outfile.write(' amplitude=%s' 
% (quote_attrib(self.amplitude), )) - if self.period is not None and 'period' not in already_processed: - already_processed.add('period') - outfile.write(' period=%s' % (quote_attrib(self.period), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SineGenerator', fromsubclass_=False, pretty_print=True): - super(SineGenerator, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="SineGenerator", + ): + super(SineGenerator, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SineGenerator" + ) + if self.delay is not None and "delay" not in already_processed: + already_processed.add("delay") + outfile.write( + " delay=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.delay), input_name="delay" + ) + ), + ) + ) + if self.phase is not None and "phase" not in already_processed: + already_processed.add("phase") + outfile.write( + " phase=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.phase), input_name="phase" + ) + ), + ) + ) + if self.duration is not None and "duration" not in already_processed: + already_processed.add("duration") + outfile.write( + " duration=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.duration), input_name="duration" + ) + ), + ) + ) + if self.amplitude is not None and "amplitude" not in already_processed: + already_processed.add("amplitude") + outfile.write( + " amplitude=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.amplitude), input_name="amplitude" + ) + ), + ) + ) + if self.period is not None and "period" 
not in already_processed: + already_processed.add("period") + outfile.write( + " period=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.period), input_name="period" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SineGenerator", + fromsubclass_=False, + pretty_print=True, + ): + super(SineGenerator, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('delay', node) - if value is not None and 'delay' not in already_processed: - already_processed.add('delay') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("delay", node) + if value is not None and "delay" not in already_processed: + already_processed.add("delay") self.delay = value - self.validate_Nml2Quantity_time(self.delay) # validate type Nml2Quantity_time - value = find_attr_value_('phase', node) - if value is not None and 'phase' not in already_processed: - already_processed.add('phase') + self.validate_Nml2Quantity_time( + self.delay + ) # validate type Nml2Quantity_time + value = find_attr_value_("phase", node) + if value is not None and "phase" not in already_processed: + already_processed.add("phase") self.phase = value - self.validate_Nml2Quantity_none(self.phase) # validate type Nml2Quantity_none - value = 
find_attr_value_('duration', node) - if value is not None and 'duration' not in already_processed: - already_processed.add('duration') + self.validate_Nml2Quantity_none( + self.phase + ) # validate type Nml2Quantity_none + value = find_attr_value_("duration", node) + if value is not None and "duration" not in already_processed: + already_processed.add("duration") self.duration = value - self.validate_Nml2Quantity_time(self.duration) # validate type Nml2Quantity_time - value = find_attr_value_('amplitude', node) - if value is not None and 'amplitude' not in already_processed: - already_processed.add('amplitude') + self.validate_Nml2Quantity_time( + self.duration + ) # validate type Nml2Quantity_time + value = find_attr_value_("amplitude", node) + if value is not None and "amplitude" not in already_processed: + already_processed.add("amplitude") self.amplitude = value - self.validate_Nml2Quantity_current(self.amplitude) # validate type Nml2Quantity_current - value = find_attr_value_('period', node) - if value is not None and 'period' not in already_processed: - already_processed.add('period') + self.validate_Nml2Quantity_current( + self.amplitude + ) # validate type Nml2Quantity_current + value = find_attr_value_("period", node) + if value is not None and "period" not in already_processed: + already_processed.add("period") self.period = value - self.validate_Nml2Quantity_time(self.period) # validate type Nml2Quantity_time - super(SineGenerator, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(SineGenerator, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_time( + self.period + ) # validate type Nml2Quantity_time + super(SineGenerator, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(SineGenerator, self)._buildChildren(child_, node, 
nodeName_, True) pass + + # end class SineGenerator class PulseGeneratorDL(Standalone): - """Generates a constant current pulse of a certain amplitude (non - dimensional) for a specified duration after a delay.""" + """PulseGeneratorDL -- Dimensionless equivalent of **pulseGenerator** . Generates a constant current pulse of a certain **amplitude** for a specified **duration** after a **delay.** Scaled by **weight,** if set + \n + :param delay: Delay before change in current. Current is zero prior to this. + :type delay: time + :param duration: Duration for holding current at amplitude. Current is zero after delay + duration. + :type duration: time + :param amplitude: Amplitude of current pulse + :type amplitude: none + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('amplitude', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), + MemberSpec_( + "delay", "Nml2Quantity_time", 0, 0, {"use": "required", "name": "delay"} + ), + MemberSpec_( + "duration", + "Nml2Quantity_time", + 0, + 0, + {"use": "required", "name": "duration"}, + ), + MemberSpec_( + "amplitude", + "Nml2Quantity_none", + 0, + 0, + {"use": "required", "name": "amplitude"}, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, delay=None, duration=None, amplitude=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + delay=None, + duration=None, + amplitude=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(PulseGeneratorDL, self).__init__(neuro_lex_id, id, metaid, notes, 
properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("PulseGeneratorDL"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.delay = _cast(None, delay) + self.delay_nsprefix_ = None self.duration = _cast(None, duration) + self.duration_nsprefix_ = None self.amplitude = _cast(None, amplitude) + self.amplitude_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, PulseGeneratorDL) + subclass = getSubclassFromModule_(CurrentSubclassModule_, PulseGeneratorDL) if subclass is not None: return subclass(*args_, **kwargs_) if PulseGeneratorDL.subclass: return PulseGeneratorDL.subclass(*args_, **kwargs_) else: return PulseGeneratorDL(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$"] + ] + def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] - def hasContent_(self): if ( - super(PulseGeneratorDL, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_none_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_none_patterns_, + ) + ) + + validate_Nml2Quantity_none_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?)$"] + ] + + def _hasContent(self): + if super(PulseGeneratorDL, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PulseGeneratorDL', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('PulseGeneratorDL') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PulseGeneratorDL", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("PulseGeneratorDL") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "PulseGeneratorDL": name_ = self.original_tagname_ + if UseCapturedNS_ and 
self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PulseGeneratorDL') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PulseGeneratorDL', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="PulseGeneratorDL", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="PulseGeneratorDL", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PulseGeneratorDL'): - super(PulseGeneratorDL, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PulseGeneratorDL') - if self.delay is not None and 'delay' not in already_processed: - already_processed.add('delay') - outfile.write(' delay=%s' % (quote_attrib(self.delay), )) - if self.duration is not None and 'duration' not in already_processed: - already_processed.add('duration') - outfile.write(' duration=%s' % (quote_attrib(self.duration), )) - if self.amplitude is not None and 'amplitude' not in already_processed: - already_processed.add('amplitude') - outfile.write(' amplitude=%s' % (quote_attrib(self.amplitude), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PulseGeneratorDL', 
fromsubclass_=False, pretty_print=True): - super(PulseGeneratorDL, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="PulseGeneratorDL", + ): + super(PulseGeneratorDL, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="PulseGeneratorDL", + ) + if self.delay is not None and "delay" not in already_processed: + already_processed.add("delay") + outfile.write( + " delay=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.delay), input_name="delay" + ) + ), + ) + ) + if self.duration is not None and "duration" not in already_processed: + already_processed.add("duration") + outfile.write( + " duration=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.duration), input_name="duration" + ) + ), + ) + ) + if self.amplitude is not None and "amplitude" not in already_processed: + already_processed.add("amplitude") + outfile.write( + " amplitude=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.amplitude), input_name="amplitude" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PulseGeneratorDL", + fromsubclass_=False, + pretty_print=True, + ): + super(PulseGeneratorDL, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, 
node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('delay', node) - if value is not None and 'delay' not in already_processed: - already_processed.add('delay') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("delay", node) + if value is not None and "delay" not in already_processed: + already_processed.add("delay") self.delay = value - self.validate_Nml2Quantity_time(self.delay) # validate type Nml2Quantity_time - value = find_attr_value_('duration', node) - if value is not None and 'duration' not in already_processed: - already_processed.add('duration') + self.validate_Nml2Quantity_time( + self.delay + ) # validate type Nml2Quantity_time + value = find_attr_value_("duration", node) + if value is not None and "duration" not in already_processed: + already_processed.add("duration") self.duration = value - self.validate_Nml2Quantity_time(self.duration) # validate type Nml2Quantity_time - value = find_attr_value_('amplitude', node) - if value is not None and 'amplitude' not in already_processed: - already_processed.add('amplitude') + self.validate_Nml2Quantity_time( + self.duration + ) # validate type Nml2Quantity_time + value = find_attr_value_("amplitude", node) + if value is not None and "amplitude" not in already_processed: + already_processed.add("amplitude") self.amplitude = value - self.validate_Nml2Quantity_none(self.amplitude) # validate type Nml2Quantity_none - super(PulseGeneratorDL, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(PulseGeneratorDL, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_none( + self.amplitude + ) # 
validate type Nml2Quantity_none + super(PulseGeneratorDL, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(PulseGeneratorDL, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class PulseGeneratorDL class PulseGenerator(Standalone): - """Generates a constant current pulse of a certain amplitude (with - dimensions for current) for a specified duration after a delay.""" + """PulseGenerator -- Generates a constant current pulse of a certain **amplitude** for a specified **duration** after a **delay.** Scaled by **weight,** if set + \n + :param delay: Delay before change in current. Current is zero prior to this. + :type delay: time + :param duration: Duration for holding current at amplitude. Current is zero after delay + duration. + :type duration: time + :param amplitude: Amplitude of current pulse + :type amplitude: current + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('duration', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('amplitude', 'Nml2Quantity_current', 0, 0, {'use': u'required'}), + MemberSpec_( + "delay", "Nml2Quantity_time", 0, 0, {"use": "required", "name": "delay"} + ), + MemberSpec_( + "duration", + "Nml2Quantity_time", + 0, + 0, + {"use": "required", "name": "duration"}, + ), + MemberSpec_( + "amplitude", + "Nml2Quantity_current", + 0, + 0, + {"use": "required", "name": "amplitude"}, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, delay=None, duration=None, amplitude=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + delay=None, + duration=None, + amplitude=None, + gds_collector_=None, + 
**kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(PulseGenerator, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("PulseGenerator"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.delay = _cast(None, delay) + self.delay_nsprefix_ = None self.duration = _cast(None, duration) + self.duration_nsprefix_ = None self.amplitude = _cast(None, amplitude) + self.amplitude_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, PulseGenerator) + subclass = getSubclassFromModule_(CurrentSubclassModule_, PulseGenerator) if subclass is not None: return subclass(*args_, **kwargs_) if PulseGenerator.subclass: return PulseGenerator.subclass(*args_, **kwargs_) else: return PulseGenerator(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$"] + ] + def validate_Nml2Quantity_current(self, value): # Validate type Nml2Quantity_current, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_current_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_current_patterns_, )) - validate_Nml2Quantity_current_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA)$']] - def hasContent_(self): if ( - super(PulseGenerator, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_current_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_current_patterns_, + ) + ) + + validate_Nml2Quantity_current_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA))$"] + ] + + def _hasContent(self): + if super(PulseGenerator, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PulseGenerator', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('PulseGenerator') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PulseGenerator", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("PulseGenerator") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "PulseGenerator": name_ = 
self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PulseGenerator') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PulseGenerator', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="PulseGenerator" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="PulseGenerator", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PulseGenerator'): - super(PulseGenerator, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PulseGenerator') - if self.delay is not None and 'delay' not in already_processed: - already_processed.add('delay') - outfile.write(' delay=%s' % (quote_attrib(self.delay), )) - if self.duration is not None and 'duration' not in already_processed: - already_processed.add('duration') - outfile.write(' duration=%s' % (quote_attrib(self.duration), )) - if self.amplitude is not None and 'amplitude' not in already_processed: - already_processed.add('amplitude') - outfile.write(' amplitude=%s' % (quote_attrib(self.amplitude), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', 
name_='PulseGenerator', fromsubclass_=False, pretty_print=True): - super(PulseGenerator, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="PulseGenerator", + ): + super(PulseGenerator, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="PulseGenerator" + ) + if self.delay is not None and "delay" not in already_processed: + already_processed.add("delay") + outfile.write( + " delay=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.delay), input_name="delay" + ) + ), + ) + ) + if self.duration is not None and "duration" not in already_processed: + already_processed.add("duration") + outfile.write( + " duration=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.duration), input_name="duration" + ) + ), + ) + ) + if self.amplitude is not None and "amplitude" not in already_processed: + already_processed.add("amplitude") + outfile.write( + " amplitude=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.amplitude), input_name="amplitude" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PulseGenerator", + fromsubclass_=False, + pretty_print=True, + ): + super(PulseGenerator, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, 
node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('delay', node) - if value is not None and 'delay' not in already_processed: - already_processed.add('delay') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("delay", node) + if value is not None and "delay" not in already_processed: + already_processed.add("delay") self.delay = value - self.validate_Nml2Quantity_time(self.delay) # validate type Nml2Quantity_time - value = find_attr_value_('duration', node) - if value is not None and 'duration' not in already_processed: - already_processed.add('duration') + self.validate_Nml2Quantity_time( + self.delay + ) # validate type Nml2Quantity_time + value = find_attr_value_("duration", node) + if value is not None and "duration" not in already_processed: + already_processed.add("duration") self.duration = value - self.validate_Nml2Quantity_time(self.duration) # validate type Nml2Quantity_time - value = find_attr_value_('amplitude', node) - if value is not None and 'amplitude' not in already_processed: - already_processed.add('amplitude') + self.validate_Nml2Quantity_time( + self.duration + ) # validate type Nml2Quantity_time + value = find_attr_value_("amplitude", node) + if value is not None and "amplitude" not in already_processed: + already_processed.add("amplitude") self.amplitude = value - self.validate_Nml2Quantity_current(self.amplitude) # validate type Nml2Quantity_current - super(PulseGenerator, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(PulseGenerator, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_current( + self.amplitude + ) 
# validate type Nml2Quantity_current + super(PulseGenerator, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(PulseGenerator, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class PulseGenerator class ReactionScheme(Base): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('source', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('type', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('__ANY__', '__ANY__', 1, 1, {u'maxOccurs': u'unbounded', u'processContents': u'skip', u'minOccurs': u'0'}, None), + MemberSpec_("source", "xs:string", 0, 0, {"use": "required", "name": "source"}), + MemberSpec_("type", "xs:string", 0, 0, {"use": "required", "name": "type"}), + MemberSpec_( + "__ANY__", + "__ANY__", + 1, + 1, + {"maxOccurs": "unbounded", "minOccurs": "0", "processContents": "skip"}, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, source=None, type=None, anytypeobjs_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + source=None, + type=None, + anytypeobjs_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ReactionScheme, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ReactionScheme"), self).__init__( + neuro_lex_id, id, **kwargs_ + ) self.source = _cast(None, source) + self.source_nsprefix_ = None self.type = _cast(None, type) + self.type_nsprefix_ = None if anytypeobjs_ is None: self.anytypeobjs_ = [] else: self.anytypeobjs_ = anytypeobjs_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - 
CurrentSubclassModule_, ReactionScheme) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ReactionScheme) if subclass is not None: return subclass(*args_, **kwargs_) if ReactionScheme.subclass: return ReactionScheme.subclass(*args_, **kwargs_) else: return ReactionScheme(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - self.anytypeobjs_ or - super(ReactionScheme, self).hasContent_() - ): + + def _hasContent(self): + if self.anytypeobjs_ or super(ReactionScheme, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ReactionScheme', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ReactionScheme') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="ReactionScheme", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ReactionScheme") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ReactionScheme": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ReactionScheme') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ReactionScheme', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + 
self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ReactionScheme" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ReactionScheme", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ReactionScheme'): - super(ReactionScheme, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ReactionScheme') - if self.source is not None and 'source' not in already_processed: - already_processed.add('source') - outfile.write(' source=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.source), input_name='source')), )) - if self.type is not None and 'type' not in already_processed: - already_processed.add('type') - outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type), input_name='type')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ReactionScheme', fromsubclass_=False, pretty_print=True): - super(ReactionScheme, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ReactionScheme", + ): + super(ReactionScheme, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ReactionScheme" + ) + if self.source is not None and "source" not in already_processed: + already_processed.add("source") + outfile.write( + " source=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.source), input_name="source" + ) + ), + ) + ) + 
if self.type is not None and "type" not in already_processed: + already_processed.add("type") + outfile.write( + " type=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.type), input_name="type" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="ReactionScheme", + fromsubclass_=False, + pretty_print=True, + ): + super(ReactionScheme, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' - else: - eol_ = '' - for obj_ in self.anytypeobjs_: - obj_.export(outfile, level, namespaceprefix_, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + eol_ = "\n" + else: + eol_ = "" + if not fromsubclass_: + for obj_ in self.anytypeobjs_: + showIndent(outfile, level, pretty_print) + outfile.write(obj_) + outfile.write("\n") + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('source', node) - if value is not None and 'source' not in already_processed: - already_processed.add('source') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("source", node) + if value is not None and "source" not in already_processed: + already_processed.add("source") self.source = value - value = 
find_attr_value_('type', node) - if value is not None and 'type' not in already_processed: - already_processed.add('type') + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") self.type = value - super(ReactionScheme, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - obj_ = self.gds_build_any(child_, 'ReactionScheme') - if obj_ is not None: - self.add_anytypeobjs_(obj_) - super(ReactionScheme, self).buildChildren(child_, node, nodeName_, True) + super(ReactionScheme, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + content_ = self.gds_build_any(child_, "ReactionScheme") + self.anytypeobjs_.append(content_) + super(ReactionScheme, self)._buildChildren(child_, node, nodeName_, True) + + # end class ReactionScheme class ExtracellularProperties(Base): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('species', 'Species', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Species', u'name': u'species', u'minOccurs': u'0'}, None), + MemberSpec_( + "species", + "Species", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "species", + "type": "Species", + }, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, species=None, **kwargs_): + + def __init__( + self, neuro_lex_id=None, id=None, species=None, gds_collector_=None, **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ExtracellularProperties, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ExtracellularProperties"), self).__init__( + 
neuro_lex_id, id, **kwargs_ + ) if species is None: self.species = [] else: self.species = species + self.species_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ExtracellularProperties) + CurrentSubclassModule_, ExtracellularProperties + ) if subclass is not None: return subclass(*args_, **kwargs_) if ExtracellularProperties.subclass: return ExtracellularProperties.subclass(*args_, **kwargs_) else: return ExtracellularProperties(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - self.species or - super(ExtracellularProperties, self).hasContent_() - ): + + def _hasContent(self): + if self.species or super(ExtracellularProperties, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtracellularProperties', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExtracellularProperties') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="ExtracellularProperties", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ExtracellularProperties") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ExtracellularProperties": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtracellularProperties') - if self.hasContent_(): - 
outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExtracellularProperties', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ExtracellularProperties", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ExtracellularProperties", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExtracellularProperties'): - super(ExtracellularProperties, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExtracellularProperties') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExtracellularProperties', fromsubclass_=False, pretty_print=True): - super(ExtracellularProperties, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ExtracellularProperties", + ): + super(ExtracellularProperties, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ExtracellularProperties", + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="ExtracellularProperties", + fromsubclass_=False, + pretty_print=True, + ): + super(ExtracellularProperties, 
self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for species_ in self.species: - species_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='species', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.species_nsprefix_ + ":" + if (UseCapturedNS_ and self.species_nsprefix_) + else "" + ) + species_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="species", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(ExtracellularProperties, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'species': + + def _buildAttributes(self, node, attrs, already_processed): + super(ExtracellularProperties, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "species": obj_ = Species.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.species.append(obj_) - obj_.original_tagname_ = 'species' - super(ExtracellularProperties, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = 
"species" + super(ExtracellularProperties, self)._buildChildren( + child_, node, nodeName_, True + ) + + # end class ExtracellularProperties class ChannelDensityGHK2(Base): - """Specifying the ion here again is redundant, this will be set in - ionChannel definition. It is added here TEMPORARILY since - selecting all ca or na conducting channel populations/densities - in a cell would be difficult otherwise. Also, it will make it - easier to set the correct native simulator value for erev (e.g. - ek for ion = k in NEURON). Currently a required attribute. It - should be removed in the longer term, due to possible - inconsistencies in this value and that in the ionChannel - element. TODO: remove.""" + """ChannelDensityGHK2 -- Time varying conductance density, **gDensity,** which is distributed on an area of the cell, producing a current density **iDensity.** Modified version of Jaffe et al. 1994 ( used also in Lawrence et al. 2006 ). See https://github.com/OpenSourceBrain/ghk-nernst. + \n + :param condDensity: + :type condDensity: conductanceDensity + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('cond_density', 'Nml2Quantity_conductanceDensity', 0, 1, {'use': u'optional'}), - MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('segments', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('ion', 'NmlId', 0, 0, {'use': u'required'}), + MemberSpec_( + "ion_channel", "NmlId", 0, 0, {"use": "required", "name": "ion_channel"} + ), + MemberSpec_( + "cond_density", + "Nml2Quantity_conductanceDensity", + 0, + 1, + {"use": "optional", "name": "cond_density"}, + ), + MemberSpec_( + "segment_groups", + "NmlId", + 0, + 1, + {"use": "optional", "name": "segment_groups"}, + ), + MemberSpec_("segments", "NmlId", 0, 1, {"use": "optional", "name": "segments"}), + MemberSpec_("ion", "NmlId", 0, 0, {"use": "required", "name": "ion"}), ] subclass = None 
superclass = Base - def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, cond_density=None, segment_groups='all', segments=None, ion=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + ion_channel=None, + cond_density=None, + segment_groups="all", + segments=None, + ion=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ChannelDensityGHK2, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ChannelDensityGHK2"), self).__init__( + neuro_lex_id, id, **kwargs_ + ) self.ion_channel = _cast(None, ion_channel) + self.ion_channel_nsprefix_ = None self.cond_density = _cast(None, cond_density) + self.cond_density_nsprefix_ = None self.segment_groups = _cast(None, segment_groups) + self.segment_groups_nsprefix_ = None self.segments = _cast(None, segments) + self.segments_nsprefix_ = None self.ion = _cast(None, ion) + self.ion_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ChannelDensityGHK2) + CurrentSubclassModule_, ChannelDensityGHK2 + ) if subclass is not None: return subclass(*args_, **kwargs_) if ChannelDensityGHK2.subclass: return ChannelDensityGHK2.subclass(*args_, **kwargs_) else: return ChannelDensityGHK2(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + def validate_Nml2Quantity_conductanceDensity(self, value): # Validate type Nml2Quantity_conductanceDensity, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_conductanceDensity_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductanceDensity_patterns_, )) - validate_Nml2Quantity_conductanceDensity_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S_per_m2|mS_per_cm2|S_per_cm2)$']] - def hasContent_(self): if ( - super(ChannelDensityGHK2, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_conductanceDensity_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_conductanceDensity_patterns_, + ) + ) + + validate_Nml2Quantity_conductanceDensity_patterns_ = [ + [ + "^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S_per_m2|mS_per_cm2|S_per_cm2))$" + ] + ] + + def _hasContent(self): + if super(ChannelDensityGHK2, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityGHK2', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ChannelDensityGHK2') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ChannelDensityGHK2", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ChannelDensityGHK2") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if 
self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ChannelDensityGHK2": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityGHK2') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityGHK2', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensityGHK2'): - super(ChannelDensityGHK2, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityGHK2') - if self.ion_channel is not None and 'ion_channel' not in already_processed: - already_processed.add('ion_channel') - outfile.write(' ionChannel=%s' % (quote_attrib(self.ion_channel), )) - if self.cond_density is not None and 'cond_density' not in already_processed: - already_processed.add('cond_density') - outfile.write(' condDensity=%s' % (quote_attrib(self.cond_density), )) - if self.segment_groups != "all" and 'segment_groups' not in already_processed: - already_processed.add('segment_groups') - outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) - if self.segments is not None and 'segments' not in already_processed: - already_processed.add('segments') - outfile.write(' segment=%s' % (quote_attrib(self.segments), )) - if self.ion is not None and 'ion' not in already_processed: - already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) - def exportChildren(self, outfile, level, 
namespaceprefix_='', namespacedef_='', name_='ChannelDensityGHK2', fromsubclass_=False, pretty_print=True): - super(ChannelDensityGHK2, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityGHK2", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ChannelDensityGHK2", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ChannelDensityGHK2", + ): + super(ChannelDensityGHK2, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityGHK2", + ) + if self.ion_channel is not None and "ion_channel" not in already_processed: + already_processed.add("ion_channel") + outfile.write( + " ionChannel=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.ion_channel), input_name="ionChannel" + ) + ), + ) + ) + if self.cond_density is not None and "cond_density" not in already_processed: + already_processed.add("cond_density") + outfile.write( + " condDensity=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.cond_density), input_name="condDensity" + ) + ), + ) + ) + if self.segment_groups != "all" and "segment_groups" not in already_processed: + already_processed.add("segment_groups") + outfile.write( + " segmentGroup=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.segment_groups), input_name="segmentGroup" + ) + ), + ) + ) + if self.segments is not None and "segments" not in 
already_processed: + already_processed.add("segments") + outfile.write( + " segment=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.segments), input_name="segment" + ) + ), + ) + ) + if self.ion is not None and "ion" not in already_processed: + already_processed.add("ion") + outfile.write( + " ion=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.ion), input_name="ion") + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ChannelDensityGHK2", + fromsubclass_=False, + pretty_print=True, + ): + super(ChannelDensityGHK2, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('ionChannel', node) - if value is not None and 'ionChannel' not in already_processed: - already_processed.add('ionChannel') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("ionChannel", node) + if value is not None and "ionChannel" not in already_processed: + already_processed.add("ionChannel") self.ion_channel = value - self.validate_NmlId(self.ion_channel) # validate type NmlId - value = find_attr_value_('condDensity', node) - if value is not None and 'condDensity' not in already_processed: - 
already_processed.add('condDensity') + self.validate_NmlId(self.ion_channel) # validate type NmlId + value = find_attr_value_("condDensity", node) + if value is not None and "condDensity" not in already_processed: + already_processed.add("condDensity") self.cond_density = value - self.validate_Nml2Quantity_conductanceDensity(self.cond_density) # validate type Nml2Quantity_conductanceDensity - value = find_attr_value_('segmentGroup', node) - if value is not None and 'segmentGroup' not in already_processed: - already_processed.add('segmentGroup') + self.validate_Nml2Quantity_conductanceDensity( + self.cond_density + ) # validate type Nml2Quantity_conductanceDensity + value = find_attr_value_("segmentGroup", node) + if value is not None and "segmentGroup" not in already_processed: + already_processed.add("segmentGroup") self.segment_groups = value - self.validate_NmlId(self.segment_groups) # validate type NmlId - value = find_attr_value_('segment', node) - if value is not None and 'segment' not in already_processed: - already_processed.add('segment') + self.validate_NmlId(self.segment_groups) # validate type NmlId + value = find_attr_value_("segment", node) + if value is not None and "segment" not in already_processed: + already_processed.add("segment") self.segments = value - self.validate_NmlId(self.segments) # validate type NmlId - value = find_attr_value_('ion', node) - if value is not None and 'ion' not in already_processed: - already_processed.add('ion') + self.validate_NmlId(self.segments) # validate type NmlId + value = find_attr_value_("ion", node) + if value is not None and "ion" not in already_processed: + already_processed.add("ion") self.ion = value - self.validate_NmlId(self.ion) # validate type NmlId - super(ChannelDensityGHK2, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ChannelDensityGHK2, self).buildChildren(child_, node, nodeName_, True) + 
self.validate_NmlId(self.ion) # validate type NmlId + super(ChannelDensityGHK2, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(ChannelDensityGHK2, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class ChannelDensityGHK2 class ChannelDensityGHK(Base): - """Specifying the ion here again is redundant, this will be set in - ionChannel definition. It is added here TEMPORARILY since - selecting all ca or na conducting channel populations/densities - in a cell would be difficult otherwise. Also, it will make it - easier to set the correct native simulator value for erev (e.g. - ek for ion = k in NEURON). Currently a required attribute. It - should be removed in the longer term, due to possible - inconsistencies in this value and that in the ionChannel - element. TODO: remove.""" + """ChannelDensityGHK -- Specifies a time varying conductance density, **gDensity,** which is distributed on an area of the cell, producing a current density **iDensity** and whose reversal potential is calculated from the Goldman Hodgkin Katz equation. Hard coded for Ca only! See https://github.com/OpenSourceBrain/ghk-nernst. 
+ \n + :param permeability: + :type permeability: permeability + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('permeability', 'Nml2Quantity_permeability', 0, 0, {'use': u'required'}), - MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('segments', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('ion', 'NmlId', 0, 0, {'use': u'required'}), + MemberSpec_( + "ion_channel", "NmlId", 0, 0, {"use": "required", "name": "ion_channel"} + ), + MemberSpec_( + "permeability", + "Nml2Quantity_permeability", + 0, + 0, + {"use": "required", "name": "permeability"}, + ), + MemberSpec_( + "segment_groups", + "NmlId", + 0, + 1, + {"use": "optional", "name": "segment_groups"}, + ), + MemberSpec_("segments", "NmlId", 0, 1, {"use": "optional", "name": "segments"}), + MemberSpec_("ion", "NmlId", 0, 0, {"use": "required", "name": "ion"}), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, permeability=None, segment_groups='all', segments=None, ion=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + ion_channel=None, + permeability=None, + segment_groups="all", + segments=None, + ion=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ChannelDensityGHK, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ChannelDensityGHK"), self).__init__( + neuro_lex_id, id, **kwargs_ + ) self.ion_channel = _cast(None, ion_channel) + self.ion_channel_nsprefix_ = None self.permeability = _cast(None, permeability) + self.permeability_nsprefix_ = None self.segment_groups = _cast(None, segment_groups) + self.segment_groups_nsprefix_ = None 
self.segments = _cast(None, segments) + self.segments_nsprefix_ = None self.ion = _cast(None, ion) + self.ion_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ChannelDensityGHK) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ChannelDensityGHK) if subclass is not None: return subclass(*args_, **kwargs_) if ChannelDensityGHK.subclass: return ChannelDensityGHK.subclass(*args_, **kwargs_) else: return ChannelDensityGHK(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + def validate_Nml2Quantity_permeability(self, value): # Validate type Nml2Quantity_permeability, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_permeability_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_permeability_patterns_, )) - validate_Nml2Quantity_permeability_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(m_per_s| um_per_ms|cm_per_s|cm_per_ms)$']] - def hasContent_(self): if ( - super(ChannelDensityGHK, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_permeability_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_permeability_patterns_, + ) + ) + + validate_Nml2Quantity_permeability_patterns_ = [ + [ + "^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(m_per_s|um_per_ms|cm_per_s|cm_per_ms))$" + ] + ] + + def _hasContent(self): + if super(ChannelDensityGHK, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityGHK', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ChannelDensityGHK') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ChannelDensityGHK", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ChannelDensityGHK") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: 
+ eol_ = "" + if self.original_tagname_ is not None and name_ == "ChannelDensityGHK": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityGHK') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityGHK', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensityGHK'): - super(ChannelDensityGHK, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityGHK') - if self.ion_channel is not None and 'ion_channel' not in already_processed: - already_processed.add('ion_channel') - outfile.write(' ionChannel=%s' % (quote_attrib(self.ion_channel), )) - if self.permeability is not None and 'permeability' not in already_processed: - already_processed.add('permeability') - outfile.write(' permeability=%s' % (quote_attrib(self.permeability), )) - if self.segment_groups != "all" and 'segment_groups' not in already_processed: - already_processed.add('segment_groups') - outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) - if self.segments is not None and 'segments' not in already_processed: - already_processed.add('segments') - outfile.write(' segment=%s' % (quote_attrib(self.segments), )) - if self.ion is not None and 'ion' not in already_processed: - already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', 
name_='ChannelDensityGHK', fromsubclass_=False, pretty_print=True): - super(ChannelDensityGHK, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityGHK", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ChannelDensityGHK", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ChannelDensityGHK", + ): + super(ChannelDensityGHK, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityGHK", + ) + if self.ion_channel is not None and "ion_channel" not in already_processed: + already_processed.add("ion_channel") + outfile.write( + " ionChannel=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.ion_channel), input_name="ionChannel" + ) + ), + ) + ) + if self.permeability is not None and "permeability" not in already_processed: + already_processed.add("permeability") + outfile.write( + " permeability=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.permeability), input_name="permeability" + ) + ), + ) + ) + if self.segment_groups != "all" and "segment_groups" not in already_processed: + already_processed.add("segment_groups") + outfile.write( + " segmentGroup=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.segment_groups), input_name="segmentGroup" + ) + ), + ) + ) + if self.segments is not None and "segments" not in already_processed: + 
already_processed.add("segments") + outfile.write( + " segment=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.segments), input_name="segment" + ) + ), + ) + ) + if self.ion is not None and "ion" not in already_processed: + already_processed.add("ion") + outfile.write( + " ion=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.ion), input_name="ion") + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ChannelDensityGHK", + fromsubclass_=False, + pretty_print=True, + ): + super(ChannelDensityGHK, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('ionChannel', node) - if value is not None and 'ionChannel' not in already_processed: - already_processed.add('ionChannel') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("ionChannel", node) + if value is not None and "ionChannel" not in already_processed: + already_processed.add("ionChannel") self.ion_channel = value - self.validate_NmlId(self.ion_channel) # validate type NmlId - value = find_attr_value_('permeability', node) - if value is not None and 'permeability' not in already_processed: - already_processed.add('permeability') + 
self.validate_NmlId(self.ion_channel) # validate type NmlId + value = find_attr_value_("permeability", node) + if value is not None and "permeability" not in already_processed: + already_processed.add("permeability") self.permeability = value - self.validate_Nml2Quantity_permeability(self.permeability) # validate type Nml2Quantity_permeability - value = find_attr_value_('segmentGroup', node) - if value is not None and 'segmentGroup' not in already_processed: - already_processed.add('segmentGroup') + self.validate_Nml2Quantity_permeability( + self.permeability + ) # validate type Nml2Quantity_permeability + value = find_attr_value_("segmentGroup", node) + if value is not None and "segmentGroup" not in already_processed: + already_processed.add("segmentGroup") self.segment_groups = value - self.validate_NmlId(self.segment_groups) # validate type NmlId - value = find_attr_value_('segment', node) - if value is not None and 'segment' not in already_processed: - already_processed.add('segment') + self.validate_NmlId(self.segment_groups) # validate type NmlId + value = find_attr_value_("segment", node) + if value is not None and "segment" not in already_processed: + already_processed.add("segment") self.segments = value - self.validate_NmlId(self.segments) # validate type NmlId - value = find_attr_value_('ion', node) - if value is not None and 'ion' not in already_processed: - already_processed.add('ion') + self.validate_NmlId(self.segments) # validate type NmlId + value = find_attr_value_("ion", node) + if value is not None and "ion" not in already_processed: + already_processed.add("ion") self.ion = value - self.validate_NmlId(self.ion) # validate type NmlId - super(ChannelDensityGHK, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ChannelDensityGHK, self).buildChildren(child_, node, nodeName_, True) + self.validate_NmlId(self.ion) # validate type NmlId + super(ChannelDensityGHK, 
self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(ChannelDensityGHK, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class ChannelDensityGHK class ChannelDensityNernst(Base): - """Specifying the ion here again is redundant, this will be set in - ionChannel definition. It is added here TEMPORARILY since - selecting all ca or na conducting channel populations/densities - in a cell would be difficult otherwise. Also, it will make it - easier to set the correct native simulator value for erev (e.g. - ek for ion = k in NEURON). Currently a required attribute. It - should be removed in the longer term, due to possible - inconsistencies in this value and that in the ionChannel - element. TODO: remove.""" + """ChannelDensityNernst -- Specifies a time varying conductance density, **gDensity,** which is distributed on an area of the **cell,** producing a current density **iDensity** and whose reversal potential is calculated from the Nernst equation. Hard coded for Ca only! See https://github.com/OpenSourceBrain/ghk-nernst. 
+ \n + :param condDensity: + :type condDensity: conductanceDensity + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('cond_density', 'Nml2Quantity_conductanceDensity', 0, 1, {'use': u'optional'}), - MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('segments', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('ion', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('variable_parameters', 'VariableParameter', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'VariableParameter', u'name': u'variableParameter', u'minOccurs': u'0'}, None), + MemberSpec_( + "ion_channel", "NmlId", 0, 0, {"use": "required", "name": "ion_channel"} + ), + MemberSpec_( + "cond_density", + "Nml2Quantity_conductanceDensity", + 0, + 1, + {"use": "optional", "name": "cond_density"}, + ), + MemberSpec_( + "segment_groups", + "NmlId", + 0, + 1, + {"use": "optional", "name": "segment_groups"}, + ), + MemberSpec_("segments", "NmlId", 0, 1, {"use": "optional", "name": "segments"}), + MemberSpec_("ion", "NmlId", 0, 0, {"use": "required", "name": "ion"}), + MemberSpec_( + "variable_parameters", + "VariableParameter", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "variableParameter", + "type": "VariableParameter", + }, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, cond_density=None, segment_groups='all', segments=None, ion=None, variable_parameters=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + ion_channel=None, + cond_density=None, + segment_groups="all", + segments=None, + ion=None, + variable_parameters=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = 
kwargs_.get('parent_object_') - super(ChannelDensityNernst, self).__init__(neuro_lex_id, id, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ChannelDensityNernst"), self).__init__( + neuro_lex_id, id, extensiontype_, **kwargs_ + ) self.ion_channel = _cast(None, ion_channel) + self.ion_channel_nsprefix_ = None self.cond_density = _cast(None, cond_density) + self.cond_density_nsprefix_ = None self.segment_groups = _cast(None, segment_groups) + self.segment_groups_nsprefix_ = None self.segments = _cast(None, segments) + self.segments_nsprefix_ = None self.ion = _cast(None, ion) + self.ion_nsprefix_ = None if variable_parameters is None: self.variable_parameters = [] else: self.variable_parameters = variable_parameters + self.variable_parameters_nsprefix_ = None self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ChannelDensityNernst) + CurrentSubclassModule_, ChannelDensityNernst + ) if subclass is not None: return subclass(*args_, **kwargs_) if ChannelDensityNernst.subclass: return ChannelDensityNernst.subclass(*args_, **kwargs_) else: return ChannelDensityNernst(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + def validate_Nml2Quantity_conductanceDensity(self, value): # Validate type Nml2Quantity_conductanceDensity, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_conductanceDensity_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductanceDensity_patterns_, )) - validate_Nml2Quantity_conductanceDensity_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S_per_m2|mS_per_cm2|S_per_cm2)$']] - def hasContent_(self): if ( - self.variable_parameters or - super(ChannelDensityNernst, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_conductanceDensity_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_conductanceDensity_patterns_, + ) + ) + + validate_Nml2Quantity_conductanceDensity_patterns_ = [ + [ + "^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S_per_m2|mS_per_cm2|S_per_cm2))$" + ] + ] + + def _hasContent(self): + if self.variable_parameters or super(ChannelDensityNernst, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityNernst', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ChannelDensityNernst') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="ChannelDensityNernst", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ChannelDensityNernst") if imported_ns_def_ 
is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ChannelDensityNernst": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNernst') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityNernst', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityNernst", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ChannelDensityNernst", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensityNernst'): - super(ChannelDensityNernst, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNernst') - if self.ion_channel is not None and 'ion_channel' not in already_processed: - already_processed.add('ion_channel') - outfile.write(' ionChannel=%s' % (quote_attrib(self.ion_channel), )) - if self.cond_density is not None and 'cond_density' not in already_processed: - 
already_processed.add('cond_density') - outfile.write(' condDensity=%s' % (quote_attrib(self.cond_density), )) - if self.segment_groups != "all" and 'segment_groups' not in already_processed: - already_processed.add('segment_groups') - outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) - if self.segments is not None and 'segments' not in already_processed: - already_processed.add('segments') - outfile.write(' segment=%s' % (quote_attrib(self.segments), )) - if self.ion is not None and 'ion' not in already_processed: - already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ChannelDensityNernst", + ): + super(ChannelDensityNernst, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityNernst", + ) + if self.ion_channel is not None and "ion_channel" not in already_processed: + already_processed.add("ion_channel") + outfile.write( + " ionChannel=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.ion_channel), input_name="ionChannel" + ) + ), + ) + ) + if self.cond_density is not None and "cond_density" not in already_processed: + already_processed.add("cond_density") + outfile.write( + " condDensity=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.cond_density), input_name="condDensity" + ) + ), + ) + ) + if self.segment_groups != "all" and "segment_groups" not in already_processed: + already_processed.add("segment_groups") + outfile.write( + " segmentGroup=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.segment_groups), input_name="segmentGroup" + ) + ), + ) + ) + if 
self.segments is not None and "segments" not in already_processed: + already_processed.add("segments") + outfile.write( + " segment=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.segments), input_name="segment" + ) + ), + ) + ) + if self.ion is not None and "ion" not in already_processed: + already_processed.add("ion") + outfile.write( + " ion=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.ion), input_name="ion") + ), + ) + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityNernst', fromsubclass_=False, pretty_print=True): - super(ChannelDensityNernst, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="ChannelDensityNernst", + fromsubclass_=False, + pretty_print=True, + ): + super(ChannelDensityNernst, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for variableParameter_ in self.variable_parameters: - variableParameter_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='variableParameter', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + 
self.variable_parameters_nsprefix_ + ":" + if (UseCapturedNS_ and self.variable_parameters_nsprefix_) + else "" + ) + variableParameter_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="variableParameter", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('ionChannel', node) - if value is not None and 'ionChannel' not in already_processed: - already_processed.add('ionChannel') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("ionChannel", node) + if value is not None and "ionChannel" not in already_processed: + already_processed.add("ionChannel") self.ion_channel = value - self.validate_NmlId(self.ion_channel) # validate type NmlId - value = find_attr_value_('condDensity', node) - if value is not None and 'condDensity' not in already_processed: - already_processed.add('condDensity') + self.validate_NmlId(self.ion_channel) # validate type NmlId + value = find_attr_value_("condDensity", node) + if value is not None and "condDensity" not in already_processed: + already_processed.add("condDensity") self.cond_density = value - self.validate_Nml2Quantity_conductanceDensity(self.cond_density) # validate type Nml2Quantity_conductanceDensity - value = find_attr_value_('segmentGroup', node) - if value is not None and 'segmentGroup' not in already_processed: - already_processed.add('segmentGroup') + 
self.validate_Nml2Quantity_conductanceDensity( + self.cond_density + ) # validate type Nml2Quantity_conductanceDensity + value = find_attr_value_("segmentGroup", node) + if value is not None and "segmentGroup" not in already_processed: + already_processed.add("segmentGroup") self.segment_groups = value - self.validate_NmlId(self.segment_groups) # validate type NmlId - value = find_attr_value_('segment', node) - if value is not None and 'segment' not in already_processed: - already_processed.add('segment') + self.validate_NmlId(self.segment_groups) # validate type NmlId + value = find_attr_value_("segment", node) + if value is not None and "segment" not in already_processed: + already_processed.add("segment") self.segments = value - self.validate_NmlId(self.segments) # validate type NmlId - value = find_attr_value_('ion', node) - if value is not None and 'ion' not in already_processed: - already_processed.add('ion') + self.validate_NmlId(self.segments) # validate type NmlId + value = find_attr_value_("ion", node) + if value is not None and "ion" not in already_processed: + already_processed.add("ion") self.ion = value - self.validate_NmlId(self.ion) # validate type NmlId - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_NmlId(self.ion) # validate type NmlId + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(ChannelDensityNernst, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'variableParameter': + super(ChannelDensityNernst, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "variableParameter": obj_ = 
VariableParameter.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.variable_parameters.append(obj_) - obj_.original_tagname_ = 'variableParameter' - super(ChannelDensityNernst, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "variableParameter" + super(ChannelDensityNernst, self)._buildChildren(child_, node, nodeName_, True) + + # end class ChannelDensityNernst class ChannelDensity(Base): - """Specifying the ion here again is redundant, this will be set in - ionChannel definition. It is added here TEMPORARILY since - selecting all ca or na conducting channel populations/densities - in a cell would be difficult otherwise. Also, it will make it - easier to set the correct native simulator value for erev (e.g. - ek for ion = k in NEURON). Currently a required attribute. It - should be removed in the longer term, due to possible - inconsistencies in this value and that in the ionChannel - element. TODO: remove.""" + """ChannelDensity -- Specifies a time varying ohmic conductance density, **gDensity,** which is distributed on an area of the **cell** ( specified in **membraneProperties** ) with fixed reversal potential **erev** producing a current density **iDensity** + \n + :param erev: The reversal potential of the current produced + :type erev: voltage + :param condDensity: + :type condDensity: conductanceDensity + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('cond_density', 'Nml2Quantity_conductanceDensity', 0, 1, {'use': u'optional'}), - MemberSpec_('erev', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('segments', 'NonNegativeInteger', 0, 1, {'use': u'optional'}), - MemberSpec_('ion', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('variable_parameters', 'VariableParameter', 1, 1, 
{u'maxOccurs': u'unbounded', u'type': u'VariableParameter', u'name': u'variableParameter', u'minOccurs': u'0'}, None), + MemberSpec_( + "ion_channel", "NmlId", 0, 0, {"use": "required", "name": "ion_channel"} + ), + MemberSpec_( + "cond_density", + "Nml2Quantity_conductanceDensity", + 0, + 1, + {"use": "optional", "name": "cond_density"}, + ), + MemberSpec_( + "erev", "Nml2Quantity_voltage", 0, 0, {"use": "required", "name": "erev"} + ), + MemberSpec_( + "segment_groups", + "NmlId", + 0, + 1, + {"use": "optional", "name": "segment_groups"}, + ), + MemberSpec_( + "segments", + "NonNegativeInteger", + 0, + 1, + {"use": "optional", "name": "segments"}, + ), + MemberSpec_("ion", "NmlId", 0, 0, {"use": "required", "name": "ion"}), + MemberSpec_( + "variable_parameters", + "VariableParameter", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "variableParameter", + "type": "VariableParameter", + }, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, cond_density=None, erev=None, segment_groups='all', segments=None, ion=None, variable_parameters=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + ion_channel=None, + cond_density=None, + erev=None, + segment_groups="all", + segments=None, + ion=None, + variable_parameters=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ChannelDensity, self).__init__(neuro_lex_id, id, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ChannelDensity"), self).__init__( + neuro_lex_id, id, extensiontype_, **kwargs_ + ) self.ion_channel = _cast(None, ion_channel) + self.ion_channel_nsprefix_ = None self.cond_density = _cast(None, cond_density) + 
self.cond_density_nsprefix_ = None self.erev = _cast(None, erev) + self.erev_nsprefix_ = None self.segment_groups = _cast(None, segment_groups) + self.segment_groups_nsprefix_ = None self.segments = _cast(int, segments) + self.segments_nsprefix_ = None self.ion = _cast(None, ion) + self.ion_nsprefix_ = None if variable_parameters is None: self.variable_parameters = [] else: self.variable_parameters = variable_parameters + self.variable_parameters_nsprefix_ = None self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ChannelDensity) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ChannelDensity) if subclass is not None: return subclass(*args_, **kwargs_) if ChannelDensity.subclass: return ChannelDensity.subclass(*args_, **kwargs_) else: return ChannelDensity(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + def validate_Nml2Quantity_conductanceDensity(self, value): # Validate type Nml2Quantity_conductanceDensity, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_conductanceDensity_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductanceDensity_patterns_, )) - validate_Nml2Quantity_conductanceDensity_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S_per_m2|mS_per_cm2|S_per_cm2)$']] + self.validate_Nml2Quantity_conductanceDensity_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_conductanceDensity_patterns_, + ) + ) + + validate_Nml2Quantity_conductanceDensity_patterns_ = [ + [ + "^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S_per_m2|mS_per_cm2|S_per_cm2))$" + ] + ] + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.validate_Nml2Quantity_voltage_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$"] + ] + def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. 
- if value is not None and Validate_simpletypes_: - pass - def hasContent_(self): if ( - self.variable_parameters or - super(ChannelDensity, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + pass + + def _hasContent(self): + if self.variable_parameters or super(ChannelDensity, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensity', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ChannelDensity') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="ChannelDensity", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ChannelDensity") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ChannelDensity": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensity') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensity', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " 
" + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ChannelDensity" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ChannelDensity", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensity'): - super(ChannelDensity, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensity') - if self.ion_channel is not None and 'ion_channel' not in already_processed: - already_processed.add('ion_channel') - outfile.write(' ionChannel=%s' % (quote_attrib(self.ion_channel), )) - if self.cond_density is not None and 'cond_density' not in already_processed: - already_processed.add('cond_density') - outfile.write(' condDensity=%s' % (quote_attrib(self.cond_density), )) - if self.erev is not None and 'erev' not in already_processed: - already_processed.add('erev') - outfile.write(' erev=%s' % (quote_attrib(self.erev), )) - if self.segment_groups != "all" and 'segment_groups' not in already_processed: - already_processed.add('segment_groups') - outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) - if self.segments is not None and 'segments' not in already_processed: - already_processed.add('segments') - outfile.write(' segment=%s' % (quote_attrib(self.segments), )) - if self.ion is not None and 'ion' not in already_processed: - already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + 
outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ChannelDensity", + ): + super(ChannelDensity, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ChannelDensity" + ) + if self.ion_channel is not None and "ion_channel" not in already_processed: + already_processed.add("ion_channel") + outfile.write( + " ionChannel=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.ion_channel), input_name="ionChannel" + ) + ), + ) + ) + if self.cond_density is not None and "cond_density" not in already_processed: + already_processed.add("cond_density") + outfile.write( + " condDensity=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.cond_density), input_name="condDensity" + ) + ), + ) + ) + if self.erev is not None and "erev" not in already_processed: + already_processed.add("erev") + outfile.write( + " erev=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.erev), input_name="erev" + ) + ), + ) + ) + if self.segment_groups != "all" and "segment_groups" not in already_processed: + already_processed.add("segment_groups") + outfile.write( + " segmentGroup=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.segment_groups), input_name="segmentGroup" + ) + ), + ) + ) + if self.segments is not None and "segments" not in already_processed: + already_processed.add("segments") + outfile.write( + ' segment="%s"' + % self.gds_format_integer(self.segments, input_name="segment") + ) + if self.ion is not None and "ion" not in already_processed: + already_processed.add("ion") + outfile.write( + " ion=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.ion), input_name="ion") + ), + ) + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensity', fromsubclass_=False, pretty_print=True): - super(ChannelDensity, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="ChannelDensity", + fromsubclass_=False, + pretty_print=True, + ): + super(ChannelDensity, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for variableParameter_ in self.variable_parameters: - variableParameter_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='variableParameter', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.variable_parameters_nsprefix_ + ":" + if (UseCapturedNS_ and self.variable_parameters_nsprefix_) + else "" + ) + variableParameter_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="variableParameter", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = 
Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('ionChannel', node) - if value is not None and 'ionChannel' not in already_processed: - already_processed.add('ionChannel') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("ionChannel", node) + if value is not None and "ionChannel" not in already_processed: + already_processed.add("ionChannel") self.ion_channel = value - self.validate_NmlId(self.ion_channel) # validate type NmlId - value = find_attr_value_('condDensity', node) - if value is not None and 'condDensity' not in already_processed: - already_processed.add('condDensity') + self.validate_NmlId(self.ion_channel) # validate type NmlId + value = find_attr_value_("condDensity", node) + if value is not None and "condDensity" not in already_processed: + already_processed.add("condDensity") self.cond_density = value - self.validate_Nml2Quantity_conductanceDensity(self.cond_density) # validate type Nml2Quantity_conductanceDensity - value = find_attr_value_('erev', node) - if value is not None and 'erev' not in already_processed: - already_processed.add('erev') + self.validate_Nml2Quantity_conductanceDensity( + self.cond_density + ) # validate type Nml2Quantity_conductanceDensity + value = find_attr_value_("erev", node) + if value is not None and "erev" not in already_processed: + already_processed.add("erev") self.erev = value - self.validate_Nml2Quantity_voltage(self.erev) # validate type Nml2Quantity_voltage - value = find_attr_value_('segmentGroup', node) - if value is not None and 'segmentGroup' not in already_processed: - already_processed.add('segmentGroup') + self.validate_Nml2Quantity_voltage( + self.erev + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("segmentGroup", node) + if value 
is not None and "segmentGroup" not in already_processed: + already_processed.add("segmentGroup") self.segment_groups = value - self.validate_NmlId(self.segment_groups) # validate type NmlId - value = find_attr_value_('segment', node) - if value is not None and 'segment' not in already_processed: - already_processed.add('segment') - try: - self.segments = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + self.validate_NmlId(self.segment_groups) # validate type NmlId + value = find_attr_value_("segment", node) + if value is not None and "segment" not in already_processed: + already_processed.add("segment") + self.segments = self.gds_parse_integer(value, node, "segment") if self.segments < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - self.validate_NonNegativeInteger(self.segments) # validate type NonNegativeInteger - value = find_attr_value_('ion', node) - if value is not None and 'ion' not in already_processed: - already_processed.add('ion') + raise_parse_error(node, "Invalid NonNegativeInteger") + self.validate_NonNegativeInteger( + self.segments + ) # validate type NonNegativeInteger + value = find_attr_value_("ion", node) + if value is not None and "ion" not in already_processed: + already_processed.add("ion") self.ion = value - self.validate_NmlId(self.ion) # validate type NmlId - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_NmlId(self.ion) # validate type NmlId + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(ChannelDensity, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'variableParameter': + super(ChannelDensity, self)._buildAttributes(node, attrs, 
already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "variableParameter": obj_ = VariableParameter.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.variable_parameters.append(obj_) - obj_.original_tagname_ = 'variableParameter' - super(ChannelDensity, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "variableParameter" + super(ChannelDensity, self)._buildChildren(child_, node, nodeName_, True) + + # end class ChannelDensity class ChannelDensityNonUniformGHK(Base): - """Specifying the ion here again is redundant, this will be set in - ionChannel definition. It is added here TEMPORARILY since - selecting all ca or na conducting channel populations/densities - in a cell would be difficult otherwise. Also, it will make it - easier to set the correct native simulator value for erev (e.g. - ek for ion = k in NEURON). Currently a required attribute. It - should be removed in the longer term, due to possible - inconsistencies in this value and that in the ionChannel - element. 
TODO: remove.""" - member_data_items_ = [ - MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('ion', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('variable_parameters', 'VariableParameter', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'VariableParameter', u'name': u'variableParameter', u'minOccurs': u'0'}, None), - ] - subclass = None - superclass = Base - def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, ion=None, variable_parameters=None, **kwargs_): - self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ChannelDensityNonUniformGHK, self).__init__(neuro_lex_id, id, **kwargs_) - self.ion_channel = _cast(None, ion_channel) - self.ion = _cast(None, ion) - if variable_parameters is None: - self.variable_parameters = [] - else: - self.variable_parameters = variable_parameters - def factory(*args_, **kwargs_): - if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ChannelDensityNonUniformGHK) - if subclass is not None: - return subclass(*args_, **kwargs_) - if ChannelDensityNonUniformGHK.subclass: - return ChannelDensityNonUniformGHK.subclass(*args_, **kwargs_) - else: - return ChannelDensityNonUniformGHK(*args_, **kwargs_) - factory = staticmethod(factory) - def validate_NmlId(self, value): - # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): - if ( - self.variable_parameters or - super(ChannelDensityNonUniformGHK, self).hasContent_() - ): - return True - else: - return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityNonUniformGHK', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ChannelDensityNonUniformGHK') - if imported_ns_def_ is not None: - namespacedef_ = imported_ns_def_ - if pretty_print: - eol_ = '\n' - else: - eol_ = '' - if self.original_tagname_ is not None: - name_ = self.original_tagname_ - showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNonUniformGHK') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityNonUniformGHK', pretty_print=pretty_print) - showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensityNonUniformGHK'): - super(ChannelDensityNonUniformGHK, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNonUniformGHK') - if self.ion_channel is not None and 'ion_channel' not in already_processed: - already_processed.add('ion_channel') - outfile.write(' ionChannel=%s' % (quote_attrib(self.ion_channel), )) - if 
self.ion is not None and 'ion' not in already_processed: - already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityNonUniformGHK', fromsubclass_=False, pretty_print=True): - super(ChannelDensityNonUniformGHK, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - if pretty_print: - eol_ = '\n' - else: - eol_ = '' - for variableParameter_ in self.variable_parameters: - variableParameter_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='variableParameter', pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) - for child in node: - nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) - return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('ionChannel', node) - if value is not None and 'ionChannel' not in already_processed: - already_processed.add('ionChannel') - self.ion_channel = value - self.validate_NmlId(self.ion_channel) # validate type NmlId - value = find_attr_value_('ion', node) - if value is not None and 'ion' not in already_processed: - already_processed.add('ion') - self.ion = value - self.validate_NmlId(self.ion) # validate type NmlId - super(ChannelDensityNonUniformGHK, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'variableParameter': - obj_ = VariableParameter.factory(parent_object_=self) - obj_.build(child_) - self.variable_parameters.append(obj_) - obj_.original_tagname_ = 'variableParameter' - super(ChannelDensityNonUniformGHK, self).buildChildren(child_, node, nodeName_, True) -# end class ChannelDensityNonUniformGHK - + """ChannelDensityNonUniformGHK -- Specifies a time varying 
conductance density, which is distributed on a region of the **cell,** and whose current is calculated from the Goldman-Hodgkin-Katz equation. Hard coded for Ca only!. The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON""" -class ChannelDensityNonUniformNernst(Base): - """Specifying the ion here again is redundant, this will be set in - ionChannel definition. It is added here TEMPORARILY since - selecting all ca or na conducting channel populations/densities - in a cell would be difficult otherwise. Also, it will make it - easier to set the correct native simulator value for erev (e.g. - ek for ion = k in NEURON). Currently a required attribute. It - should be removed in the longer term, due to possible - inconsistencies in this value and that in the ionChannel - element. 
TODO: remove.""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('ion', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('variable_parameters', 'VariableParameter', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'VariableParameter', u'name': u'variableParameter', u'minOccurs': u'0'}, None), + MemberSpec_( + "ion_channel", "NmlId", 0, 0, {"use": "required", "name": "ion_channel"} + ), + MemberSpec_("ion", "NmlId", 0, 0, {"use": "required", "name": "ion"}), + MemberSpec_( + "variable_parameters", + "VariableParameter", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "variableParameter", + "type": "VariableParameter", + }, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, ion=None, variable_parameters=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + ion_channel=None, + ion=None, + variable_parameters=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ChannelDensityNonUniformNernst, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ChannelDensityNonUniformGHK"), self).__init__( + neuro_lex_id, id, **kwargs_ + ) self.ion_channel = _cast(None, ion_channel) + self.ion_channel_nsprefix_ = None self.ion = _cast(None, ion) + self.ion_nsprefix_ = None if variable_parameters is None: self.variable_parameters = [] else: self.variable_parameters = variable_parameters + self.variable_parameters_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ChannelDensityNonUniformNernst) + CurrentSubclassModule_, 
ChannelDensityNonUniformGHK + ) if subclass is not None: return subclass(*args_, **kwargs_) - if ChannelDensityNonUniformNernst.subclass: - return ChannelDensityNonUniformNernst.subclass(*args_, **kwargs_) + if ChannelDensityNonUniformGHK.subclass: + return ChannelDensityNonUniformGHK.subclass(*args_, **kwargs_) else: - return ChannelDensityNonUniformNernst(*args_, **kwargs_) + return ChannelDensityNonUniformGHK(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + + def _hasContent(self): if ( - self.variable_parameters or - super(ChannelDensityNonUniformNernst, self).hasContent_() + self.variable_parameters + or super(ChannelDensityNonUniformGHK, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityNonUniformNernst', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ChannelDensityNonUniformNernst') + + def 
export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="ChannelDensityNonUniformGHK", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ChannelDensityNonUniformGHK") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if ( + self.original_tagname_ is not None + and name_ == "ChannelDensityNonUniformGHK" + ): name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNonUniformNernst') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityNonUniformNernst', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityNonUniformGHK", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ChannelDensityNonUniformGHK", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensityNonUniformNernst'): - super(ChannelDensityNonUniformNernst, self).exportAttributes(outfile, level, 
already_processed, namespaceprefix_, name_='ChannelDensityNonUniformNernst') - if self.ion_channel is not None and 'ion_channel' not in already_processed: - already_processed.add('ion_channel') - outfile.write(' ionChannel=%s' % (quote_attrib(self.ion_channel), )) - if self.ion is not None and 'ion' not in already_processed: - already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityNonUniformNernst', fromsubclass_=False, pretty_print=True): - super(ChannelDensityNonUniformNernst, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ChannelDensityNonUniformGHK", + ): + super(ChannelDensityNonUniformGHK, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityNonUniformGHK", + ) + if self.ion_channel is not None and "ion_channel" not in already_processed: + already_processed.add("ion_channel") + outfile.write( + " ionChannel=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.ion_channel), input_name="ionChannel" + ) + ), + ) + ) + if self.ion is not None and "ion" not in already_processed: + already_processed.add("ion") + outfile.write( + " ion=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.ion), input_name="ion") + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="ChannelDensityNonUniformGHK", + fromsubclass_=False, + pretty_print=True, + ): + super(ChannelDensityNonUniformGHK, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + 
pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for variableParameter_ in self.variable_parameters: - variableParameter_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='variableParameter', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.variable_parameters_nsprefix_ + ":" + if (UseCapturedNS_ and self.variable_parameters_nsprefix_) + else "" + ) + variableParameter_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="variableParameter", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('ionChannel', node) - if value is not None and 'ionChannel' not in already_processed: - already_processed.add('ionChannel') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("ionChannel", node) + if value is not None and "ionChannel" not in already_processed: + already_processed.add("ionChannel") self.ion_channel = value - self.validate_NmlId(self.ion_channel) # validate type NmlId - value = find_attr_value_('ion', node) - if value is not None and 'ion' not in already_processed: - already_processed.add('ion') + self.validate_NmlId(self.ion_channel) # validate type NmlId + value = find_attr_value_("ion", node) + if value is not None and "ion" not in already_processed: + already_processed.add("ion") self.ion = value 
- self.validate_NmlId(self.ion) # validate type NmlId - super(ChannelDensityNonUniformNernst, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'variableParameter': + self.validate_NmlId(self.ion) # validate type NmlId + super(ChannelDensityNonUniformGHK, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "variableParameter": obj_ = VariableParameter.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.variable_parameters.append(obj_) - obj_.original_tagname_ = 'variableParameter' - super(ChannelDensityNonUniformNernst, self).buildChildren(child_, node, nodeName_, True) -# end class ChannelDensityNonUniformNernst + obj_.original_tagname_ = "variableParameter" + super(ChannelDensityNonUniformGHK, self)._buildChildren( + child_, node, nodeName_, True + ) -class ChannelDensityNonUniform(Base): - """Specifying the ion here again is redundant, this will be set in - ionChannel definition. It is added here TEMPORARILY since - selecting all ca or na conducting channel populations/densities - in a cell would be difficult otherwise. Also, it will make it - easier to set the correct native simulator value for erev (e.g. - ek for ion = k in NEURON). Currently a required attribute. It - should be removed in the longer term, due to possible - inconsistencies in this value and that in the ionChannel - element. TODO: remove.""" +# end class ChannelDensityNonUniformGHK + + +class ChannelDensityNonUniformNernst(Base): + """ChannelDensityNonUniformNernst -- Specifies a time varying conductance density, which is distributed on a region of the **cell,** and whose reversal potential is calculated from the Nernst equation. Hard coded for Ca only!. 
The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('erev', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('ion', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('variable_parameters', 'VariableParameter', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'VariableParameter', u'name': u'variableParameter', u'minOccurs': u'0'}, None), + MemberSpec_( + "ion_channel", "NmlId", 0, 0, {"use": "required", "name": "ion_channel"} + ), + MemberSpec_("ion", "NmlId", 0, 0, {"use": "required", "name": "ion"}), + MemberSpec_( + "variable_parameters", + "VariableParameter", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "variableParameter", + "type": "VariableParameter", + }, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, erev=None, ion=None, variable_parameters=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + ion_channel=None, + ion=None, + variable_parameters=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ChannelDensityNonUniform, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ChannelDensityNonUniformNernst"), self).__init__( + neuro_lex_id, id, **kwargs_ + ) self.ion_channel = _cast(None, ion_channel) - self.erev = _cast(None, erev) + 
self.ion_channel_nsprefix_ = None self.ion = _cast(None, ion) + self.ion_nsprefix_ = None if variable_parameters is None: self.variable_parameters = [] else: self.variable_parameters = variable_parameters + self.variable_parameters_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ChannelDensityNonUniform) + CurrentSubclassModule_, ChannelDensityNonUniformNernst + ) if subclass is not None: return subclass(*args_, **kwargs_) - if ChannelDensityNonUniform.subclass: - return ChannelDensityNonUniform.subclass(*args_, **kwargs_) + if ChannelDensityNonUniformNernst.subclass: + return ChannelDensityNonUniformNernst.subclass(*args_, **kwargs_) else: - return ChannelDensityNonUniform(*args_, **kwargs_) + return ChannelDensityNonUniformNernst(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def validate_Nml2Quantity_voltage(self, value): - # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] - def hasContent_(self): + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + + def _hasContent(self): if ( - self.variable_parameters or - super(ChannelDensityNonUniform, self).hasContent_() + self.variable_parameters + or super(ChannelDensityNonUniformNernst, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityNonUniform', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ChannelDensityNonUniform') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="ChannelDensityNonUniformNernst", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get( + "ChannelDensityNonUniformNernst" + ) if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if ( + 
self.original_tagname_ is not None + and name_ == "ChannelDensityNonUniformNernst" + ): name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNonUniform') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityNonUniform', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityNonUniformNernst", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ChannelDensityNonUniformNernst", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensityNonUniform'): - super(ChannelDensityNonUniform, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNonUniform') - if self.ion_channel is not None and 'ion_channel' not in already_processed: - already_processed.add('ion_channel') - outfile.write(' ionChannel=%s' % (quote_attrib(self.ion_channel), )) - if self.erev is not None and 'erev' not in already_processed: - already_processed.add('erev') - outfile.write(' erev=%s' % (quote_attrib(self.erev), )) - if self.ion is not None and 'ion' not in already_processed: - 
already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityNonUniform', fromsubclass_=False, pretty_print=True): - super(ChannelDensityNonUniform, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ChannelDensityNonUniformNernst", + ): + super(ChannelDensityNonUniformNernst, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityNonUniformNernst", + ) + if self.ion_channel is not None and "ion_channel" not in already_processed: + already_processed.add("ion_channel") + outfile.write( + " ionChannel=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.ion_channel), input_name="ionChannel" + ) + ), + ) + ) + if self.ion is not None and "ion" not in already_processed: + already_processed.add("ion") + outfile.write( + " ion=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.ion), input_name="ion") + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="ChannelDensityNonUniformNernst", + fromsubclass_=False, + pretty_print=True, + ): + super(ChannelDensityNonUniformNernst, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for variableParameter_ in self.variable_parameters: - variableParameter_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='variableParameter', pretty_print=pretty_print) - def build(self, node): + 
namespaceprefix_ = ( + self.variable_parameters_nsprefix_ + ":" + if (UseCapturedNS_ and self.variable_parameters_nsprefix_) + else "" + ) + variableParameter_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="variableParameter", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('ionChannel', node) - if value is not None and 'ionChannel' not in already_processed: - already_processed.add('ionChannel') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("ionChannel", node) + if value is not None and "ionChannel" not in already_processed: + already_processed.add("ionChannel") self.ion_channel = value - self.validate_NmlId(self.ion_channel) # validate type NmlId - value = find_attr_value_('erev', node) - if value is not None and 'erev' not in already_processed: - already_processed.add('erev') - self.erev = value - self.validate_Nml2Quantity_voltage(self.erev) # validate type Nml2Quantity_voltage - value = find_attr_value_('ion', node) - if value is not None and 'ion' not in already_processed: - already_processed.add('ion') + self.validate_NmlId(self.ion_channel) # validate type NmlId + value = find_attr_value_("ion", node) + if value is not None and "ion" not in already_processed: + already_processed.add("ion") self.ion = value - self.validate_NmlId(self.ion) # validate type NmlId - 
super(ChannelDensityNonUniform, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'variableParameter': + self.validate_NmlId(self.ion) # validate type NmlId + super(ChannelDensityNonUniformNernst, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "variableParameter": obj_ = VariableParameter.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.variable_parameters.append(obj_) - obj_.original_tagname_ = 'variableParameter' - super(ChannelDensityNonUniform, self).buildChildren(child_, node, nodeName_, True) -# end class ChannelDensityNonUniform + obj_.original_tagname_ = "variableParameter" + super(ChannelDensityNonUniformNernst, self)._buildChildren( + child_, node, nodeName_, True + ) -class ChannelPopulation(Base): - """Specifying the ion here again is redundant, this will be set in - ionChannel definition. It is added here TEMPORARILY since - selecting all ca or na conducting channel populations/densities - in a cell would be difficult otherwise. Also, it will make it - easier to set the correct native simulator value for erev (e.g. - ek for ion = k in NEURON). Currently a required attribute. It - should be removed in the longer term, due to possible - inconsistencies in this value and that in the ionChannel - element. TODO: remove.""" +# end class ChannelDensityNonUniformNernst + + +class ChannelDensityNonUniform(Base): + """ChannelDensityNonUniform -- Specifies a time varying ohmic conductance density, which is distributed on a region of the **cell.** The conductance density of the channel is not uniform, but is set using the **variableParameter** . Note, there is no dynamical description of this in LEMS yet, as this type only makes sense for multicompartmental cells. 
A ComponentType for this needs to be present to enable export of NeuroML 2 multicompartmental cells via LEMS/jNeuroML to NEURON + \n + :param erev: The reversal potential of the current produced + :type erev: voltage + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('ion_channel', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('number', 'NonNegativeInteger', 0, 0, {'use': u'required'}), - MemberSpec_('erev', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('segment_groups', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('segments', 'NonNegativeInteger', 0, 1, {'use': u'optional'}), - MemberSpec_('ion', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('variable_parameters', 'VariableParameter', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'VariableParameter', u'name': u'variableParameter', u'minOccurs': u'0'}, None), + MemberSpec_( + "ion_channel", "NmlId", 0, 0, {"use": "required", "name": "ion_channel"} + ), + MemberSpec_( + "erev", "Nml2Quantity_voltage", 0, 0, {"use": "required", "name": "erev"} + ), + MemberSpec_("ion", "NmlId", 0, 0, {"use": "required", "name": "ion"}), + MemberSpec_( + "variable_parameters", + "VariableParameter", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "variableParameter", + "type": "VariableParameter", + }, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, number=None, erev=None, segment_groups='all', segments=None, ion=None, variable_parameters=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + ion_channel=None, + erev=None, + ion=None, + variable_parameters=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ChannelPopulation, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = 
kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ChannelDensityNonUniform"), self).__init__( + neuro_lex_id, id, **kwargs_ + ) self.ion_channel = _cast(None, ion_channel) - self.number = _cast(int, number) + self.ion_channel_nsprefix_ = None self.erev = _cast(None, erev) - self.segment_groups = _cast(None, segment_groups) - self.segments = _cast(int, segments) + self.erev_nsprefix_ = None self.ion = _cast(None, ion) + self.ion_nsprefix_ = None if variable_parameters is None: self.variable_parameters = [] else: self.variable_parameters = variable_parameters + self.variable_parameters_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ChannelPopulation) + CurrentSubclassModule_, ChannelDensityNonUniform + ) if subclass is not None: return subclass(*args_, **kwargs_) - if ChannelPopulation.subclass: - return ChannelPopulation.subclass(*args_, **kwargs_) + if ChannelDensityNonUniform.subclass: + return ChannelDensityNonUniform.subclass(*args_, **kwargs_) else: - return ChannelPopulation(*args_, **kwargs_) + return ChannelDensityNonUniform(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def validate_NonNegativeInteger(self, value): - # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. - if value is not None and Validate_simpletypes_: - pass + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] - def hasContent_(self): + self.validate_Nml2Quantity_voltage_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$"] + ] + + def _hasContent(self): if ( - self.variable_parameters or - super(ChannelPopulation, self).hasContent_() + self.variable_parameters + or super(ChannelDensityNonUniform, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelPopulation', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ChannelPopulation') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="ChannelDensityNonUniform", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ChannelDensityNonUniform") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + 
eol_ = "" + if self.original_tagname_ is not None and name_ == "ChannelDensityNonUniform": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelPopulation') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelPopulation', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityNonUniform", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ChannelDensityNonUniform", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelPopulation'): - super(ChannelPopulation, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelPopulation') - if self.ion_channel is not None and 'ion_channel' not in already_processed: - already_processed.add('ion_channel') - outfile.write(' ionChannel=%s' % (quote_attrib(self.ion_channel), )) - if self.number is not None and 'number' not in already_processed: - already_processed.add('number') - outfile.write(' number=%s' % (quote_attrib(self.number), )) - if self.erev is not None and 'erev' not in already_processed: - already_processed.add('erev') - 
outfile.write(' erev=%s' % (quote_attrib(self.erev), )) - if self.segment_groups != "all" and 'segment_groups' not in already_processed: - already_processed.add('segment_groups') - outfile.write(' segmentGroup=%s' % (quote_attrib(self.segment_groups), )) - if self.segments is not None and 'segments' not in already_processed: - already_processed.add('segments') - outfile.write(' segment=%s' % (quote_attrib(self.segments), )) - if self.ion is not None and 'ion' not in already_processed: - already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelPopulation', fromsubclass_=False, pretty_print=True): - super(ChannelPopulation, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ChannelDensityNonUniform", + ): + super(ChannelDensityNonUniform, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityNonUniform", + ) + if self.ion_channel is not None and "ion_channel" not in already_processed: + already_processed.add("ion_channel") + outfile.write( + " ionChannel=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.ion_channel), input_name="ionChannel" + ) + ), + ) + ) + if self.erev is not None and "erev" not in already_processed: + already_processed.add("erev") + outfile.write( + " erev=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.erev), input_name="erev" + ) + ), + ) + ) + if self.ion is not None and "ion" not in already_processed: + already_processed.add("ion") + outfile.write( + " ion=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.ion), input_name="ion") + ), + ) + ) + + def 
_exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="ChannelDensityNonUniform", + fromsubclass_=False, + pretty_print=True, + ): + super(ChannelDensityNonUniform, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for variableParameter_ in self.variable_parameters: - variableParameter_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='variableParameter', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.variable_parameters_nsprefix_ + ":" + if (UseCapturedNS_ and self.variable_parameters_nsprefix_) + else "" + ) + variableParameter_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="variableParameter", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('ionChannel', node) - if value is not None and 'ionChannel' not in already_processed: - already_processed.add('ionChannel') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("ionChannel", node) + if value is not None and "ionChannel" not in already_processed: + already_processed.add("ionChannel") self.ion_channel = value - self.validate_NmlId(self.ion_channel) # 
validate type NmlId - value = find_attr_value_('number', node) - if value is not None and 'number' not in already_processed: - already_processed.add('number') - try: - self.number = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) - if self.number < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - self.validate_NonNegativeInteger(self.number) # validate type NonNegativeInteger - value = find_attr_value_('erev', node) - if value is not None and 'erev' not in already_processed: - already_processed.add('erev') + self.validate_NmlId(self.ion_channel) # validate type NmlId + value = find_attr_value_("erev", node) + if value is not None and "erev" not in already_processed: + already_processed.add("erev") self.erev = value - self.validate_Nml2Quantity_voltage(self.erev) # validate type Nml2Quantity_voltage - value = find_attr_value_('segmentGroup', node) - if value is not None and 'segmentGroup' not in already_processed: - already_processed.add('segmentGroup') - self.segment_groups = value - self.validate_NmlId(self.segment_groups) # validate type NmlId - value = find_attr_value_('segment', node) - if value is not None and 'segment' not in already_processed: - already_processed.add('segment') - try: - self.segments = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) - if self.segments < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - self.validate_NonNegativeInteger(self.segments) # validate type NonNegativeInteger - value = find_attr_value_('ion', node) - if value is not None and 'ion' not in already_processed: - already_processed.add('ion') + self.validate_Nml2Quantity_voltage( + self.erev + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("ion", node) + if value is not None and "ion" not in already_processed: + already_processed.add("ion") self.ion = value - self.validate_NmlId(self.ion) # validate type NmlId - 
super(ChannelPopulation, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'variableParameter': + self.validate_NmlId(self.ion) # validate type NmlId + super(ChannelDensityNonUniform, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "variableParameter": obj_ = VariableParameter.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.variable_parameters.append(obj_) - obj_.original_tagname_ = 'variableParameter' - super(ChannelPopulation, self).buildChildren(child_, node, nodeName_, True) -# end class ChannelPopulation + obj_.original_tagname_ = "variableParameter" + super(ChannelDensityNonUniform, self)._buildChildren( + child_, node, nodeName_, True + ) -class Resistivity(ValueAcrossSegOrSegGroup): - """Using a thin extension of ValueAcrossSegOrSegGroup to facilitate - library generation (e.g. 
libNeuroML)""" - member_data_items_ = [ - ] - subclass = None - superclass = ValueAcrossSegOrSegGroup - def __init__(self, value=None, segment_groups='all', segments=None, **kwargs_): - self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Resistivity, self).__init__(value, segment_groups, segments, **kwargs_) - def factory(*args_, **kwargs_): - if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Resistivity) - if subclass is not None: - return subclass(*args_, **kwargs_) - if Resistivity.subclass: - return Resistivity.subclass(*args_, **kwargs_) - else: - return Resistivity(*args_, **kwargs_) - factory = staticmethod(factory) - def hasContent_(self): - if ( - super(Resistivity, self).hasContent_() - ): - return True - else: - return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Resistivity', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Resistivity') - if imported_ns_def_ is not None: - namespacedef_ = imported_ns_def_ - if pretty_print: - eol_ = '\n' - else: - eol_ = '' - if self.original_tagname_ is not None: - name_ = self.original_tagname_ - showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Resistivity') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Resistivity', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Resistivity'): - super(Resistivity, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Resistivity') - def 
exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Resistivity', fromsubclass_=False, pretty_print=True): - super(Resistivity, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - pass - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) - for child in node: - nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) - return self - def buildAttributes(self, node, attrs, already_processed): - super(Resistivity, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(Resistivity, self).buildChildren(child_, node, nodeName_, True) - pass -# end class Resistivity +# end class ChannelDensityNonUniform -class InitMembPotential(ValueAcrossSegOrSegGroup): - """Using a thin extension of ValueAcrossSegOrSegGroup to facilitate - library generation (e.g. 
libNeuroML)""" - member_data_items_ = [ - ] - subclass = None - superclass = ValueAcrossSegOrSegGroup - def __init__(self, value=None, segment_groups='all', segments=None, **kwargs_): - self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(InitMembPotential, self).__init__(value, segment_groups, segments, **kwargs_) - def factory(*args_, **kwargs_): - if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, InitMembPotential) - if subclass is not None: - return subclass(*args_, **kwargs_) - if InitMembPotential.subclass: - return InitMembPotential.subclass(*args_, **kwargs_) - else: - return InitMembPotential(*args_, **kwargs_) - factory = staticmethod(factory) - def hasContent_(self): - if ( - super(InitMembPotential, self).hasContent_() - ): - return True - else: - return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InitMembPotential', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('InitMembPotential') - if imported_ns_def_ is not None: - namespacedef_ = imported_ns_def_ - if pretty_print: - eol_ = '\n' - else: - eol_ = '' - if self.original_tagname_ is not None: - name_ = self.original_tagname_ - showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InitMembPotential') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='InitMembPotential', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='InitMembPotential'): - super(InitMembPotential, self).exportAttributes(outfile, 
level, already_processed, namespaceprefix_, name_='InitMembPotential') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InitMembPotential', fromsubclass_=False, pretty_print=True): - super(InitMembPotential, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - pass - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) - for child in node: - nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) - return self - def buildAttributes(self, node, attrs, already_processed): - super(InitMembPotential, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(InitMembPotential, self).buildChildren(child_, node, nodeName_, True) - pass -# end class InitMembPotential +class ChannelPopulation(Base): + """ChannelPopulation -- Population of a **number** of ohmic ion channels. These each produce a conductance **channelg** across a reversal potential **erev,** giving a total current **i.** Note that active membrane currents are more frequently specified as a density over an area of the **cell** using **channelDensity** + \n + :param number: The number of channels present. This will be multiplied by the time varying conductance of the individual ion channel ( which extends **baseIonChannel** ) to produce the total conductance + :type number: none + :param erev: The reversal potential of the current produced + :type erev: voltage + """ -class SpecificCapacitance(ValueAcrossSegOrSegGroup): - """Using a thin extension of ValueAcrossSegOrSegGroup to facilitate - library generation (e.g. 
libNeuroML)""" + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ + MemberSpec_( + "ion_channel", "NmlId", 0, 0, {"use": "required", "name": "ion_channel"} + ), + MemberSpec_( + "number", "NonNegativeInteger", 0, 0, {"use": "required", "name": "number"} + ), + MemberSpec_( + "erev", "Nml2Quantity_voltage", 0, 0, {"use": "required", "name": "erev"} + ), + MemberSpec_( + "segment_groups", + "NmlId", + 0, + 1, + {"use": "optional", "name": "segment_groups"}, + ), + MemberSpec_( + "segments", + "NonNegativeInteger", + 0, + 1, + {"use": "optional", "name": "segments"}, + ), + MemberSpec_("ion", "NmlId", 0, 0, {"use": "required", "name": "ion"}), + MemberSpec_( + "variable_parameters", + "VariableParameter", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "variableParameter", + "type": "VariableParameter", + }, + None, + ), ] subclass = None - superclass = ValueAcrossSegOrSegGroup - def __init__(self, value=None, segment_groups='all', segments=None, **kwargs_): + superclass = Base + + def __init__( + self, + neuro_lex_id=None, + id=None, + ion_channel=None, + number=None, + erev=None, + segment_groups="all", + segments=None, + ion=None, + variable_parameters=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(SpecificCapacitance, self).__init__(value, segment_groups, segments, **kwargs_) - def factory(*args_, **kwargs_): - if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, SpecificCapacitance) - if subclass is not None: - return subclass(*args_, **kwargs_) - if SpecificCapacitance.subclass: - return SpecificCapacitance.subclass(*args_, **kwargs_) - else: - return SpecificCapacitance(*args_, **kwargs_) - factory = staticmethod(factory) - def hasContent_(self): - if ( - super(SpecificCapacitance, self).hasContent_() - ): - return 
True - else: - return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpecificCapacitance', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SpecificCapacitance') - if imported_ns_def_ is not None: - namespacedef_ = imported_ns_def_ - if pretty_print: - eol_ = '\n' + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ChannelPopulation"), self).__init__( + neuro_lex_id, id, **kwargs_ + ) + self.ion_channel = _cast(None, ion_channel) + self.ion_channel_nsprefix_ = None + self.number = _cast(int, number) + self.number_nsprefix_ = None + self.erev = _cast(None, erev) + self.erev_nsprefix_ = None + self.segment_groups = _cast(None, segment_groups) + self.segment_groups_nsprefix_ = None + self.segments = _cast(int, segments) + self.segments_nsprefix_ = None + self.ion = _cast(None, ion) + self.ion_nsprefix_ = None + if variable_parameters is None: + self.variable_parameters = [] else: - eol_ = '' - if self.original_tagname_ is not None: - name_ = self.original_tagname_ - showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpecificCapacitance') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpecificCapacitance', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpecificCapacitance'): - super(SpecificCapacitance, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpecificCapacitance') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', 
name_='SpecificCapacitance', fromsubclass_=False, pretty_print=True): - super(SpecificCapacitance, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - pass - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) - for child in node: - nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) - return self - def buildAttributes(self, node, attrs, already_processed): - super(SpecificCapacitance, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(SpecificCapacitance, self).buildChildren(child_, node, nodeName_, True) - pass -# end class SpecificCapacitance - + self.variable_parameters = variable_parameters + self.variable_parameters_nsprefix_ = None -class SpikeThresh(ValueAcrossSegOrSegGroup): - """Using a thin extension of ValueAcrossSegOrSegGroup to facilitate - library generation (e.g. 
libNeuroML)""" - member_data_items_ = [ - ] - subclass = None - superclass = ValueAcrossSegOrSegGroup - def __init__(self, value=None, segment_groups='all', segments=None, **kwargs_): - self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(SpikeThresh, self).__init__(value, segment_groups, segments, **kwargs_) def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, SpikeThresh) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ChannelPopulation) if subclass is not None: return subclass(*args_, **kwargs_) - if SpikeThresh.subclass: - return SpikeThresh.subclass(*args_, **kwargs_) + if ChannelPopulation.subclass: + return ChannelPopulation.subclass(*args_, **kwargs_) else: - return SpikeThresh(*args_, **kwargs_) + return ChannelPopulation(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): + + def validate_NmlId(self, value): + # Validate type NmlId, a restriction on xs:string. + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + + def validate_NonNegativeInteger(self, value): + # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. 
+ if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + pass + + def validate_Nml2Quantity_voltage(self, value): + # Validate type Nml2Quantity_voltage, a restriction on xs:string. if ( - super(SpikeThresh, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_voltage_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$"] + ] + + def _hasContent(self): + if self.variable_parameters or super(ChannelPopulation, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeThresh', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SpikeThresh') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="ChannelPopulation", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ChannelPopulation") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ 
is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ChannelPopulation": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeThresh') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpikeThresh', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpikeThresh'): - super(SpikeThresh, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeThresh') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeThresh', fromsubclass_=False, pretty_print=True): - super(SpikeThresh, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - pass - def build(self, node): + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelPopulation", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ChannelPopulation", + pretty_print=pretty_print, + ) + showIndent(outfile, level, pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + 
name_="ChannelPopulation", + ): + super(ChannelPopulation, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelPopulation", + ) + if self.ion_channel is not None and "ion_channel" not in already_processed: + already_processed.add("ion_channel") + outfile.write( + " ionChannel=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.ion_channel), input_name="ionChannel" + ) + ), + ) + ) + if self.number is not None and "number" not in already_processed: + already_processed.add("number") + outfile.write( + ' number="%s"' + % self.gds_format_integer(self.number, input_name="number") + ) + if self.erev is not None and "erev" not in already_processed: + already_processed.add("erev") + outfile.write( + " erev=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.erev), input_name="erev" + ) + ), + ) + ) + if self.segment_groups != "all" and "segment_groups" not in already_processed: + already_processed.add("segment_groups") + outfile.write( + " segmentGroup=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.segment_groups), input_name="segmentGroup" + ) + ), + ) + ) + if self.segments is not None and "segments" not in already_processed: + already_processed.add("segments") + outfile.write( + ' segment="%s"' + % self.gds_format_integer(self.segments, input_name="segment") + ) + if self.ion is not None and "ion" not in already_processed: + already_processed.add("ion") + outfile.write( + " ion=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.ion), input_name="ion") + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="ChannelPopulation", + fromsubclass_=False, + pretty_print=True, + ): + super(ChannelPopulation, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + 
pretty_print=pretty_print, + ) + if pretty_print: + eol_ = "\n" + else: + eol_ = "" + for variableParameter_ in self.variable_parameters: + namespaceprefix_ = ( + self.variable_parameters_nsprefix_ + ":" + if (UseCapturedNS_ and self.variable_parameters_nsprefix_) + else "" + ) + variableParameter_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="variableParameter", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(SpikeThresh, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(SpikeThresh, self).buildChildren(child_, node, nodeName_, True) - pass -# end class SpikeThresh + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("ionChannel", node) + if value is not None and "ionChannel" not in already_processed: + already_processed.add("ionChannel") + self.ion_channel = value + self.validate_NmlId(self.ion_channel) # validate type NmlId + value = find_attr_value_("number", node) + if value is not None and "number" not in already_processed: + already_processed.add("number") + self.number = self.gds_parse_integer(value, node, "number") + if self.number < 0: + raise_parse_error(node, "Invalid NonNegativeInteger") + self.validate_NonNegativeInteger( + self.number + ) # validate type NonNegativeInteger + value = find_attr_value_("erev", node) + if 
value is not None and "erev" not in already_processed: + already_processed.add("erev") + self.erev = value + self.validate_Nml2Quantity_voltage( + self.erev + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("segmentGroup", node) + if value is not None and "segmentGroup" not in already_processed: + already_processed.add("segmentGroup") + self.segment_groups = value + self.validate_NmlId(self.segment_groups) # validate type NmlId + value = find_attr_value_("segment", node) + if value is not None and "segment" not in already_processed: + already_processed.add("segment") + self.segments = self.gds_parse_integer(value, node, "segment") + if self.segments < 0: + raise_parse_error(node, "Invalid NonNegativeInteger") + self.validate_NonNegativeInteger( + self.segments + ) # validate type NonNegativeInteger + value = find_attr_value_("ion", node) + if value is not None and "ion" not in already_processed: + already_processed.add("ion") + self.ion = value + self.validate_NmlId(self.ion) # validate type NmlId + super(ChannelPopulation, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "variableParameter": + obj_ = VariableParameter.factory(parent_object_=self) + obj_.build(child_, gds_collector_=gds_collector_) + self.variable_parameters.append(obj_) + obj_.original_tagname_ = "variableParameter" + super(ChannelPopulation, self)._buildChildren(child_, node, nodeName_, True) + + +# end class ChannelPopulation class BiophysicalProperties2CaPools(Standalone): - """Standalone element which is usually inside a single cell, but could - be outside and referenced by id.""" + """BiophysicalProperties2CaPools -- The biophysical properties of the **cell** , including the **membraneProperties2CaPools** and the **intracellularProperties2CaPools** for a cell with two Ca pools""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - 
MemberSpec_('membrane_properties2_ca_pools', 'MembraneProperties2CaPools', 0, 0, {u'type': u'MembraneProperties2CaPools', u'name': u'membraneProperties2CaPools'}, None), - MemberSpec_('intracellular_properties2_ca_pools', 'IntracellularProperties2CaPools', 0, 1, {u'type': u'IntracellularProperties2CaPools', u'name': u'intracellularProperties2CaPools', u'minOccurs': u'0'}, None), - MemberSpec_('extracellular_properties', 'ExtracellularProperties', 0, 1, {u'type': u'ExtracellularProperties', u'name': u'extracellularProperties', u'minOccurs': u'0'}, None), + MemberSpec_( + "membrane_properties2_ca_pools", + "MembraneProperties2CaPools", + 0, + 0, + { + "name": "membraneProperties2CaPools", + "type": "MembraneProperties2CaPools", + }, + None, + ), + MemberSpec_( + "intracellular_properties2_ca_pools", + "IntracellularProperties2CaPools", + 0, + 1, + { + "minOccurs": "0", + "name": "intracellularProperties2CaPools", + "type": "IntracellularProperties2CaPools", + }, + None, + ), + MemberSpec_( + "extracellular_properties", + "ExtracellularProperties", + 0, + 1, + { + "minOccurs": "0", + "name": "extracellularProperties", + "type": "ExtracellularProperties", + }, + None, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, membrane_properties2_ca_pools=None, intracellular_properties2_ca_pools=None, extracellular_properties=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + membrane_properties2_ca_pools=None, + intracellular_properties2_ca_pools=None, + extracellular_properties=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BiophysicalProperties2CaPools, self).__init__(neuro_lex_id, id, metaid, notes, properties, 
annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("BiophysicalProperties2CaPools"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.membrane_properties2_ca_pools = membrane_properties2_ca_pools + self.membrane_properties2_ca_pools_nsprefix_ = None self.intracellular_properties2_ca_pools = intracellular_properties2_ca_pools + self.intracellular_properties2_ca_pools_nsprefix_ = None self.extracellular_properties = extracellular_properties + self.extracellular_properties_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, BiophysicalProperties2CaPools) + CurrentSubclassModule_, BiophysicalProperties2CaPools + ) if subclass is not None: return subclass(*args_, **kwargs_) if BiophysicalProperties2CaPools.subclass: return BiophysicalProperties2CaPools.subclass(*args_, **kwargs_) else: return BiophysicalProperties2CaPools(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): + + def _hasContent(self): if ( - self.membrane_properties2_ca_pools is not None or - self.intracellular_properties2_ca_pools is not None or - self.extracellular_properties is not None or - super(BiophysicalProperties2CaPools, self).hasContent_() + self.membrane_properties2_ca_pools is not None + or self.intracellular_properties2_ca_pools is not None + or self.extracellular_properties is not None + or super(BiophysicalProperties2CaPools, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BiophysicalProperties2CaPools', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BiophysicalProperties2CaPools') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + 
name_="BiophysicalProperties2CaPools", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BiophysicalProperties2CaPools") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if ( + self.original_tagname_ is not None + and name_ == "BiophysicalProperties2CaPools" + ): name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BiophysicalProperties2CaPools') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BiophysicalProperties2CaPools', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BiophysicalProperties2CaPools", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BiophysicalProperties2CaPools", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BiophysicalProperties2CaPools'): - super(BiophysicalProperties2CaPools, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BiophysicalProperties2CaPools') - def exportChildren(self, outfile, level, 
namespaceprefix_='', namespacedef_='', name_='BiophysicalProperties2CaPools', fromsubclass_=False, pretty_print=True): - super(BiophysicalProperties2CaPools, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BiophysicalProperties2CaPools", + ): + super(BiophysicalProperties2CaPools, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BiophysicalProperties2CaPools", + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="BiophysicalProperties2CaPools", + fromsubclass_=False, + pretty_print=True, + ): + super(BiophysicalProperties2CaPools, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.membrane_properties2_ca_pools is not None: - self.membrane_properties2_ca_pools.export(outfile, level, namespaceprefix_, namespacedef_='', name_='membraneProperties2CaPools', pretty_print=pretty_print) + namespaceprefix_ = ( + self.membrane_properties2_ca_pools_nsprefix_ + ":" + if (UseCapturedNS_ and self.membrane_properties2_ca_pools_nsprefix_) + else "" + ) + self.membrane_properties2_ca_pools.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="membraneProperties2CaPools", + pretty_print=pretty_print, + ) if self.intracellular_properties2_ca_pools is not None: - self.intracellular_properties2_ca_pools.export(outfile, level, namespaceprefix_, namespacedef_='', name_='intracellularProperties2CaPools', pretty_print=pretty_print) + namespaceprefix_ = ( + self.intracellular_properties2_ca_pools_nsprefix_ + 
":" + if ( + UseCapturedNS_ and self.intracellular_properties2_ca_pools_nsprefix_ + ) + else "" + ) + self.intracellular_properties2_ca_pools.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="intracellularProperties2CaPools", + pretty_print=pretty_print, + ) if self.extracellular_properties is not None: - self.extracellular_properties.export(outfile, level, namespaceprefix_, namespacedef_='', name_='extracellularProperties', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.extracellular_properties_nsprefix_ + ":" + if (UseCapturedNS_ and self.extracellular_properties_nsprefix_) + else "" + ) + self.extracellular_properties.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="extracellularProperties", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(BiophysicalProperties2CaPools, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'membraneProperties2CaPools': + + def _buildAttributes(self, node, attrs, already_processed): + super(BiophysicalProperties2CaPools, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "membraneProperties2CaPools": obj_ = 
MembraneProperties2CaPools.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.membrane_properties2_ca_pools = obj_ - obj_.original_tagname_ = 'membraneProperties2CaPools' - elif nodeName_ == 'intracellularProperties2CaPools': + obj_.original_tagname_ = "membraneProperties2CaPools" + elif nodeName_ == "intracellularProperties2CaPools": obj_ = IntracellularProperties2CaPools.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.intracellular_properties2_ca_pools = obj_ - obj_.original_tagname_ = 'intracellularProperties2CaPools' - elif nodeName_ == 'extracellularProperties': + obj_.original_tagname_ = "intracellularProperties2CaPools" + elif nodeName_ == "extracellularProperties": obj_ = ExtracellularProperties.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.extracellular_properties = obj_ - obj_.original_tagname_ = 'extracellularProperties' - super(BiophysicalProperties2CaPools, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "extracellularProperties" + super(BiophysicalProperties2CaPools, self)._buildChildren( + child_, node, nodeName_, True + ) + + # end class BiophysicalProperties2CaPools class BiophysicalProperties(Standalone): - """Standalone element which is usually inside a single cell, but could - be outside and referenced by id.""" + """BiophysicalProperties -- The biophysical properties of the **cell** , including the **membraneProperties** and the **intracellularProperties**""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('membrane_properties', 'MembraneProperties', 0, 0, {u'type': u'MembraneProperties', u'name': u'membraneProperties'}, None), - MemberSpec_('intracellular_properties', 'IntracellularProperties', 0, 1, {u'type': u'IntracellularProperties', u'name': u'intracellularProperties', u'minOccurs': u'0'}, None), - 
MemberSpec_('extracellular_properties', 'ExtracellularProperties', 0, 1, {u'type': u'ExtracellularProperties', u'name': u'extracellularProperties', u'minOccurs': u'0'}, None), + MemberSpec_( + "membrane_properties", + "MembraneProperties", + 0, + 0, + {"name": "membraneProperties", "type": "MembraneProperties"}, + None, + ), + MemberSpec_( + "intracellular_properties", + "IntracellularProperties", + 0, + 1, + { + "minOccurs": "0", + "name": "intracellularProperties", + "type": "IntracellularProperties", + }, + None, + ), + MemberSpec_( + "extracellular_properties", + "ExtracellularProperties", + 0, + 1, + { + "minOccurs": "0", + "name": "extracellularProperties", + "type": "ExtracellularProperties", + }, + None, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, membrane_properties=None, intracellular_properties=None, extracellular_properties=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + membrane_properties=None, + intracellular_properties=None, + extracellular_properties=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BiophysicalProperties, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("BiophysicalProperties"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.membrane_properties = membrane_properties + self.membrane_properties_nsprefix_ = None self.intracellular_properties = intracellular_properties + self.intracellular_properties_nsprefix_ = None self.extracellular_properties = extracellular_properties + 
self.extracellular_properties_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, BiophysicalProperties) + CurrentSubclassModule_, BiophysicalProperties + ) if subclass is not None: return subclass(*args_, **kwargs_) if BiophysicalProperties.subclass: return BiophysicalProperties.subclass(*args_, **kwargs_) else: return BiophysicalProperties(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): + + def _hasContent(self): if ( - self.membrane_properties is not None or - self.intracellular_properties is not None or - self.extracellular_properties is not None or - super(BiophysicalProperties, self).hasContent_() + self.membrane_properties is not None + or self.intracellular_properties is not None + or self.extracellular_properties is not None + or super(BiophysicalProperties, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BiophysicalProperties', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BiophysicalProperties') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="BiophysicalProperties", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BiophysicalProperties") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "BiophysicalProperties": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - 
self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BiophysicalProperties') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BiophysicalProperties', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BiophysicalProperties", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BiophysicalProperties", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BiophysicalProperties'): - super(BiophysicalProperties, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BiophysicalProperties') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BiophysicalProperties', fromsubclass_=False, pretty_print=True): - super(BiophysicalProperties, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BiophysicalProperties", + ): + super(BiophysicalProperties, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BiophysicalProperties", + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + 
name_="BiophysicalProperties", + fromsubclass_=False, + pretty_print=True, + ): + super(BiophysicalProperties, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.membrane_properties is not None: - self.membrane_properties.export(outfile, level, namespaceprefix_, namespacedef_='', name_='membraneProperties', pretty_print=pretty_print) + namespaceprefix_ = ( + self.membrane_properties_nsprefix_ + ":" + if (UseCapturedNS_ and self.membrane_properties_nsprefix_) + else "" + ) + self.membrane_properties.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="membraneProperties", + pretty_print=pretty_print, + ) if self.intracellular_properties is not None: - self.intracellular_properties.export(outfile, level, namespaceprefix_, namespacedef_='', name_='intracellularProperties', pretty_print=pretty_print) + namespaceprefix_ = ( + self.intracellular_properties_nsprefix_ + ":" + if (UseCapturedNS_ and self.intracellular_properties_nsprefix_) + else "" + ) + self.intracellular_properties.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="intracellularProperties", + pretty_print=pretty_print, + ) if self.extracellular_properties is not None: - self.extracellular_properties.export(outfile, level, namespaceprefix_, namespacedef_='', name_='extracellularProperties', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.extracellular_properties_nsprefix_ + ":" + if (UseCapturedNS_ and self.extracellular_properties_nsprefix_) + else "" + ) + self.extracellular_properties.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="extracellularProperties", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = 
node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(BiophysicalProperties, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'membraneProperties': + + def _buildAttributes(self, node, attrs, already_processed): + super(BiophysicalProperties, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "membraneProperties": class_obj_ = self.get_class_obj_(child_, MembraneProperties) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.membrane_properties = obj_ - obj_.original_tagname_ = 'membraneProperties' - elif nodeName_ == 'intracellularProperties': + obj_.original_tagname_ = "membraneProperties" + elif nodeName_ == "intracellularProperties": class_obj_ = self.get_class_obj_(child_, IntracellularProperties) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.intracellular_properties = obj_ - obj_.original_tagname_ = 'intracellularProperties' - elif nodeName_ == 'extracellularProperties': + obj_.original_tagname_ = "intracellularProperties" + elif nodeName_ == "extracellularProperties": obj_ = ExtracellularProperties.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.extracellular_properties = obj_ - obj_.original_tagname_ = 
'extracellularProperties' - super(BiophysicalProperties, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "extracellularProperties" + super(BiophysicalProperties, self)._buildChildren(child_, node, nodeName_, True) + + # end class BiophysicalProperties class InhomogeneousParameter(Base): + """InhomogeneousParameter -- An inhomogeneous parameter specified across the **segmentGroup** ( see **variableParameter** for usage ).""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('variable', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('metric', 'Metric', 0, 0, {'use': u'required'}), - MemberSpec_('proximal', 'ProximalDetails', 0, 1, {u'type': u'ProximalDetails', u'name': u'proximal', u'minOccurs': u'0'}, None), - MemberSpec_('distal', 'DistalDetails', 0, 1, {u'type': u'DistalDetails', u'name': u'distal', u'minOccurs': u'0'}, None), + MemberSpec_( + "variable", "xs:string", 0, 0, {"use": "required", "name": "variable"} + ), + MemberSpec_("metric", "Metric", 0, 0, {"use": "required", "name": "metric"}), + MemberSpec_( + "proximal", + "ProximalDetails", + 0, + 1, + {"minOccurs": "0", "name": "proximal", "type": "ProximalDetails"}, + None, + ), + MemberSpec_( + "distal", + "DistalDetails", + 0, + 1, + {"minOccurs": "0", "name": "distal", "type": "DistalDetails"}, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, variable=None, metric=None, proximal=None, distal=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + variable=None, + metric=None, + proximal=None, + distal=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(InhomogeneousParameter, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + 
super(globals().get("InhomogeneousParameter"), self).__init__( + neuro_lex_id, id, **kwargs_ + ) self.variable = _cast(None, variable) + self.variable_nsprefix_ = None self.metric = _cast(None, metric) + self.metric_nsprefix_ = None self.proximal = proximal + self.proximal_nsprefix_ = None self.distal = distal + self.distal_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, InhomogeneousParameter) + CurrentSubclassModule_, InhomogeneousParameter + ) if subclass is not None: return subclass(*args_, **kwargs_) if InhomogeneousParameter.subclass: return InhomogeneousParameter.subclass(*args_, **kwargs_) else: return InhomogeneousParameter(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Metric(self, value): # Validate type Metric, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - value = str(value) - enumerations = ['Path Length from root'] - enumeration_respectee = False - for enum in enumerations: - if value == enum: - enumeration_respectee = True - break - if not enumeration_respectee: - warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on Metric' % {"value" : value.encode("utf-8")} ) - def hasContent_(self): if ( - self.proximal is not None or - self.distal is not None or - super(InhomogeneousParameter, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + value = value + enumerations = ["Path Length from root"] + if value not in enumerations: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s does not match xsd enumeration 
restriction on Metric' + % {"value": encode_str_2_3(value), "lineno": lineno} + ) + result = False + + def _hasContent(self): + if ( + self.proximal is not None + or self.distal is not None + or super(InhomogeneousParameter, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InhomogeneousParameter', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('InhomogeneousParameter') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="InhomogeneousParameter", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("InhomogeneousParameter") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "InhomogeneousParameter": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InhomogeneousParameter') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='InhomogeneousParameter', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="InhomogeneousParameter", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + 
namespaceprefix_, + namespacedef_, + name_="InhomogeneousParameter", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='InhomogeneousParameter'): - super(InhomogeneousParameter, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='InhomogeneousParameter') - if self.variable is not None and 'variable' not in already_processed: - already_processed.add('variable') - outfile.write(' variable=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.variable), input_name='variable')), )) - if self.metric is not None and 'metric' not in already_processed: - already_processed.add('metric') - outfile.write(' metric=%s' % (quote_attrib(self.metric), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='InhomogeneousParameter', fromsubclass_=False, pretty_print=True): - super(InhomogeneousParameter, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="InhomogeneousParameter", + ): + super(InhomogeneousParameter, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="InhomogeneousParameter", + ) + if self.variable is not None and "variable" not in already_processed: + already_processed.add("variable") + outfile.write( + " variable=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.variable), input_name="variable" + ) + ), + ) + ) + if self.metric is not None and "metric" not in already_processed: + already_processed.add("metric") + outfile.write( + " metric=%s" + % ( + self.gds_encode( + 
self.gds_format_string( + quote_attrib(self.metric), input_name="metric" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="InhomogeneousParameter", + fromsubclass_=False, + pretty_print=True, + ): + super(InhomogeneousParameter, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.proximal is not None: - self.proximal.export(outfile, level, namespaceprefix_, namespacedef_='', name_='proximal', pretty_print=pretty_print) + namespaceprefix_ = ( + self.proximal_nsprefix_ + ":" + if (UseCapturedNS_ and self.proximal_nsprefix_) + else "" + ) + self.proximal.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="proximal", + pretty_print=pretty_print, + ) if self.distal is not None: - self.distal.export(outfile, level, namespaceprefix_, namespacedef_='', name_='distal', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.distal_nsprefix_ + ":" + if (UseCapturedNS_ and self.distal_nsprefix_) + else "" + ) + self.distal.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="distal", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = 
find_attr_value_('variable', node) - if value is not None and 'variable' not in already_processed: - already_processed.add('variable') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("variable", node) + if value is not None and "variable" not in already_processed: + already_processed.add("variable") self.variable = value - value = find_attr_value_('metric', node) - if value is not None and 'metric' not in already_processed: - already_processed.add('metric') + value = find_attr_value_("metric", node) + if value is not None and "metric" not in already_processed: + already_processed.add("metric") self.metric = value - self.validate_Metric(self.metric) # validate type Metric - super(InhomogeneousParameter, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'proximal': + self.validate_Metric(self.metric) # validate type Metric + super(InhomogeneousParameter, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "proximal": obj_ = ProximalDetails.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.proximal = obj_ - obj_.original_tagname_ = 'proximal' - elif nodeName_ == 'distal': + obj_.original_tagname_ = "proximal" + elif nodeName_ == "distal": obj_ = DistalDetails.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.distal = obj_ - obj_.original_tagname_ = 'distal' - super(InhomogeneousParameter, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "distal" + super(InhomogeneousParameter, self)._buildChildren( + child_, node, nodeName_, True + ) + + # end class InhomogeneousParameter class SegmentGroup(Base): + """SegmentGroup -- A method to describe a group of **segment** s 
in a **morphology** , e. g. soma_group, dendrite_group, axon_group. While a name is useful to describe the group, the **neuroLexId** attribute can be used to explicitly specify the meaning of the group, e. g. sao1044911821 for 'Neuronal Cell Body', sao1211023249 for 'Dendrite'. The **segment** s in this group can be specified as: a list of individual **member** segments; a **path** , all of the segments along which should be included; a **subTree** of the **cell** to include; other segmentGroups to **include** ( so all segments from those get included here ). An **inhomogeneousParameter** can be defined on the region of the cell specified by this group ( see **variableParameter** for usage ).""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('properties', 'Property', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Property', u'name': u'property', u'minOccurs': u'0'}, None), - MemberSpec_('annotation', 'Annotation', 0, 1, {u'type': u'Annotation', u'name': u'annotation', u'minOccurs': u'0'}, None), - MemberSpec_('members', 'Member', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Member', u'name': u'member', u'minOccurs': u'0'}, None), - MemberSpec_('includes', 'Include', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Include', u'name': u'include', u'minOccurs': u'0'}, None), - MemberSpec_('paths', 'Path', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Path', u'name': u'path', u'minOccurs': u'0'}, None), - MemberSpec_('sub_trees', 'SubTree', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SubTree', u'name': u'subTree', u'minOccurs': u'0'}, None), - MemberSpec_('inhomogeneous_parameters', 'InhomogeneousParameter', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'InhomogeneousParameter', u'name': u'inhomogeneousParameter', u'minOccurs': u'0'}, None), + MemberSpec_( + "notes", + ["Notes", "xs:string"], + 0, + 1, + {"minOccurs": 
"0", "name": "notes", "type": "xs:string"}, + None, + ), + MemberSpec_( + "properties", + "Property", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "property", + "type": "Property", + }, + None, + ), + MemberSpec_( + "annotation", + "Annotation", + 0, + 1, + {"minOccurs": "0", "name": "annotation", "type": "Annotation"}, + None, + ), + MemberSpec_( + "members", + "Member", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "member", + "type": "Member", + }, + None, + ), + MemberSpec_( + "includes", + "Include", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "include", + "type": "Include", + }, + None, + ), + MemberSpec_( + "paths", + "Path", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "path", + "type": "Path", + }, + None, + ), + MemberSpec_( + "sub_trees", + "SubTree", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "subTree", + "type": "SubTree", + }, + None, + ), + MemberSpec_( + "inhomogeneous_parameters", + "InhomogeneousParameter", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "inhomogeneousParameter", + "type": "InhomogeneousParameter", + }, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, notes=None, properties=None, annotation=None, members=None, includes=None, paths=None, sub_trees=None, inhomogeneous_parameters=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + notes=None, + properties=None, + annotation=None, + members=None, + includes=None, + paths=None, + sub_trees=None, + inhomogeneous_parameters=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(SegmentGroup, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + 
self.ns_prefix_ = None + super(globals().get("SegmentGroup"), self).__init__(neuro_lex_id, id, **kwargs_) self.notes = notes self.validate_Notes(self.notes) + self.notes_nsprefix_ = None if properties is None: self.properties = [] else: self.properties = properties + self.properties_nsprefix_ = None self.annotation = annotation + self.annotation_nsprefix_ = None if members is None: self.members = [] else: self.members = members + self.members_nsprefix_ = None if includes is None: self.includes = [] else: self.includes = includes + self.includes_nsprefix_ = None if paths is None: self.paths = [] else: self.paths = paths + self.paths_nsprefix_ = None if sub_trees is None: self.sub_trees = [] else: self.sub_trees = sub_trees + self.sub_trees_nsprefix_ = None if inhomogeneous_parameters is None: self.inhomogeneous_parameters = [] else: self.inhomogeneous_parameters = inhomogeneous_parameters + self.inhomogeneous_parameters_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, SegmentGroup) + subclass = getSubclassFromModule_(CurrentSubclassModule_, SegmentGroup) if subclass is not None: return subclass(*args_, **kwargs_) if SegmentGroup.subclass: return SegmentGroup.subclass(*args_, **kwargs_) else: return SegmentGroup(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Notes(self, value): + result = True # Validate type Notes, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False pass - def hasContent_(self): - if ( - self.notes is not None or - self.properties or - self.annotation is not None or - self.members or - self.includes or - self.paths or - self.sub_trees or - self.inhomogeneous_parameters or - super(SegmentGroup, self).hasContent_() + return result + + def _hasContent(self): + if ( + self.notes is not None + or self.properties + or self.annotation is not None + or self.members + or self.includes + or self.paths + or self.sub_trees + or self.inhomogeneous_parameters + or super(SegmentGroup, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SegmentGroup', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SegmentGroup') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="SegmentGroup", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SegmentGroup") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "SegmentGroup": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, 
already_processed, namespaceprefix_, name_='SegmentGroup') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SegmentGroup', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SegmentGroup" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SegmentGroup", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SegmentGroup'): - super(SegmentGroup, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SegmentGroup') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SegmentGroup', fromsubclass_=False, pretty_print=True): - super(SegmentGroup, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="SegmentGroup", + ): + super(SegmentGroup, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SegmentGroup" + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="SegmentGroup", + fromsubclass_=False, + pretty_print=True, + ): + super(SegmentGroup, self)._exportChildren( + outfile, + level, + namespaceprefix_, + 
namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.notes is not None: + namespaceprefix_ = ( + self.notes_nsprefix_ + ":" + if (UseCapturedNS_ and self.notes_nsprefix_) + else "" + ) showIndent(outfile, level, pretty_print) - outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) + outfile.write( + "<%snotes>%s%s" + % ( + namespaceprefix_, + self.gds_encode( + self.gds_format_string( + quote_xml(self.notes), input_name="notes" + ) + ), + namespaceprefix_, + eol_, + ) + ) for property_ in self.properties: - property_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='property', pretty_print=pretty_print) + namespaceprefix_ = ( + self.properties_nsprefix_ + ":" + if (UseCapturedNS_ and self.properties_nsprefix_) + else "" + ) + property_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="property", + pretty_print=pretty_print, + ) if self.annotation is not None: - self.annotation.export(outfile, level, namespaceprefix_, namespacedef_='', name_='annotation', pretty_print=pretty_print) + namespaceprefix_ = ( + self.annotation_nsprefix_ + ":" + if (UseCapturedNS_ and self.annotation_nsprefix_) + else "" + ) + self.annotation.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="annotation", + pretty_print=pretty_print, + ) for member_ in self.members: - member_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='member', pretty_print=pretty_print) + namespaceprefix_ = ( + self.members_nsprefix_ + ":" + if (UseCapturedNS_ and self.members_nsprefix_) + else "" + ) + member_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="member", + pretty_print=pretty_print, + ) for include_ in self.includes: - include_.export(outfile, level, namespaceprefix_, namespacedef_='', 
name_='include', pretty_print=pretty_print) + namespaceprefix_ = ( + self.includes_nsprefix_ + ":" + if (UseCapturedNS_ and self.includes_nsprefix_) + else "" + ) + include_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="include", + pretty_print=pretty_print, + ) for path_ in self.paths: - path_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='path', pretty_print=pretty_print) + namespaceprefix_ = ( + self.paths_nsprefix_ + ":" + if (UseCapturedNS_ and self.paths_nsprefix_) + else "" + ) + path_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="path", + pretty_print=pretty_print, + ) for subTree_ in self.sub_trees: - subTree_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='subTree', pretty_print=pretty_print) + namespaceprefix_ = ( + self.sub_trees_nsprefix_ + ":" + if (UseCapturedNS_ and self.sub_trees_nsprefix_) + else "" + ) + subTree_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="subTree", + pretty_print=pretty_print, + ) for inhomogeneousParameter_ in self.inhomogeneous_parameters: - inhomogeneousParameter_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='inhomogeneousParameter', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.inhomogeneous_parameters_nsprefix_ + ":" + if (UseCapturedNS_ and self.inhomogeneous_parameters_nsprefix_) + else "" + ) + inhomogeneousParameter_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="inhomogeneousParameter", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = 
Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(SegmentGroup, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'notes': - notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') - self.notes = notes_ + + def _buildAttributes(self, node, attrs, already_processed): + super(SegmentGroup, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "notes": + value_ = child_.text + value_ = self.gds_parse_string(value_, node, "notes") + value_ = self.gds_validate_string(value_, node, "notes") + self.notes = value_ + self.notes_nsprefix_ = child_.prefix # validate type Notes self.validate_Notes(self.notes) - elif nodeName_ == 'property': + elif nodeName_ == "property": obj_ = Property.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.properties.append(obj_) - obj_.original_tagname_ = 'property' - elif nodeName_ == 'annotation': + obj_.original_tagname_ = "property" + elif nodeName_ == "annotation": obj_ = Annotation.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.annotation = obj_ - obj_.original_tagname_ = 'annotation' - elif nodeName_ == 'member': + obj_.original_tagname_ = "annotation" + elif nodeName_ == "member": obj_ = Member.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.members.append(obj_) - obj_.original_tagname_ = 'member' - elif nodeName_ == 'include': + obj_.original_tagname_ = "member" + elif nodeName_ == "include": obj_ = Include.factory(parent_object_=self) 
- obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.includes.append(obj_) - obj_.original_tagname_ = 'include' - elif nodeName_ == 'path': + obj_.original_tagname_ = "include" + elif nodeName_ == "path": obj_ = Path.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.paths.append(obj_) - obj_.original_tagname_ = 'path' - elif nodeName_ == 'subTree': + obj_.original_tagname_ = "path" + elif nodeName_ == "subTree": obj_ = SubTree.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.sub_trees.append(obj_) - obj_.original_tagname_ = 'subTree' - elif nodeName_ == 'inhomogeneousParameter': + obj_.original_tagname_ = "subTree" + elif nodeName_ == "inhomogeneousParameter": obj_ = InhomogeneousParameter.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.inhomogeneous_parameters.append(obj_) - obj_.original_tagname_ = 'inhomogeneousParameter' - super(SegmentGroup, self).buildChildren(child_, node, nodeName_, True) - + obj_.original_tagname_ = "inhomogeneousParameter" + super(SegmentGroup, self)._buildChildren(child_, node, nodeName_, True) def __str__(self): - return "SegmentGroup: "+str(self.id)+", "+str(len(self.members))+" member(s), "+str(len(self.includes))+" included group(s)" + return ( + "SegmentGroup: " + + str(self.id) + + ", " + + str(len(self.members)) + + " member(s), " + + str(len(self.includes)) + + " included group(s)" + ) def __repr__(self): return str(self) + # end class SegmentGroup class Segment(BaseNonNegativeIntegerId): + """Segment -- A segment defines the smallest unit within a possibly branching structure ( **morphology** ), such as a dendrite or axon. Its **id** should be a nonnegative integer ( usually soma/root = 0 ). Its end points are given by the **proximal** and **distal** points. 
The **proximal** point can be omitted, usually because it is the same as a point on the **parent** segment, see **proximal** for details. **parent** specifies the parent segment. The first segment of a **cell** ( with no **parent** ) usually represents the soma. The shape is normally a cylinder ( radii of the **proximal** and **distal** equal, but positions different ) or a conical frustum ( radii and positions different ). If the x, y, x positions of the **proximal** and **distal** are equal, the segment can be interpreted as a sphere, and in this case the radii of these points must be equal. NOTE: LEMS does not yet support multicompartmental modelling, so the Dynamics here is only appropriate for single compartment modelling.""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('name', 'xs:string', 0, 1, {'use': u'optional'}), - MemberSpec_('parent', 'SegmentParent', 0, 1, {u'type': u'SegmentParent', u'name': u'parent', u'minOccurs': u'0'}, None), - MemberSpec_('proximal', 'Point3DWithDiam', 0, 1, {u'type': u'Point3DWithDiam', u'name': u'proximal', u'minOccurs': u'0'}, None), - MemberSpec_('distal', 'Point3DWithDiam', 0, 0, {u'type': u'Point3DWithDiam', u'name': u'distal', u'minOccurs': u'1'}, None), + MemberSpec_("name", "xs:string", 0, 1, {"use": "optional", "name": "name"}), + MemberSpec_( + "parent", + "SegmentParent", + 0, + 1, + {"minOccurs": "0", "name": "parent", "type": "SegmentParent"}, + None, + ), + MemberSpec_( + "proximal", + "Point3DWithDiam", + 0, + 1, + {"minOccurs": "0", "name": "proximal", "type": "Point3DWithDiam"}, + None, + ), + MemberSpec_( + "distal", + "Point3DWithDiam", + 0, + 0, + {"minOccurs": "1", "name": "distal", "type": "Point3DWithDiam"}, + None, + ), ] subclass = None superclass = BaseNonNegativeIntegerId - def __init__(self, neuro_lex_id=None, id=None, name=None, parent=None, proximal=None, distal=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + name=None, + parent=None, + 
proximal=None, + distal=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Segment, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("Segment"), self).__init__(neuro_lex_id, id, **kwargs_) self.name = _cast(None, name) + self.name_nsprefix_ = None self.parent = parent + self.parent_nsprefix_ = None self.proximal = proximal + self.proximal_nsprefix_ = None self.distal = distal + self.distal_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Segment) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Segment) if subclass is not None: return subclass(*args_, **kwargs_) if Segment.subclass: return Segment.subclass(*args_, **kwargs_) else: return Segment(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): + + def _hasContent(self): if ( - self.parent is not None or - self.proximal is not None or - self.distal is not None or - super(Segment, self).hasContent_() + self.parent is not None + or self.proximal is not None + or self.distal is not None + or super(Segment, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Segment', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Segment') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="Segment", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Segment") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ 
is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Segment": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Segment') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Segment', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Segment" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Segment", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Segment'): - super(Segment, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Segment') - if self.name is not None and 'name' not in already_processed: - already_processed.add('name') - outfile.write(' name=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.name), input_name='name')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Segment', fromsubclass_=False, pretty_print=True): - super(Segment, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % 
(eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Segment" + ): + super(Segment, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Segment" + ) + if self.name is not None and "name" not in already_processed: + already_processed.add("name") + outfile.write( + " name=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.name), input_name="name" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="Segment", + fromsubclass_=False, + pretty_print=True, + ): + super(Segment, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.parent is not None: - self.parent.export(outfile, level, namespaceprefix_, namespacedef_='', name_='parent', pretty_print=pretty_print) + namespaceprefix_ = ( + self.parent_nsprefix_ + ":" + if (UseCapturedNS_ and self.parent_nsprefix_) + else "" + ) + self.parent.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="parent", + pretty_print=pretty_print, + ) if self.proximal is not None: - self.proximal.export(outfile, level, namespaceprefix_, namespacedef_='', name_='proximal', pretty_print=pretty_print) + namespaceprefix_ = ( + self.proximal_nsprefix_ + ":" + if (UseCapturedNS_ and self.proximal_nsprefix_) + else "" + ) + self.proximal.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="proximal", + pretty_print=pretty_print, + ) if self.distal is not None: - self.distal.export(outfile, level, namespaceprefix_, namespacedef_='', name_='distal', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.distal_nsprefix_ + ":" + if (UseCapturedNS_ and self.distal_nsprefix_) + else "" + ) + 
self.distal.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="distal", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('name', node) - if value is not None and 'name' not in already_processed: - already_processed.add('name') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("name", node) + if value is not None and "name" not in already_processed: + already_processed.add("name") self.name = value - super(Segment, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'parent': + super(Segment, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "parent": obj_ = SegmentParent.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.parent = obj_ - obj_.original_tagname_ = 'parent' - elif nodeName_ == 'proximal': + obj_.original_tagname_ = "parent" + elif nodeName_ == "proximal": obj_ = Point3DWithDiam.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.proximal = obj_ - obj_.original_tagname_ = 'proximal' - elif nodeName_ == 'distal': + obj_.original_tagname_ = "proximal" + elif nodeName_ 
== "distal": obj_ = Point3DWithDiam.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.distal = obj_ - obj_.original_tagname_ = 'distal' - super(Segment, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "distal" + super(Segment, self)._buildChildren(child_, node, nodeName_, True) + @property def length(self): + """Get the length of the segment. + + :returns: length of the segment + :rtype: float + """ - if self.proximal==None: - raise Exception('Cannot get length of segment '+str(self.id)+' using the length property, since no proximal point is set on it (the proximal point comes from the parent segment). Use the method get_segment_length(segment_id) on the cell instead.') + if self.proximal == None: + raise Exception( + "Cannot get length of segment " + + str(self.id) + + " using the length property, since no proximal point is set on it (the proximal point comes from the parent segment). Use the method get_segment_length(segment_id) on the cell instead." + ) prox_x = self.proximal.x prox_y = self.proximal.y @@ -12397,13 +28974,20 @@ def length(self): dist_y = self.distal.y dist_z = self.distal.z - length = ((prox_x-dist_x)**2 + (prox_y-dist_y)**2 + (prox_z-dist_z)**2)**(0.5) + length = ( + (prox_x - dist_x) ** 2 + (prox_y - dist_y) ** 2 + (prox_z - dist_z) ** 2 + ) ** (0.5) return length def __str__(self): - return "" + return ( + "" + ) def __repr__(self): @@ -12411,4246 +28995,10968 @@ def __repr__(self): @property def volume(self): + """Get the volume of the segment. + + :returns: volume of segment + :rtype: float + """ from math import pi - if self.proximal==None: - raise Exception('Cannot get volume of segment '+str(self.id)+' using the volume property, since no proximal point is set on it (the proximal point comes from the parent segment). 
Use the method get_segment_volume(segment_id) on the cell instead.') - prox_rad = self.proximal.diameter/2.0 - dist_rad = self.distal.diameter/2.0 + if self.proximal == None: + raise Exception( + "Cannot get volume of segment " + + str(self.id) + + " using the volume property, since no proximal point is set on it (the proximal point comes from the parent segment). Use the method get_segment_volume(segment_id) on the cell instead." + ) + + prox_rad = self.proximal.diameter / 2.0 + dist_rad = self.distal.diameter / 2.0 - if self.proximal.x == self.distal.x and self.proximal.y == self.distal.y and self.proximal.z == self.distal.z: + if ( + self.proximal.x == self.distal.x + and self.proximal.y == self.distal.y + and self.proximal.z == self.distal.z + ): - if prox_rad!=dist_rad: - raise Exception('Cannot get volume of segment '+str(self.id)+'. The (x,y,z) coordinates of the proximal and distal points match (i.e. it is a sphere), but the diameters of these points are different, making the volume calculation ambiguous.') + if prox_rad != dist_rad: + raise Exception( + "Cannot get volume of segment " + + str(self.id) + + ". The (x,y,z) coordinates of the proximal and distal points match (i.e. it is a sphere), but the diameters of these points are different, making the volume calculation ambiguous." + ) - return 4.0/3 * pi * prox_rad**3 + return 4.0 / 3 * pi * prox_rad ** 3 length = self.length - volume = (pi/3)*length*(prox_rad**2+dist_rad**2+prox_rad*dist_rad) + volume = ( + (pi / 3) * length * (prox_rad ** 2 + dist_rad ** 2 + prox_rad * dist_rad) + ) return volume - + @property def surface_area(self): + """Get the surface area of the segment. + + :returns: surface area of segment + :rtype: float + """ from math import pi from math import sqrt - if self.proximal==None: - raise Exception('Cannot get surface area of segment '+str(self.id)+' using the surface_area property, since no proximal point is set on it (the proximal point comes from the parent segment). 
Use the method get_segment_surface_area(segment_id) on the cell instead.') + if self.proximal == None: + raise Exception( + "Cannot get surface area of segment " + + str(self.id) + + " using the surface_area property, since no proximal point is set on it (the proximal point comes from the parent segment). Use the method get_segment_surface_area(segment_id) on the cell instead." + ) - prox_rad = self.proximal.diameter/2.0 - dist_rad = self.distal.diameter/2.0 + prox_rad = self.proximal.diameter / 2.0 + dist_rad = self.distal.diameter / 2.0 - if self.proximal.x == self.distal.x and self.proximal.y == self.distal.y and self.proximal.z == self.distal.z: + if ( + self.proximal.x == self.distal.x + and self.proximal.y == self.distal.y + and self.proximal.z == self.distal.z + ): - if prox_rad!=dist_rad: - raise Exception('Cannot get surface area of segment '+str(self.id)+'. The (x,y,z) coordinates of the proximal and distal points match (i.e. it is a sphere), but the diameters of these points are different, making the surface area calculation ambiguous.') + if prox_rad != dist_rad: + raise Exception( + "Cannot get surface area of segment " + + str(self.id) + + ". The (x,y,z) coordinates of the proximal and distal points match (i.e. it is a sphere), but the diameters of these points are different, making the surface area calculation ambiguous." 
+ ) - return 4.0 * pi * prox_rad**2 + return 4.0 * pi * prox_rad ** 2 length = self.length - surface_area = pi*(prox_rad+dist_rad)*sqrt((prox_rad-dist_rad)**2+length**2) + surface_area = ( + pi * (prox_rad + dist_rad) * sqrt((prox_rad - dist_rad) ** 2 + length ** 2) + ) return surface_area + # end class Segment class Morphology(Standalone): - """Standalone element which is usually inside a single cell, but could - be outside and referenced by id.""" + """Morphology -- The collection of **segment** s which specify the 3D structure of the cell, along with a number of **segmentGroup** s""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('segments', 'Segment', 1, 0, {u'maxOccurs': u'unbounded', u'type': u'Segment', u'name': u'segment'}, None), - MemberSpec_('segment_groups', 'SegmentGroup', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SegmentGroup', u'name': u'segmentGroup', u'minOccurs': u'0'}, None), + MemberSpec_( + "segments", + "Segment", + 1, + 0, + {"maxOccurs": "unbounded", "name": "segment", "type": "Segment"}, + None, + ), + MemberSpec_( + "segment_groups", + "SegmentGroup", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "segmentGroup", + "type": "SegmentGroup", + }, + None, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, segments=None, segment_groups=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + segments=None, + segment_groups=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Morphology, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = 
None + super(globals().get("Morphology"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) if segments is None: self.segments = [] else: self.segments = segments + self.segments_nsprefix_ = None if segment_groups is None: self.segment_groups = [] else: self.segment_groups = segment_groups + self.segment_groups_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Morphology) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Morphology) if subclass is not None: return subclass(*args_, **kwargs_) if Morphology.subclass: return Morphology.subclass(*args_, **kwargs_) else: return Morphology(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): + + def _hasContent(self): if ( - self.segments or - self.segment_groups or - super(Morphology, self).hasContent_() + self.segments + or self.segment_groups + or super(Morphology, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Morphology', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Morphology') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="Morphology", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Morphology") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Morphology": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - 
already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Morphology') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Morphology', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Morphology" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Morphology", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Morphology'): - super(Morphology, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Morphology') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Morphology', fromsubclass_=False, pretty_print=True): - super(Morphology, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Morphology" + ): + super(Morphology, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Morphology" + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="Morphology", + fromsubclass_=False, + pretty_print=True, + ): + super(Morphology, self)._exportChildren( + outfile, + level, + 
namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for segment_ in self.segments: - segment_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='segment', pretty_print=pretty_print) + namespaceprefix_ = ( + self.segments_nsprefix_ + ":" + if (UseCapturedNS_ and self.segments_nsprefix_) + else "" + ) + segment_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="segment", + pretty_print=pretty_print, + ) for segmentGroup_ in self.segment_groups: - segmentGroup_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='segmentGroup', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.segment_groups_nsprefix_ + ":" + if (UseCapturedNS_ and self.segment_groups_nsprefix_) + else "" + ) + segmentGroup_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="segmentGroup", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(Morphology, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'segment': + + def _buildAttributes(self, node, attrs, already_processed): + super(Morphology, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, 
fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "segment": obj_ = Segment.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.segments.append(obj_) - obj_.original_tagname_ = 'segment' - elif nodeName_ == 'segmentGroup': + obj_.original_tagname_ = "segment" + elif nodeName_ == "segmentGroup": obj_ = SegmentGroup.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.segment_groups.append(obj_) - obj_.original_tagname_ = 'segmentGroup' - super(Morphology, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "segmentGroup" + super(Morphology, self)._buildChildren(child_, node, nodeName_, True) + @property def num_segments(self): + """Get the number of segments included in this cell morphology. + + :returns: number of segments + :rtype: int + """ return len(self.segments) + + # end class Morphology class BaseCell(Standalone): - member_data_items_ = [ - ] + """BaseCell -- Base type of any cell ( e. g. 
point neuron like **izhikevich2007Cell** , or a morphologically detailed **Cell** with **segment** s ) which can be used in a **population**""" + + __hash__ = GeneratedsSuper.__hash__ + member_data_items_ = [] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BaseCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("BaseCell"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + extensiontype_, + **kwargs_ + ) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, BaseCell) + subclass = getSubclassFromModule_(CurrentSubclassModule_, BaseCell) if subclass is not None: return subclass(*args_, **kwargs_) if BaseCell.subclass: return BaseCell.subclass(*args_, **kwargs_) else: return BaseCell(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(BaseCell, self).hasContent_() - ): + + def _hasContent(self): + if super(BaseCell, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseCell', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BaseCell') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseCell", + 
pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BaseCell") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "BaseCell": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseCell') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseCell', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="BaseCell" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BaseCell", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseCell'): - super(BaseCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseCell') - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", 
name_="BaseCell" + ): + super(BaseCell, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="BaseCell" + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseCell', fromsubclass_=False, pretty_print=True): - super(BaseCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseCell", + fromsubclass_=False, + pretty_print=True, + ): + super(BaseCell, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in 
already_processed: - already_processed.add('xsi:type') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(BaseCell, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BaseCell, self).buildChildren(child_, node, nodeName_, True) + super(BaseCell, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(BaseCell, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class BaseCell class BaseSynapse(Standalone): - member_data_items_ = [ - ] + """BaseSynapse -- Base type for all synapses, i. e. ComponentTypes which produce a current ( dimension current ) and change Dynamics in response to an incoming event. 
cno_0000009""" + + __hash__ = GeneratedsSuper.__hash__ + member_data_items_ = [] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BaseSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("BaseSynapse"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + extensiontype_, + **kwargs_ + ) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, BaseSynapse) + subclass = getSubclassFromModule_(CurrentSubclassModule_, BaseSynapse) if subclass is not None: return subclass(*args_, **kwargs_) if BaseSynapse.subclass: return BaseSynapse.subclass(*args_, **kwargs_) else: return BaseSynapse(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(BaseSynapse, self).hasContent_() - ): + + def _hasContent(self): + if super(BaseSynapse, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BaseSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BaseSynapse") if 
imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "BaseSynapse": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseSynapse') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseSynapse', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="BaseSynapse" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BaseSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseSynapse'): - super(BaseSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseSynapse') - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BaseSynapse", + ): + super(BaseSynapse, 
self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="BaseSynapse" + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseSynapse', fromsubclass_=False, pretty_print=True): - super(BaseSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(BaseSynapse, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - 
already_processed.add('xsi:type') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(BaseSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BaseSynapse, self).buildChildren(child_, node, nodeName_, True) + super(BaseSynapse, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(BaseSynapse, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class BaseSynapse class FixedFactorConcentrationModel(Standalone): - """Should not be required, as it's present on the species element!""" + """FixedFactorConcentrationModel -- Model of buffering of concentration of an ion ( currently hard coded to be calcium, due to requirement for **iCa** ) which has a baseline level **restingConc** and tends to this value with time course **decayConstant.** A fixed factor **rho** is used to scale the incoming current *independently of the size of the compartment* to produce a concentration change. 
+ \n + :param restingConc: + :type restingConc: concentration + :param decayConstant: + :type decayConstant: time + :param rho: + :type rho: rho_factor + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('ion', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('resting_conc', 'Nml2Quantity_concentration', 0, 0, {'use': u'required'}), - MemberSpec_('decay_constant', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('rho', 'Nml2Quantity_rhoFactor', 0, 0, {'use': u'required'}), + MemberSpec_("ion", "NmlId", 0, 0, {"use": "required", "name": "ion"}), + MemberSpec_( + "resting_conc", + "Nml2Quantity_concentration", + 0, + 0, + {"use": "required", "name": "resting_conc"}, + ), + MemberSpec_( + "decay_constant", + "Nml2Quantity_time", + 0, + 0, + {"use": "required", "name": "decay_constant"}, + ), + MemberSpec_( + "rho", "Nml2Quantity_rhoFactor", 0, 0, {"use": "required", "name": "rho"} + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, ion=None, resting_conc=None, decay_constant=None, rho=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + ion=None, + resting_conc=None, + decay_constant=None, + rho=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(FixedFactorConcentrationModel, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("FixedFactorConcentrationModel"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.ion = _cast(None, ion) + self.ion_nsprefix_ = None self.resting_conc = _cast(None, 
resting_conc) + self.resting_conc_nsprefix_ = None self.decay_constant = _cast(None, decay_constant) + self.decay_constant_nsprefix_ = None self.rho = _cast(None, rho) + self.rho_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, FixedFactorConcentrationModel) + CurrentSubclassModule_, FixedFactorConcentrationModel + ) if subclass is not None: return subclass(*args_, **kwargs_) if FixedFactorConcentrationModel.subclass: return FixedFactorConcentrationModel.subclass(*args_, **kwargs_) else: return FixedFactorConcentrationModel(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + def validate_Nml2Quantity_concentration(self, value): # Validate type Nml2Quantity_concentration, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_concentration_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_concentration_patterns_, )) - validate_Nml2Quantity_concentration_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m3|mol_per_cm3|M|mM)$']] + self.validate_Nml2Quantity_concentration_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_concentration_patterns_, + ) + ) + + validate_Nml2Quantity_concentration_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m3|mol_per_cm3|M|mM))$"] + ] + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$"] + ] + def validate_Nml2Quantity_rhoFactor(self, value): # Validate type Nml2Quantity_rhoFactor, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_rhoFactor_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_rhoFactor_patterns_, )) - validate_Nml2Quantity_rhoFactor_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m_per_A_per_s|mol_per_cm_per_uA_per_ms)$']] - def hasContent_(self): if ( - super(FixedFactorConcentrationModel, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_rhoFactor_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_rhoFactor_patterns_, + ) + ) + + validate_Nml2Quantity_rhoFactor_patterns_ = [ + [ + "^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m_per_A_per_s|mol_per_cm_per_uA_per_ms))$" + ] + ] + + def _hasContent(self): + if super(FixedFactorConcentrationModel, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FixedFactorConcentrationModel', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('FixedFactorConcentrationModel') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="FixedFactorConcentrationModel", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("FixedFactorConcentrationModel") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' 
+ eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if ( + self.original_tagname_ is not None + and name_ == "FixedFactorConcentrationModel" + ): name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FixedFactorConcentrationModel') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='FixedFactorConcentrationModel', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="FixedFactorConcentrationModel", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="FixedFactorConcentrationModel", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='FixedFactorConcentrationModel'): - super(FixedFactorConcentrationModel, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FixedFactorConcentrationModel') - if self.ion is not None and 'ion' not in already_processed: - already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) - if self.resting_conc is not None and 'resting_conc' not in already_processed: - already_processed.add('resting_conc') - outfile.write(' 
restingConc=%s' % (quote_attrib(self.resting_conc), )) - if self.decay_constant is not None and 'decay_constant' not in already_processed: - already_processed.add('decay_constant') - outfile.write(' decayConstant=%s' % (quote_attrib(self.decay_constant), )) - if self.rho is not None and 'rho' not in already_processed: - already_processed.add('rho') - outfile.write(' rho=%s' % (quote_attrib(self.rho), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FixedFactorConcentrationModel', fromsubclass_=False, pretty_print=True): - super(FixedFactorConcentrationModel, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="FixedFactorConcentrationModel", + ): + super(FixedFactorConcentrationModel, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="FixedFactorConcentrationModel", + ) + if self.ion is not None and "ion" not in already_processed: + already_processed.add("ion") + outfile.write( + " ion=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.ion), input_name="ion") + ), + ) + ) + if self.resting_conc is not None and "resting_conc" not in already_processed: + already_processed.add("resting_conc") + outfile.write( + " restingConc=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.resting_conc), input_name="restingConc" + ) + ), + ) + ) + if ( + self.decay_constant is not None + and "decay_constant" not in already_processed + ): + already_processed.add("decay_constant") + outfile.write( + " decayConstant=%s" + % ( + self.gds_encode( + self.gds_format_string( + 
quote_attrib(self.decay_constant), + input_name="decayConstant", + ) + ), + ) + ) + if self.rho is not None and "rho" not in already_processed: + already_processed.add("rho") + outfile.write( + " rho=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.rho), input_name="rho") + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="FixedFactorConcentrationModel", + fromsubclass_=False, + pretty_print=True, + ): + super(FixedFactorConcentrationModel, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('ion', node) - if value is not None and 'ion' not in already_processed: - already_processed.add('ion') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("ion", node) + if value is not None and "ion" not in already_processed: + already_processed.add("ion") self.ion = value - self.validate_NmlId(self.ion) # validate type NmlId - value = find_attr_value_('restingConc', node) - if value is not None and 'restingConc' not in already_processed: - already_processed.add('restingConc') + self.validate_NmlId(self.ion) # validate type NmlId + value = find_attr_value_("restingConc", node) + if value is not None and "restingConc" not in already_processed: + already_processed.add("restingConc") self.resting_conc = value - 
self.validate_Nml2Quantity_concentration(self.resting_conc) # validate type Nml2Quantity_concentration - value = find_attr_value_('decayConstant', node) - if value is not None and 'decayConstant' not in already_processed: - already_processed.add('decayConstant') + self.validate_Nml2Quantity_concentration( + self.resting_conc + ) # validate type Nml2Quantity_concentration + value = find_attr_value_("decayConstant", node) + if value is not None and "decayConstant" not in already_processed: + already_processed.add("decayConstant") self.decay_constant = value - self.validate_Nml2Quantity_time(self.decay_constant) # validate type Nml2Quantity_time - value = find_attr_value_('rho', node) - if value is not None and 'rho' not in already_processed: - already_processed.add('rho') + self.validate_Nml2Quantity_time( + self.decay_constant + ) # validate type Nml2Quantity_time + value = find_attr_value_("rho", node) + if value is not None and "rho" not in already_processed: + already_processed.add("rho") self.rho = value - self.validate_Nml2Quantity_rhoFactor(self.rho) # validate type Nml2Quantity_rhoFactor - super(FixedFactorConcentrationModel, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(FixedFactorConcentrationModel, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_rhoFactor( + self.rho + ) # validate type Nml2Quantity_rhoFactor + super(FixedFactorConcentrationModel, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(FixedFactorConcentrationModel, self)._buildChildren( + child_, node, nodeName_, True + ) pass + + # end class FixedFactorConcentrationModel class DecayingPoolConcentrationModel(Standalone): - """Should not be required, as it's present on the species element!""" + """DecayingPoolConcentrationModel -- Model of an intracellular 
buffering mechanism for **ion** ( currently hard Coded to be calcium, due to requirement for **iCa** ) which has a baseline level **restingConc** and tends to this value with time course **decayConstant.** The ion is assumed to occupy a shell inside the membrane of thickness **shellThickness.** + \n + :param restingConc: + :type restingConc: concentration + :param decayConstant: + :type decayConstant: time + :param shellThickness: + :type shellThickness: length + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('ion', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('resting_conc', 'Nml2Quantity_concentration', 0, 0, {'use': u'required'}), - MemberSpec_('decay_constant', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('shell_thickness', 'Nml2Quantity_length', 0, 0, {'use': u'required'}), + MemberSpec_("ion", "NmlId", 0, 0, {"use": "required", "name": "ion"}), + MemberSpec_( + "resting_conc", + "Nml2Quantity_concentration", + 0, + 0, + {"use": "required", "name": "resting_conc"}, + ), + MemberSpec_( + "decay_constant", + "Nml2Quantity_time", + 0, + 0, + {"use": "required", "name": "decay_constant"}, + ), + MemberSpec_( + "shell_thickness", + "Nml2Quantity_length", + 0, + 0, + {"use": "required", "name": "shell_thickness"}, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, ion=None, resting_conc=None, decay_constant=None, shell_thickness=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + ion=None, + resting_conc=None, + decay_constant=None, + shell_thickness=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - 
super(DecayingPoolConcentrationModel, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("DecayingPoolConcentrationModel"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + extensiontype_, + **kwargs_ + ) self.ion = _cast(None, ion) + self.ion_nsprefix_ = None self.resting_conc = _cast(None, resting_conc) + self.resting_conc_nsprefix_ = None self.decay_constant = _cast(None, decay_constant) + self.decay_constant_nsprefix_ = None self.shell_thickness = _cast(None, shell_thickness) + self.shell_thickness_nsprefix_ = None self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, DecayingPoolConcentrationModel) + CurrentSubclassModule_, DecayingPoolConcentrationModel + ) if subclass is not None: return subclass(*args_, **kwargs_) if DecayingPoolConcentrationModel.subclass: return DecayingPoolConcentrationModel.subclass(*args_, **kwargs_) else: return DecayingPoolConcentrationModel(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + def validate_Nml2Quantity_concentration(self, value): # Validate type Nml2Quantity_concentration, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_concentration_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_concentration_patterns_, )) - validate_Nml2Quantity_concentration_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m3|mol_per_cm3|M|mM)$']] + self.validate_Nml2Quantity_concentration_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_concentration_patterns_, + ) + ) + + validate_Nml2Quantity_concentration_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(mol_per_m3|mol_per_cm3|M|mM))$"] + ] + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$"] + ] + def validate_Nml2Quantity_length(self, value): # Validate type Nml2Quantity_length, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_length_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_length_patterns_, )) - validate_Nml2Quantity_length_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(m|cm|um)$']] - def hasContent_(self): if ( - super(DecayingPoolConcentrationModel, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_length_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_length_patterns_, + ) + ) + + validate_Nml2Quantity_length_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(m|cm|um))$"] + ] + + def _hasContent(self): + if super(DecayingPoolConcentrationModel, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DecayingPoolConcentrationModel', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('DecayingPoolConcentrationModel') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="DecayingPoolConcentrationModel", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get( + "DecayingPoolConcentrationModel" + ) if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if 
( + self.original_tagname_ is not None + and name_ == "DecayingPoolConcentrationModel" + ): name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DecayingPoolConcentrationModel') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='DecayingPoolConcentrationModel', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="DecayingPoolConcentrationModel", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="DecayingPoolConcentrationModel", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DecayingPoolConcentrationModel'): - super(DecayingPoolConcentrationModel, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DecayingPoolConcentrationModel') - if self.ion is not None and 'ion' not in already_processed: - already_processed.add('ion') - outfile.write(' ion=%s' % (quote_attrib(self.ion), )) - if self.resting_conc is not None and 'resting_conc' not in already_processed: - already_processed.add('resting_conc') - outfile.write(' restingConc=%s' % (quote_attrib(self.resting_conc), )) - if self.decay_constant is not None and 
'decay_constant' not in already_processed: - already_processed.add('decay_constant') - outfile.write(' decayConstant=%s' % (quote_attrib(self.decay_constant), )) - if self.shell_thickness is not None and 'shell_thickness' not in already_processed: - already_processed.add('shell_thickness') - outfile.write(' shellThickness=%s' % (quote_attrib(self.shell_thickness), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="DecayingPoolConcentrationModel", + ): + super(DecayingPoolConcentrationModel, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="DecayingPoolConcentrationModel", + ) + if self.ion is not None and "ion" not in already_processed: + already_processed.add("ion") + outfile.write( + " ion=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.ion), input_name="ion") + ), + ) + ) + if self.resting_conc is not None and "resting_conc" not in already_processed: + already_processed.add("resting_conc") + outfile.write( + " restingConc=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.resting_conc), input_name="restingConc" + ) + ), + ) + ) + if ( + self.decay_constant is not None + and "decay_constant" not in already_processed + ): + already_processed.add("decay_constant") + outfile.write( + " decayConstant=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.decay_constant), + input_name="decayConstant", + ) + ), + ) + ) + if ( + self.shell_thickness is not None + and "shell_thickness" not in already_processed + ): + already_processed.add("shell_thickness") + outfile.write( + " shellThickness=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.shell_thickness), + 
input_name="shellThickness", + ) + ), + ) + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DecayingPoolConcentrationModel', fromsubclass_=False, pretty_print=True): - super(DecayingPoolConcentrationModel, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="DecayingPoolConcentrationModel", + fromsubclass_=False, + pretty_print=True, + ): + super(DecayingPoolConcentrationModel, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('ion', node) - if value is not None and 'ion' not in already_processed: - 
already_processed.add('ion') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("ion", node) + if value is not None and "ion" not in already_processed: + already_processed.add("ion") self.ion = value - self.validate_NmlId(self.ion) # validate type NmlId - value = find_attr_value_('restingConc', node) - if value is not None and 'restingConc' not in already_processed: - already_processed.add('restingConc') + self.validate_NmlId(self.ion) # validate type NmlId + value = find_attr_value_("restingConc", node) + if value is not None and "restingConc" not in already_processed: + already_processed.add("restingConc") self.resting_conc = value - self.validate_Nml2Quantity_concentration(self.resting_conc) # validate type Nml2Quantity_concentration - value = find_attr_value_('decayConstant', node) - if value is not None and 'decayConstant' not in already_processed: - already_processed.add('decayConstant') + self.validate_Nml2Quantity_concentration( + self.resting_conc + ) # validate type Nml2Quantity_concentration + value = find_attr_value_("decayConstant", node) + if value is not None and "decayConstant" not in already_processed: + already_processed.add("decayConstant") self.decay_constant = value - self.validate_Nml2Quantity_time(self.decay_constant) # validate type Nml2Quantity_time - value = find_attr_value_('shellThickness', node) - if value is not None and 'shellThickness' not in already_processed: - already_processed.add('shellThickness') + self.validate_Nml2Quantity_time( + self.decay_constant + ) # validate type Nml2Quantity_time + value = find_attr_value_("shellThickness", node) + if value is not None and "shellThickness" not in already_processed: + already_processed.add("shellThickness") self.shell_thickness = value - self.validate_Nml2Quantity_length(self.shell_thickness) # validate type Nml2Quantity_length - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - 
already_processed.add('xsi:type') + self.validate_Nml2Quantity_length( + self.shell_thickness + ) # validate type Nml2Quantity_length + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(DecayingPoolConcentrationModel, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(DecayingPoolConcentrationModel, self).buildChildren(child_, node, nodeName_, True) + super(DecayingPoolConcentrationModel, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(DecayingPoolConcentrationModel, self)._buildChildren( + child_, node, nodeName_, True + ) pass + + # end class DecayingPoolConcentrationModel class GateFractionalSubgate(Base): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('fractional_conductance', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {u'type': u'Q10Settings', u'name': u'q10Settings', u'minOccurs': u'0'}, None), - MemberSpec_('steady_state', 'HHVariable', 0, 0, {u'type': u'HHVariable', u'name': u'steadyState', u'minOccurs': u'1'}, None), - MemberSpec_('time_course', 'HHTime', 0, 0, {u'type': u'HHTime', u'name': u'timeCourse', u'minOccurs': u'1'}, None), + MemberSpec_( + "fractional_conductance", + "Nml2Quantity_none", + 0, + 0, + {"use": "required", "name": "fractional_conductance"}, + ), + MemberSpec_( + "notes", + ["Notes", "xs:string"], + 0, + 1, + {"minOccurs": "0", "name": "notes", "type": "xs:string"}, + None, + ), + MemberSpec_( + "q10_settings", + "Q10Settings", + 0, + 1, + {"minOccurs": "0", "name": "q10Settings", "type": 
"Q10Settings"}, + None, + ), + MemberSpec_( + "steady_state", + "HHVariable", + 0, + 0, + {"minOccurs": "1", "name": "steadyState", "type": "HHVariable"}, + None, + ), + MemberSpec_( + "time_course", + "HHTime", + 0, + 0, + {"minOccurs": "1", "name": "timeCourse", "type": "HHTime"}, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, fractional_conductance=None, notes=None, q10_settings=None, steady_state=None, time_course=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + fractional_conductance=None, + notes=None, + q10_settings=None, + steady_state=None, + time_course=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(GateFractionalSubgate, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("GateFractionalSubgate"), self).__init__( + neuro_lex_id, id, **kwargs_ + ) self.fractional_conductance = _cast(None, fractional_conductance) + self.fractional_conductance_nsprefix_ = None self.notes = notes self.validate_Notes(self.notes) + self.notes_nsprefix_ = None self.q10_settings = q10_settings + self.q10_settings_nsprefix_ = None self.steady_state = steady_state + self.steady_state_nsprefix_ = None self.time_course = time_course + self.time_course_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, GateFractionalSubgate) + CurrentSubclassModule_, GateFractionalSubgate + ) if subclass is not None: return subclass(*args_, **kwargs_) if GateFractionalSubgate.subclass: return GateFractionalSubgate.subclass(*args_, **kwargs_) else: return GateFractionalSubgate(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Notes(self, value): + 
result = True # Validate type Notes, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False pass + return result + def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] - def hasContent_(self): + self.validate_Nml2Quantity_none_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_none_patterns_, + ) + ) + + validate_Nml2Quantity_none_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?)$"] + ] + + def _hasContent(self): if ( - self.notes is not None or - self.q10_settings is not None or - self.steady_state is not None or - self.time_course is not None or - super(GateFractionalSubgate, self).hasContent_() + self.notes is not None + or self.q10_settings is not None + or self.steady_state is not None + or 
self.time_course is not None + or super(GateFractionalSubgate, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateFractionalSubgate', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('GateFractionalSubgate') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="GateFractionalSubgate", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("GateFractionalSubgate") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "GateFractionalSubgate": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateFractionalSubgate') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateFractionalSubgate', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="GateFractionalSubgate", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="GateFractionalSubgate", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, 
name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateFractionalSubgate'): - super(GateFractionalSubgate, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateFractionalSubgate') - if self.fractional_conductance is not None and 'fractional_conductance' not in already_processed: - already_processed.add('fractional_conductance') - outfile.write(' fractionalConductance=%s' % (quote_attrib(self.fractional_conductance), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateFractionalSubgate', fromsubclass_=False, pretty_print=True): - super(GateFractionalSubgate, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="GateFractionalSubgate", + ): + super(GateFractionalSubgate, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="GateFractionalSubgate", + ) + if ( + self.fractional_conductance is not None + and "fractional_conductance" not in already_processed + ): + already_processed.add("fractional_conductance") + outfile.write( + " fractionalConductance=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.fractional_conductance), + input_name="fractionalConductance", + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="GateFractionalSubgate", + fromsubclass_=False, + pretty_print=True, + ): + super(GateFractionalSubgate, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = 
'\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.notes is not None: + namespaceprefix_ = ( + self.notes_nsprefix_ + ":" + if (UseCapturedNS_ and self.notes_nsprefix_) + else "" + ) showIndent(outfile, level, pretty_print) - outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) + outfile.write( + "<%snotes>%s%s" + % ( + namespaceprefix_, + self.gds_encode( + self.gds_format_string( + quote_xml(self.notes), input_name="notes" + ) + ), + namespaceprefix_, + eol_, + ) + ) if self.q10_settings is not None: - self.q10_settings.export(outfile, level, namespaceprefix_, namespacedef_='', name_='q10Settings', pretty_print=pretty_print) + namespaceprefix_ = ( + self.q10_settings_nsprefix_ + ":" + if (UseCapturedNS_ and self.q10_settings_nsprefix_) + else "" + ) + self.q10_settings.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="q10Settings", + pretty_print=pretty_print, + ) if self.steady_state is not None: - self.steady_state.export(outfile, level, namespaceprefix_, namespacedef_='', name_='steadyState', pretty_print=pretty_print) + namespaceprefix_ = ( + self.steady_state_nsprefix_ + ":" + if (UseCapturedNS_ and self.steady_state_nsprefix_) + else "" + ) + self.steady_state.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="steadyState", + pretty_print=pretty_print, + ) if self.time_course is not None: - self.time_course.export(outfile, level, namespaceprefix_, namespacedef_='', name_='timeCourse', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.time_course_nsprefix_ + ":" + if (UseCapturedNS_ and self.time_course_nsprefix_) + else "" + ) + self.time_course.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="timeCourse", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if 
SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('fractionalConductance', node) - if value is not None and 'fractionalConductance' not in already_processed: - already_processed.add('fractionalConductance') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("fractionalConductance", node) + if value is not None and "fractionalConductance" not in already_processed: + already_processed.add("fractionalConductance") self.fractional_conductance = value - self.validate_Nml2Quantity_none(self.fractional_conductance) # validate type Nml2Quantity_none - super(GateFractionalSubgate, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'notes': - notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') - self.notes = notes_ + self.validate_Nml2Quantity_none( + self.fractional_conductance + ) # validate type Nml2Quantity_none + super(GateFractionalSubgate, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "notes": + value_ = child_.text + value_ = self.gds_parse_string(value_, node, "notes") + value_ = self.gds_validate_string(value_, node, "notes") + self.notes = value_ + self.notes_nsprefix_ = child_.prefix # validate type Notes self.validate_Notes(self.notes) - elif nodeName_ == 'q10Settings': + elif nodeName_ == 
"q10Settings": obj_ = Q10Settings.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.q10_settings = obj_ - obj_.original_tagname_ = 'q10Settings' - elif nodeName_ == 'steadyState': + obj_.original_tagname_ = "q10Settings" + elif nodeName_ == "steadyState": obj_ = HHVariable.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.steady_state = obj_ - obj_.original_tagname_ = 'steadyState' - elif nodeName_ == 'timeCourse': + obj_.original_tagname_ = "steadyState" + elif nodeName_ == "timeCourse": obj_ = HHTime.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.time_course = obj_ - obj_.original_tagname_ = 'timeCourse' - super(GateFractionalSubgate, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "timeCourse" + super(GateFractionalSubgate, self)._buildChildren(child_, node, nodeName_, True) + + # end class GateFractionalSubgate class GateFractional(Base): + """GateFractional -- Gate composed of subgates contributing with fractional conductance + \n + :param instances: + :type instances: none + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': u'required'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {u'type': u'Q10Settings', u'name': u'q10Settings', u'minOccurs': u'0'}, None), - MemberSpec_('sub_gates', 'GateFractionalSubgate', 1, 0, {u'maxOccurs': u'unbounded', u'type': u'GateFractionalSubgate', u'name': u'subGate', u'minOccurs': u'1'}, None), + MemberSpec_( + "instances", + "PositiveInteger", + 0, + 0, + {"use": "required", "name": "instances"}, + ), + MemberSpec_( + "notes", + ["Notes", "xs:string"], + 0, + 1, + {"minOccurs": "0", "name": "notes", "type": 
"xs:string"}, + None, + ), + MemberSpec_( + "q10_settings", + "Q10Settings", + 0, + 1, + {"minOccurs": "0", "name": "q10Settings", "type": "Q10Settings"}, + None, + ), + MemberSpec_( + "sub_gates", + "GateFractionalSubgate", + 1, + 0, + { + "maxOccurs": "unbounded", + "minOccurs": "1", + "name": "subGate", + "type": "GateFractionalSubgate", + }, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_settings=None, sub_gates=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + instances=None, + notes=None, + q10_settings=None, + sub_gates=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(GateFractional, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("GateFractional"), self).__init__( + neuro_lex_id, id, **kwargs_ + ) self.instances = _cast(int, instances) + self.instances_nsprefix_ = None self.notes = notes self.validate_Notes(self.notes) + self.notes_nsprefix_ = None self.q10_settings = q10_settings + self.q10_settings_nsprefix_ = None if sub_gates is None: self.sub_gates = [] else: self.sub_gates = sub_gates + self.sub_gates_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, GateFractional) + subclass = getSubclassFromModule_(CurrentSubclassModule_, GateFractional) if subclass is not None: return subclass(*args_, **kwargs_) if GateFractional.subclass: return GateFractional.subclass(*args_, **kwargs_) else: return GateFractional(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Notes(self, value): + result = True # Validate type Notes, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False pass + return result + def validate_PositiveInteger(self, value): # Validate type PositiveInteger, a restriction on xs:positiveInteger. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' + % { + "value": value, + "lineno": lineno, + } + ) + return False pass - def hasContent_(self): + + def _hasContent(self): if ( - self.notes is not None or - self.q10_settings is not None or - self.sub_gates or - super(GateFractional, self).hasContent_() + self.notes is not None + or self.q10_settings is not None + or self.sub_gates + or super(GateFractional, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateFractional', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('GateFractional') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="GateFractional", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("GateFractional") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "GateFractional": name_ = 
self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateFractional') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateFractional', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GateFractional" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="GateFractional", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateFractional'): - super(GateFractional, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateFractional') - if self.instances is not None and 'instances' not in already_processed: - already_processed.add('instances') - outfile.write(' instances=%s' % (quote_attrib(self.instances), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateFractional', fromsubclass_=False, pretty_print=True): - super(GateFractional, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + 
already_processed, + namespaceprefix_="", + name_="GateFractional", + ): + super(GateFractional, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GateFractional" + ) + if self.instances is not None and "instances" not in already_processed: + already_processed.add("instances") + outfile.write( + ' instances="%s"' + % self.gds_format_integer(self.instances, input_name="instances") + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="GateFractional", + fromsubclass_=False, + pretty_print=True, + ): + super(GateFractional, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.notes is not None: + namespaceprefix_ = ( + self.notes_nsprefix_ + ":" + if (UseCapturedNS_ and self.notes_nsprefix_) + else "" + ) showIndent(outfile, level, pretty_print) - outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) + outfile.write( + "<%snotes>%s%s" + % ( + namespaceprefix_, + self.gds_encode( + self.gds_format_string( + quote_xml(self.notes), input_name="notes" + ) + ), + namespaceprefix_, + eol_, + ) + ) if self.q10_settings is not None: - self.q10_settings.export(outfile, level, namespaceprefix_, namespacedef_='', name_='q10Settings', pretty_print=pretty_print) + namespaceprefix_ = ( + self.q10_settings_nsprefix_ + ":" + if (UseCapturedNS_ and self.q10_settings_nsprefix_) + else "" + ) + self.q10_settings.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="q10Settings", + pretty_print=pretty_print, + ) for subGate_ in self.sub_gates: - subGate_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='subGate', pretty_print=pretty_print) - 
def build(self, node): + namespaceprefix_ = ( + self.sub_gates_nsprefix_ + ":" + if (UseCapturedNS_ and self.sub_gates_nsprefix_) + else "" + ) + subGate_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="subGate", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('instances', node) - if value is not None and 'instances' not in already_processed: - already_processed.add('instances') - try: - self.instances = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("instances", node) + if value is not None and "instances" not in already_processed: + already_processed.add("instances") + self.instances = self.gds_parse_integer(value, node, "instances") if self.instances <= 0: - raise_parse_error(node, 'Invalid PositiveInteger') - self.validate_PositiveInteger(self.instances) # validate type PositiveInteger - super(GateFractional, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'notes': - notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') - self.notes = notes_ + raise_parse_error(node, "Invalid PositiveInteger") + self.validate_PositiveInteger( + self.instances + ) # validate type 
PositiveInteger + super(GateFractional, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "notes": + value_ = child_.text + value_ = self.gds_parse_string(value_, node, "notes") + value_ = self.gds_validate_string(value_, node, "notes") + self.notes = value_ + self.notes_nsprefix_ = child_.prefix # validate type Notes self.validate_Notes(self.notes) - elif nodeName_ == 'q10Settings': + elif nodeName_ == "q10Settings": obj_ = Q10Settings.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.q10_settings = obj_ - obj_.original_tagname_ = 'q10Settings' - elif nodeName_ == 'subGate': + obj_.original_tagname_ = "q10Settings" + elif nodeName_ == "subGate": obj_ = GateFractionalSubgate.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.sub_gates.append(obj_) - obj_.original_tagname_ = 'subGate' - super(GateFractional, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "subGate" + super(GateFractional, self)._buildChildren(child_, node, nodeName_, True) + + # end class GateFractional class GateHHInstantaneous(Base): + """GateHHInstantaneous -- Gate which follows the general Hodgkin Huxley formalism but is instantaneous, so tau = 0 and gate follows exactly inf value + \n + :param instances: + :type instances: none + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': u'required'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('steady_state', 'HHVariable', 0, 0, {u'type': u'HHVariable', u'name': u'steadyState', u'minOccurs': u'1'}, None), + MemberSpec_( + "instances", + "PositiveInteger", + 0, + 0, + {"use": "required", "name": "instances"}, + ), + 
MemberSpec_( + "notes", + ["Notes", "xs:string"], + 0, + 1, + {"minOccurs": "0", "name": "notes", "type": "xs:string"}, + None, + ), + MemberSpec_( + "steady_state", + "HHVariable", + 0, + 0, + {"minOccurs": "1", "name": "steadyState", "type": "HHVariable"}, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, steady_state=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + instances=None, + notes=None, + steady_state=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(GateHHInstantaneous, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("GateHHInstantaneous"), self).__init__( + neuro_lex_id, id, **kwargs_ + ) self.instances = _cast(int, instances) + self.instances_nsprefix_ = None self.notes = notes self.validate_Notes(self.notes) + self.notes_nsprefix_ = None self.steady_state = steady_state + self.steady_state_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, GateHHInstantaneous) + CurrentSubclassModule_, GateHHInstantaneous + ) if subclass is not None: return subclass(*args_, **kwargs_) if GateHHInstantaneous.subclass: return GateHHInstantaneous.subclass(*args_, **kwargs_) else: return GateHHInstantaneous(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Notes(self, value): + result = True # Validate type Notes, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False pass + return result + def validate_PositiveInteger(self, value): # Validate type PositiveInteger, a restriction on xs:positiveInteger. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' + % { + "value": value, + "lineno": lineno, + } + ) + return False pass - def hasContent_(self): + + def _hasContent(self): if ( - self.notes is not None or - self.steady_state is not None or - super(GateHHInstantaneous, self).hasContent_() + self.notes is not None + or self.steady_state is not None + or super(GateHHInstantaneous, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHInstantaneous', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('GateHHInstantaneous') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="GateHHInstantaneous", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("GateHHInstantaneous") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "GateHHInstantaneous": name_ = 
self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHInstantaneous') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateHHInstantaneous', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="GateHHInstantaneous", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="GateHHInstantaneous", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateHHInstantaneous'): - super(GateHHInstantaneous, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHInstantaneous') - if self.instances is not None and 'instances' not in already_processed: - already_processed.add('instances') - outfile.write(' instances=%s' % (quote_attrib(self.instances), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHInstantaneous', fromsubclass_=False, pretty_print=True): - super(GateHHInstantaneous, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def 
_exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="GateHHInstantaneous", + ): + super(GateHHInstantaneous, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="GateHHInstantaneous", + ) + if self.instances is not None and "instances" not in already_processed: + already_processed.add("instances") + outfile.write( + ' instances="%s"' + % self.gds_format_integer(self.instances, input_name="instances") + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="GateHHInstantaneous", + fromsubclass_=False, + pretty_print=True, + ): + super(GateHHInstantaneous, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.notes is not None: + namespaceprefix_ = ( + self.notes_nsprefix_ + ":" + if (UseCapturedNS_ and self.notes_nsprefix_) + else "" + ) showIndent(outfile, level, pretty_print) - outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) + outfile.write( + "<%snotes>%s%s" + % ( + namespaceprefix_, + self.gds_encode( + self.gds_format_string( + quote_xml(self.notes), input_name="notes" + ) + ), + namespaceprefix_, + eol_, + ) + ) if self.steady_state is not None: - self.steady_state.export(outfile, level, namespaceprefix_, namespacedef_='', name_='steadyState', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.steady_state_nsprefix_ + ":" + if (UseCapturedNS_ and self.steady_state_nsprefix_) + else "" + ) + self.steady_state.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="steadyState", + pretty_print=pretty_print, + ) + + def build(self, node, 
gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('instances', node) - if value is not None and 'instances' not in already_processed: - already_processed.add('instances') - try: - self.instances = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("instances", node) + if value is not None and "instances" not in already_processed: + already_processed.add("instances") + self.instances = self.gds_parse_integer(value, node, "instances") if self.instances <= 0: - raise_parse_error(node, 'Invalid PositiveInteger') - self.validate_PositiveInteger(self.instances) # validate type PositiveInteger - super(GateHHInstantaneous, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'notes': - notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') - self.notes = notes_ + raise_parse_error(node, "Invalid PositiveInteger") + self.validate_PositiveInteger( + self.instances + ) # validate type PositiveInteger + super(GateHHInstantaneous, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "notes": + value_ = child_.text + value_ = self.gds_parse_string(value_, node, 
"notes") + value_ = self.gds_validate_string(value_, node, "notes") + self.notes = value_ + self.notes_nsprefix_ = child_.prefix # validate type Notes self.validate_Notes(self.notes) - elif nodeName_ == 'steadyState': + elif nodeName_ == "steadyState": obj_ = HHVariable.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.steady_state = obj_ - obj_.original_tagname_ = 'steadyState' - super(GateHHInstantaneous, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "steadyState" + super(GateHHInstantaneous, self)._buildChildren(child_, node, nodeName_, True) + + # end class GateHHInstantaneous class GateHHRatesInf(Base): + """GateHHRatesInf -- Gate which follows the general Hodgkin Huxley formalism + \n + :param instances: + :type instances: none + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': u'required'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {u'type': u'Q10Settings', u'name': u'q10Settings', u'minOccurs': u'0'}, None), - MemberSpec_('forward_rate', 'HHRate', 0, 0, {u'type': u'HHRate', u'name': u'forwardRate', u'minOccurs': u'1'}, None), - MemberSpec_('reverse_rate', 'HHRate', 0, 0, {u'type': u'HHRate', u'name': u'reverseRate', u'minOccurs': u'1'}, None), - MemberSpec_('steady_state', 'HHVariable', 0, 0, {u'type': u'HHVariable', u'name': u'steadyState', u'minOccurs': u'1'}, None), + MemberSpec_( + "instances", + "PositiveInteger", + 0, + 0, + {"use": "required", "name": "instances"}, + ), + MemberSpec_( + "notes", + ["Notes", "xs:string"], + 0, + 1, + {"minOccurs": "0", "name": "notes", "type": "xs:string"}, + None, + ), + MemberSpec_( + "q10_settings", + "Q10Settings", + 0, + 1, + {"minOccurs": "0", "name": "q10Settings", "type": "Q10Settings"}, + None, + ), + 
MemberSpec_( + "forward_rate", + "HHRate", + 0, + 0, + {"minOccurs": "1", "name": "forwardRate", "type": "HHRate"}, + None, + ), + MemberSpec_( + "reverse_rate", + "HHRate", + 0, + 0, + {"minOccurs": "1", "name": "reverseRate", "type": "HHRate"}, + None, + ), + MemberSpec_( + "steady_state", + "HHVariable", + 0, + 0, + {"minOccurs": "1", "name": "steadyState", "type": "HHVariable"}, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_settings=None, forward_rate=None, reverse_rate=None, steady_state=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + instances=None, + notes=None, + q10_settings=None, + forward_rate=None, + reverse_rate=None, + steady_state=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(GateHHRatesInf, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("GateHHRatesInf"), self).__init__( + neuro_lex_id, id, **kwargs_ + ) self.instances = _cast(int, instances) + self.instances_nsprefix_ = None self.notes = notes self.validate_Notes(self.notes) + self.notes_nsprefix_ = None self.q10_settings = q10_settings + self.q10_settings_nsprefix_ = None self.forward_rate = forward_rate + self.forward_rate_nsprefix_ = None self.reverse_rate = reverse_rate + self.reverse_rate_nsprefix_ = None self.steady_state = steady_state + self.steady_state_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, GateHHRatesInf) + subclass = getSubclassFromModule_(CurrentSubclassModule_, GateHHRatesInf) if subclass is not None: return subclass(*args_, **kwargs_) if GateHHRatesInf.subclass: return GateHHRatesInf.subclass(*args_, 
**kwargs_) else: return GateHHRatesInf(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Notes(self, value): + result = True # Validate type Notes, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False pass + return result + def validate_PositiveInteger(self, value): # Validate type PositiveInteger, a restriction on xs:positiveInteger. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' + % { + "value": value, + "lineno": lineno, + } + ) + return False pass - def hasContent_(self): + + def _hasContent(self): if ( - self.notes is not None or - self.q10_settings is not None or - self.forward_rate is not None or - self.reverse_rate is not None or - self.steady_state is not None or - super(GateHHRatesInf, self).hasContent_() + self.notes is not None + or self.q10_settings is not None + or self.forward_rate is not None + or self.reverse_rate is not None + or self.steady_state is not None + or super(GateHHRatesInf, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHRatesInf', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('GateHHRatesInf') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="GateHHRatesInf", 
+ pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("GateHHRatesInf") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "GateHHRatesInf": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHRatesInf') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateHHRatesInf', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GateHHRatesInf" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="GateHHRatesInf", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateHHRatesInf'): - super(GateHHRatesInf, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHRatesInf') - if self.instances is not None and 'instances' not in already_processed: - already_processed.add('instances') - outfile.write(' instances=%s' % (quote_attrib(self.instances), )) - def exportChildren(self, outfile, level, namespaceprefix_='', 
namespacedef_='', name_='GateHHRatesInf', fromsubclass_=False, pretty_print=True): - super(GateHHRatesInf, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="GateHHRatesInf", + ): + super(GateHHRatesInf, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GateHHRatesInf" + ) + if self.instances is not None and "instances" not in already_processed: + already_processed.add("instances") + outfile.write( + ' instances="%s"' + % self.gds_format_integer(self.instances, input_name="instances") + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="GateHHRatesInf", + fromsubclass_=False, + pretty_print=True, + ): + super(GateHHRatesInf, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.notes is not None: + namespaceprefix_ = ( + self.notes_nsprefix_ + ":" + if (UseCapturedNS_ and self.notes_nsprefix_) + else "" + ) showIndent(outfile, level, pretty_print) - outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) + outfile.write( + "<%snotes>%s%s" + % ( + namespaceprefix_, + self.gds_encode( + self.gds_format_string( + quote_xml(self.notes), input_name="notes" + ) + ), + namespaceprefix_, + eol_, + ) + ) if self.q10_settings is not None: - self.q10_settings.export(outfile, level, namespaceprefix_, namespacedef_='', name_='q10Settings', pretty_print=pretty_print) + namespaceprefix_ = ( + self.q10_settings_nsprefix_ + ":" + if 
(UseCapturedNS_ and self.q10_settings_nsprefix_) + else "" + ) + self.q10_settings.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="q10Settings", + pretty_print=pretty_print, + ) if self.forward_rate is not None: - self.forward_rate.export(outfile, level, namespaceprefix_, namespacedef_='', name_='forwardRate', pretty_print=pretty_print) + namespaceprefix_ = ( + self.forward_rate_nsprefix_ + ":" + if (UseCapturedNS_ and self.forward_rate_nsprefix_) + else "" + ) + self.forward_rate.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="forwardRate", + pretty_print=pretty_print, + ) if self.reverse_rate is not None: - self.reverse_rate.export(outfile, level, namespaceprefix_, namespacedef_='', name_='reverseRate', pretty_print=pretty_print) + namespaceprefix_ = ( + self.reverse_rate_nsprefix_ + ":" + if (UseCapturedNS_ and self.reverse_rate_nsprefix_) + else "" + ) + self.reverse_rate.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="reverseRate", + pretty_print=pretty_print, + ) if self.steady_state is not None: - self.steady_state.export(outfile, level, namespaceprefix_, namespacedef_='', name_='steadyState', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.steady_state_nsprefix_ + ":" + if (UseCapturedNS_ and self.steady_state_nsprefix_) + else "" + ) + self.steady_state.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="steadyState", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + 
self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('instances', node) - if value is not None and 'instances' not in already_processed: - already_processed.add('instances') - try: - self.instances = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("instances", node) + if value is not None and "instances" not in already_processed: + already_processed.add("instances") + self.instances = self.gds_parse_integer(value, node, "instances") if self.instances <= 0: - raise_parse_error(node, 'Invalid PositiveInteger') - self.validate_PositiveInteger(self.instances) # validate type PositiveInteger - super(GateHHRatesInf, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'notes': - notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') - self.notes = notes_ + raise_parse_error(node, "Invalid PositiveInteger") + self.validate_PositiveInteger( + self.instances + ) # validate type PositiveInteger + super(GateHHRatesInf, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "notes": + value_ = child_.text + value_ = self.gds_parse_string(value_, node, "notes") + value_ = self.gds_validate_string(value_, node, "notes") + self.notes = value_ + self.notes_nsprefix_ = child_.prefix # validate type Notes self.validate_Notes(self.notes) - elif nodeName_ == 'q10Settings': + elif nodeName_ == "q10Settings": obj_ = Q10Settings.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.q10_settings = obj_ - obj_.original_tagname_ = 'q10Settings' 
- elif nodeName_ == 'forwardRate': + obj_.original_tagname_ = "q10Settings" + elif nodeName_ == "forwardRate": obj_ = HHRate.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.forward_rate = obj_ - obj_.original_tagname_ = 'forwardRate' - elif nodeName_ == 'reverseRate': + obj_.original_tagname_ = "forwardRate" + elif nodeName_ == "reverseRate": obj_ = HHRate.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.reverse_rate = obj_ - obj_.original_tagname_ = 'reverseRate' - elif nodeName_ == 'steadyState': + obj_.original_tagname_ = "reverseRate" + elif nodeName_ == "steadyState": obj_ = HHVariable.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.steady_state = obj_ - obj_.original_tagname_ = 'steadyState' - super(GateHHRatesInf, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "steadyState" + super(GateHHRatesInf, self)._buildChildren(child_, node, nodeName_, True) + + # end class GateHHRatesInf class GateHHRatesTau(Base): + """GateHHRatesTau -- Gate which follows the general Hodgkin Huxley formalism + \n + :param instances: + :type instances: none + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': u'required'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {u'type': u'Q10Settings', u'name': u'q10Settings', u'minOccurs': u'0'}, None), - MemberSpec_('forward_rate', 'HHRate', 0, 0, {u'type': u'HHRate', u'name': u'forwardRate', u'minOccurs': u'1'}, None), - MemberSpec_('reverse_rate', 'HHRate', 0, 0, {u'type': u'HHRate', u'name': u'reverseRate', u'minOccurs': u'1'}, None), - MemberSpec_('time_course', 'HHTime', 0, 0, {u'type': u'HHTime', u'name': u'timeCourse', 
u'minOccurs': u'1'}, None), + MemberSpec_( + "instances", + "PositiveInteger", + 0, + 0, + {"use": "required", "name": "instances"}, + ), + MemberSpec_( + "notes", + ["Notes", "xs:string"], + 0, + 1, + {"minOccurs": "0", "name": "notes", "type": "xs:string"}, + None, + ), + MemberSpec_( + "q10_settings", + "Q10Settings", + 0, + 1, + {"minOccurs": "0", "name": "q10Settings", "type": "Q10Settings"}, + None, + ), + MemberSpec_( + "forward_rate", + "HHRate", + 0, + 0, + {"minOccurs": "1", "name": "forwardRate", "type": "HHRate"}, + None, + ), + MemberSpec_( + "reverse_rate", + "HHRate", + 0, + 0, + {"minOccurs": "1", "name": "reverseRate", "type": "HHRate"}, + None, + ), + MemberSpec_( + "time_course", + "HHTime", + 0, + 0, + {"minOccurs": "1", "name": "timeCourse", "type": "HHTime"}, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_settings=None, forward_rate=None, reverse_rate=None, time_course=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + instances=None, + notes=None, + q10_settings=None, + forward_rate=None, + reverse_rate=None, + time_course=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(GateHHRatesTau, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("GateHHRatesTau"), self).__init__( + neuro_lex_id, id, **kwargs_ + ) self.instances = _cast(int, instances) + self.instances_nsprefix_ = None self.notes = notes self.validate_Notes(self.notes) + self.notes_nsprefix_ = None self.q10_settings = q10_settings + self.q10_settings_nsprefix_ = None self.forward_rate = forward_rate + self.forward_rate_nsprefix_ = None self.reverse_rate = reverse_rate + self.reverse_rate_nsprefix_ = None self.time_course = 
time_course + self.time_course_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, GateHHRatesTau) + subclass = getSubclassFromModule_(CurrentSubclassModule_, GateHHRatesTau) if subclass is not None: return subclass(*args_, **kwargs_) if GateHHRatesTau.subclass: return GateHHRatesTau.subclass(*args_, **kwargs_) else: return GateHHRatesTau(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Notes(self, value): + result = True # Validate type Notes, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False pass + return result + def validate_PositiveInteger(self, value): # Validate type PositiveInteger, a restriction on xs:positiveInteger. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' + % { + "value": value, + "lineno": lineno, + } + ) + return False pass - def hasContent_(self): + + def _hasContent(self): if ( - self.notes is not None or - self.q10_settings is not None or - self.forward_rate is not None or - self.reverse_rate is not None or - self.time_course is not None or - super(GateHHRatesTau, self).hasContent_() + self.notes is not None + or self.q10_settings is not None + or self.forward_rate is not None + or self.reverse_rate is not None + or self.time_course is not None + or super(GateHHRatesTau, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHRatesTau', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('GateHHRatesTau') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="GateHHRatesTau", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("GateHHRatesTau") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "GateHHRatesTau": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, 
name_='GateHHRatesTau') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateHHRatesTau', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GateHHRatesTau" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="GateHHRatesTau", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateHHRatesTau'): - super(GateHHRatesTau, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHRatesTau') - if self.instances is not None and 'instances' not in already_processed: - already_processed.add('instances') - outfile.write(' instances=%s' % (quote_attrib(self.instances), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHRatesTau', fromsubclass_=False, pretty_print=True): - super(GateHHRatesTau, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="GateHHRatesTau", + ): + super(GateHHRatesTau, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GateHHRatesTau" + ) + if self.instances is not None and "instances" not in already_processed: + already_processed.add("instances") + outfile.write( + ' instances="%s"' + % 
self.gds_format_integer(self.instances, input_name="instances") + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="GateHHRatesTau", + fromsubclass_=False, + pretty_print=True, + ): + super(GateHHRatesTau, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.notes is not None: + namespaceprefix_ = ( + self.notes_nsprefix_ + ":" + if (UseCapturedNS_ and self.notes_nsprefix_) + else "" + ) showIndent(outfile, level, pretty_print) - outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) + outfile.write( + "<%snotes>%s%s" + % ( + namespaceprefix_, + self.gds_encode( + self.gds_format_string( + quote_xml(self.notes), input_name="notes" + ) + ), + namespaceprefix_, + eol_, + ) + ) if self.q10_settings is not None: - self.q10_settings.export(outfile, level, namespaceprefix_, namespacedef_='', name_='q10Settings', pretty_print=pretty_print) + namespaceprefix_ = ( + self.q10_settings_nsprefix_ + ":" + if (UseCapturedNS_ and self.q10_settings_nsprefix_) + else "" + ) + self.q10_settings.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="q10Settings", + pretty_print=pretty_print, + ) if self.forward_rate is not None: - self.forward_rate.export(outfile, level, namespaceprefix_, namespacedef_='', name_='forwardRate', pretty_print=pretty_print) + namespaceprefix_ = ( + self.forward_rate_nsprefix_ + ":" + if (UseCapturedNS_ and self.forward_rate_nsprefix_) + else "" + ) + self.forward_rate.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="forwardRate", + pretty_print=pretty_print, + ) if self.reverse_rate is not None: - self.reverse_rate.export(outfile, 
level, namespaceprefix_, namespacedef_='', name_='reverseRate', pretty_print=pretty_print) + namespaceprefix_ = ( + self.reverse_rate_nsprefix_ + ":" + if (UseCapturedNS_ and self.reverse_rate_nsprefix_) + else "" + ) + self.reverse_rate.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="reverseRate", + pretty_print=pretty_print, + ) if self.time_course is not None: - self.time_course.export(outfile, level, namespaceprefix_, namespacedef_='', name_='timeCourse', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.time_course_nsprefix_ + ":" + if (UseCapturedNS_ and self.time_course_nsprefix_) + else "" + ) + self.time_course.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="timeCourse", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('instances', node) - if value is not None and 'instances' not in already_processed: - already_processed.add('instances') - try: - self.instances = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("instances", node) + if value is not None and "instances" not in already_processed: + already_processed.add("instances") + self.instances = self.gds_parse_integer(value, node, "instances") if self.instances <= 0: - 
raise_parse_error(node, 'Invalid PositiveInteger') - self.validate_PositiveInteger(self.instances) # validate type PositiveInteger - super(GateHHRatesTau, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'notes': - notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') - self.notes = notes_ + raise_parse_error(node, "Invalid PositiveInteger") + self.validate_PositiveInteger( + self.instances + ) # validate type PositiveInteger + super(GateHHRatesTau, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "notes": + value_ = child_.text + value_ = self.gds_parse_string(value_, node, "notes") + value_ = self.gds_validate_string(value_, node, "notes") + self.notes = value_ + self.notes_nsprefix_ = child_.prefix # validate type Notes self.validate_Notes(self.notes) - elif nodeName_ == 'q10Settings': + elif nodeName_ == "q10Settings": obj_ = Q10Settings.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.q10_settings = obj_ - obj_.original_tagname_ = 'q10Settings' - elif nodeName_ == 'forwardRate': + obj_.original_tagname_ = "q10Settings" + elif nodeName_ == "forwardRate": obj_ = HHRate.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.forward_rate = obj_ - obj_.original_tagname_ = 'forwardRate' - elif nodeName_ == 'reverseRate': + obj_.original_tagname_ = "forwardRate" + elif nodeName_ == "reverseRate": obj_ = HHRate.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.reverse_rate = obj_ - obj_.original_tagname_ = 'reverseRate' - elif nodeName_ == 'timeCourse': + obj_.original_tagname_ = "reverseRate" + elif nodeName_ == "timeCourse": obj_ = 
HHTime.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.time_course = obj_ - obj_.original_tagname_ = 'timeCourse' - super(GateHHRatesTau, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "timeCourse" + super(GateHHRatesTau, self)._buildChildren(child_, node, nodeName_, True) + + # end class GateHHRatesTau class GateHHRatesTauInf(Base): + """GateHHRatesTauInf -- Gate which follows the general Hodgkin Huxley formalism + \n + :param instances: + :type instances: none + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': u'required'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {u'type': u'Q10Settings', u'name': u'q10Settings', u'minOccurs': u'0'}, None), - MemberSpec_('forward_rate', 'HHRate', 0, 0, {u'type': u'HHRate', u'name': u'forwardRate', u'minOccurs': u'1'}, None), - MemberSpec_('reverse_rate', 'HHRate', 0, 0, {u'type': u'HHRate', u'name': u'reverseRate', u'minOccurs': u'1'}, None), - MemberSpec_('time_course', 'HHTime', 0, 0, {u'type': u'HHTime', u'name': u'timeCourse', u'minOccurs': u'1'}, None), - MemberSpec_('steady_state', 'HHVariable', 0, 0, {u'type': u'HHVariable', u'name': u'steadyState', u'minOccurs': u'1'}, None), + MemberSpec_( + "instances", + "PositiveInteger", + 0, + 0, + {"use": "required", "name": "instances"}, + ), + MemberSpec_( + "notes", + ["Notes", "xs:string"], + 0, + 1, + {"minOccurs": "0", "name": "notes", "type": "xs:string"}, + None, + ), + MemberSpec_( + "q10_settings", + "Q10Settings", + 0, + 1, + {"minOccurs": "0", "name": "q10Settings", "type": "Q10Settings"}, + None, + ), + MemberSpec_( + "forward_rate", + "HHRate", + 0, + 0, + {"minOccurs": "1", "name": "forwardRate", "type": "HHRate"}, + None, + ), + MemberSpec_( + "reverse_rate", + 
"HHRate", + 0, + 0, + {"minOccurs": "1", "name": "reverseRate", "type": "HHRate"}, + None, + ), + MemberSpec_( + "time_course", + "HHTime", + 0, + 0, + {"minOccurs": "1", "name": "timeCourse", "type": "HHTime"}, + None, + ), + MemberSpec_( + "steady_state", + "HHVariable", + 0, + 0, + {"minOccurs": "1", "name": "steadyState", "type": "HHVariable"}, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_settings=None, forward_rate=None, reverse_rate=None, time_course=None, steady_state=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + instances=None, + notes=None, + q10_settings=None, + forward_rate=None, + reverse_rate=None, + time_course=None, + steady_state=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(GateHHRatesTauInf, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("GateHHRatesTauInf"), self).__init__( + neuro_lex_id, id, **kwargs_ + ) self.instances = _cast(int, instances) + self.instances_nsprefix_ = None self.notes = notes self.validate_Notes(self.notes) + self.notes_nsprefix_ = None self.q10_settings = q10_settings + self.q10_settings_nsprefix_ = None self.forward_rate = forward_rate + self.forward_rate_nsprefix_ = None self.reverse_rate = reverse_rate + self.reverse_rate_nsprefix_ = None self.time_course = time_course + self.time_course_nsprefix_ = None self.steady_state = steady_state + self.steady_state_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, GateHHRatesTauInf) + subclass = getSubclassFromModule_(CurrentSubclassModule_, GateHHRatesTauInf) if subclass is not None: return 
subclass(*args_, **kwargs_) if GateHHRatesTauInf.subclass: return GateHHRatesTauInf.subclass(*args_, **kwargs_) else: return GateHHRatesTauInf(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Notes(self, value): + result = True # Validate type Notes, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False pass + return result + def validate_PositiveInteger(self, value): # Validate type PositiveInteger, a restriction on xs:positiveInteger. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' + % { + "value": value, + "lineno": lineno, + } + ) + return False pass - def hasContent_(self): + + def _hasContent(self): if ( - self.notes is not None or - self.q10_settings is not None or - self.forward_rate is not None or - self.reverse_rate is not None or - self.time_course is not None or - self.steady_state is not None or - super(GateHHRatesTauInf, self).hasContent_() + self.notes is not None + or self.q10_settings is not None + or self.forward_rate is not None + or self.reverse_rate is not None + or self.time_course is not None + or self.steady_state is not None + or super(GateHHRatesTauInf, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHRatesTauInf', pretty_print=True): - imported_ns_def_ = 
GenerateDSNamespaceDefs_.get('GateHHRatesTauInf') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="GateHHRatesTauInf", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("GateHHRatesTauInf") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "GateHHRatesTauInf": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHRatesTauInf') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateHHRatesTauInf', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="GateHHRatesTauInf", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="GateHHRatesTauInf", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateHHRatesTauInf'): - super(GateHHRatesTauInf, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, 
name_='GateHHRatesTauInf') - if self.instances is not None and 'instances' not in already_processed: - already_processed.add('instances') - outfile.write(' instances=%s' % (quote_attrib(self.instances), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHRatesTauInf', fromsubclass_=False, pretty_print=True): - super(GateHHRatesTauInf, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="GateHHRatesTauInf", + ): + super(GateHHRatesTauInf, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="GateHHRatesTauInf", + ) + if self.instances is not None and "instances" not in already_processed: + already_processed.add("instances") + outfile.write( + ' instances="%s"' + % self.gds_format_integer(self.instances, input_name="instances") + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="GateHHRatesTauInf", + fromsubclass_=False, + pretty_print=True, + ): + super(GateHHRatesTauInf, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.notes is not None: + namespaceprefix_ = ( + self.notes_nsprefix_ + ":" + if (UseCapturedNS_ and self.notes_nsprefix_) + else "" + ) showIndent(outfile, level, pretty_print) - outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) + outfile.write( + "<%snotes>%s%s" + % ( + namespaceprefix_, + self.gds_encode( + self.gds_format_string( + 
quote_xml(self.notes), input_name="notes" + ) + ), + namespaceprefix_, + eol_, + ) + ) if self.q10_settings is not None: - self.q10_settings.export(outfile, level, namespaceprefix_, namespacedef_='', name_='q10Settings', pretty_print=pretty_print) + namespaceprefix_ = ( + self.q10_settings_nsprefix_ + ":" + if (UseCapturedNS_ and self.q10_settings_nsprefix_) + else "" + ) + self.q10_settings.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="q10Settings", + pretty_print=pretty_print, + ) if self.forward_rate is not None: - self.forward_rate.export(outfile, level, namespaceprefix_, namespacedef_='', name_='forwardRate', pretty_print=pretty_print) + namespaceprefix_ = ( + self.forward_rate_nsprefix_ + ":" + if (UseCapturedNS_ and self.forward_rate_nsprefix_) + else "" + ) + self.forward_rate.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="forwardRate", + pretty_print=pretty_print, + ) if self.reverse_rate is not None: - self.reverse_rate.export(outfile, level, namespaceprefix_, namespacedef_='', name_='reverseRate', pretty_print=pretty_print) + namespaceprefix_ = ( + self.reverse_rate_nsprefix_ + ":" + if (UseCapturedNS_ and self.reverse_rate_nsprefix_) + else "" + ) + self.reverse_rate.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="reverseRate", + pretty_print=pretty_print, + ) if self.time_course is not None: - self.time_course.export(outfile, level, namespaceprefix_, namespacedef_='', name_='timeCourse', pretty_print=pretty_print) + namespaceprefix_ = ( + self.time_course_nsprefix_ + ":" + if (UseCapturedNS_ and self.time_course_nsprefix_) + else "" + ) + self.time_course.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="timeCourse", + pretty_print=pretty_print, + ) if self.steady_state is not None: - self.steady_state.export(outfile, level, namespaceprefix_, namespacedef_='', name_='steadyState', pretty_print=pretty_print) - def build(self, node): + 
namespaceprefix_ = ( + self.steady_state_nsprefix_ + ":" + if (UseCapturedNS_ and self.steady_state_nsprefix_) + else "" + ) + self.steady_state.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="steadyState", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('instances', node) - if value is not None and 'instances' not in already_processed: - already_processed.add('instances') - try: - self.instances = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("instances", node) + if value is not None and "instances" not in already_processed: + already_processed.add("instances") + self.instances = self.gds_parse_integer(value, node, "instances") if self.instances <= 0: - raise_parse_error(node, 'Invalid PositiveInteger') - self.validate_PositiveInteger(self.instances) # validate type PositiveInteger - super(GateHHRatesTauInf, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'notes': - notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') - self.notes = notes_ + raise_parse_error(node, "Invalid PositiveInteger") + self.validate_PositiveInteger( + self.instances + ) # validate type 
PositiveInteger + super(GateHHRatesTauInf, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "notes": + value_ = child_.text + value_ = self.gds_parse_string(value_, node, "notes") + value_ = self.gds_validate_string(value_, node, "notes") + self.notes = value_ + self.notes_nsprefix_ = child_.prefix # validate type Notes self.validate_Notes(self.notes) - elif nodeName_ == 'q10Settings': + elif nodeName_ == "q10Settings": obj_ = Q10Settings.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.q10_settings = obj_ - obj_.original_tagname_ = 'q10Settings' - elif nodeName_ == 'forwardRate': + obj_.original_tagname_ = "q10Settings" + elif nodeName_ == "forwardRate": obj_ = HHRate.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.forward_rate = obj_ - obj_.original_tagname_ = 'forwardRate' - elif nodeName_ == 'reverseRate': + obj_.original_tagname_ = "forwardRate" + elif nodeName_ == "reverseRate": obj_ = HHRate.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.reverse_rate = obj_ - obj_.original_tagname_ = 'reverseRate' - elif nodeName_ == 'timeCourse': + obj_.original_tagname_ = "reverseRate" + elif nodeName_ == "timeCourse": obj_ = HHTime.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.time_course = obj_ - obj_.original_tagname_ = 'timeCourse' - elif nodeName_ == 'steadyState': + obj_.original_tagname_ = "timeCourse" + elif nodeName_ == "steadyState": obj_ = HHVariable.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.steady_state = obj_ - obj_.original_tagname_ = 'steadyState' - super(GateHHRatesTauInf, self).buildChildren(child_, node, nodeName_, True) + 
obj_.original_tagname_ = "steadyState" + super(GateHHRatesTauInf, self)._buildChildren(child_, node, nodeName_, True) + + # end class GateHHRatesTauInf class GateHHTauInf(Base): + """GateHHTauInf -- Gate which follows the general Hodgkin Huxley formalism + \n + :param instances: + :type instances: none + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': u'required'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {u'type': u'Q10Settings', u'name': u'q10Settings', u'minOccurs': u'0'}, None), - MemberSpec_('time_course', 'HHTime', 0, 0, {u'type': u'HHTime', u'name': u'timeCourse', u'minOccurs': u'1'}, None), - MemberSpec_('steady_state', 'HHVariable', 0, 0, {u'type': u'HHVariable', u'name': u'steadyState', u'minOccurs': u'1'}, None), + MemberSpec_( + "instances", + "PositiveInteger", + 0, + 0, + {"use": "required", "name": "instances"}, + ), + MemberSpec_( + "notes", + ["Notes", "xs:string"], + 0, + 1, + {"minOccurs": "0", "name": "notes", "type": "xs:string"}, + None, + ), + MemberSpec_( + "q10_settings", + "Q10Settings", + 0, + 1, + {"minOccurs": "0", "name": "q10Settings", "type": "Q10Settings"}, + None, + ), + MemberSpec_( + "time_course", + "HHTime", + 0, + 0, + {"minOccurs": "1", "name": "timeCourse", "type": "HHTime"}, + None, + ), + MemberSpec_( + "steady_state", + "HHVariable", + 0, + 0, + {"minOccurs": "1", "name": "steadyState", "type": "HHVariable"}, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_settings=None, time_course=None, steady_state=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + instances=None, + notes=None, + q10_settings=None, + time_course=None, + steady_state=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ 
= gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(GateHHTauInf, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("GateHHTauInf"), self).__init__(neuro_lex_id, id, **kwargs_) self.instances = _cast(int, instances) + self.instances_nsprefix_ = None self.notes = notes self.validate_Notes(self.notes) + self.notes_nsprefix_ = None self.q10_settings = q10_settings + self.q10_settings_nsprefix_ = None self.time_course = time_course + self.time_course_nsprefix_ = None self.steady_state = steady_state + self.steady_state_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, GateHHTauInf) + subclass = getSubclassFromModule_(CurrentSubclassModule_, GateHHTauInf) if subclass is not None: return subclass(*args_, **kwargs_) if GateHHTauInf.subclass: return GateHHTauInf.subclass(*args_, **kwargs_) else: return GateHHTauInf(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Notes(self, value): + result = True # Validate type Notes, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False pass + return result + def validate_PositiveInteger(self, value): # Validate type PositiveInteger, a restriction on xs:positiveInteger. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' + % { + "value": value, + "lineno": lineno, + } + ) + return False pass - def hasContent_(self): + + def _hasContent(self): if ( - self.notes is not None or - self.q10_settings is not None or - self.time_course is not None or - self.steady_state is not None or - super(GateHHTauInf, self).hasContent_() + self.notes is not None + or self.q10_settings is not None + or self.time_course is not None + or self.steady_state is not None + or super(GateHHTauInf, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHTauInf', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('GateHHTauInf') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="GateHHTauInf", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("GateHHTauInf") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "GateHHTauInf": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHTauInf') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - 
self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateHHTauInf', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GateHHTauInf" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="GateHHTauInf", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateHHTauInf'): - super(GateHHTauInf, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHTauInf') - if self.instances is not None and 'instances' not in already_processed: - already_processed.add('instances') - outfile.write(' instances=%s' % (quote_attrib(self.instances), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHTauInf', fromsubclass_=False, pretty_print=True): - super(GateHHTauInf, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="GateHHTauInf", + ): + super(GateHHTauInf, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GateHHTauInf" + ) + if self.instances is not None and "instances" not in already_processed: + already_processed.add("instances") + outfile.write( + ' instances="%s"' + % self.gds_format_integer(self.instances, input_name="instances") + ) + + def _exportChildren( + self, + 
outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="GateHHTauInf", + fromsubclass_=False, + pretty_print=True, + ): + super(GateHHTauInf, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.notes is not None: + namespaceprefix_ = ( + self.notes_nsprefix_ + ":" + if (UseCapturedNS_ and self.notes_nsprefix_) + else "" + ) showIndent(outfile, level, pretty_print) - outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) + outfile.write( + "<%snotes>%s%s" + % ( + namespaceprefix_, + self.gds_encode( + self.gds_format_string( + quote_xml(self.notes), input_name="notes" + ) + ), + namespaceprefix_, + eol_, + ) + ) if self.q10_settings is not None: - self.q10_settings.export(outfile, level, namespaceprefix_, namespacedef_='', name_='q10Settings', pretty_print=pretty_print) + namespaceprefix_ = ( + self.q10_settings_nsprefix_ + ":" + if (UseCapturedNS_ and self.q10_settings_nsprefix_) + else "" + ) + self.q10_settings.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="q10Settings", + pretty_print=pretty_print, + ) if self.time_course is not None: - self.time_course.export(outfile, level, namespaceprefix_, namespacedef_='', name_='timeCourse', pretty_print=pretty_print) + namespaceprefix_ = ( + self.time_course_nsprefix_ + ":" + if (UseCapturedNS_ and self.time_course_nsprefix_) + else "" + ) + self.time_course.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="timeCourse", + pretty_print=pretty_print, + ) if self.steady_state is not None: - self.steady_state.export(outfile, level, namespaceprefix_, namespacedef_='', name_='steadyState', pretty_print=pretty_print) - def build(self, node): + 
namespaceprefix_ = ( + self.steady_state_nsprefix_ + ":" + if (UseCapturedNS_ and self.steady_state_nsprefix_) + else "" + ) + self.steady_state.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="steadyState", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('instances', node) - if value is not None and 'instances' not in already_processed: - already_processed.add('instances') - try: - self.instances = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("instances", node) + if value is not None and "instances" not in already_processed: + already_processed.add("instances") + self.instances = self.gds_parse_integer(value, node, "instances") if self.instances <= 0: - raise_parse_error(node, 'Invalid PositiveInteger') - self.validate_PositiveInteger(self.instances) # validate type PositiveInteger - super(GateHHTauInf, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'notes': - notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') - self.notes = notes_ + raise_parse_error(node, "Invalid PositiveInteger") + self.validate_PositiveInteger( + self.instances + ) # validate type PositiveInteger + 
super(GateHHTauInf, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "notes": + value_ = child_.text + value_ = self.gds_parse_string(value_, node, "notes") + value_ = self.gds_validate_string(value_, node, "notes") + self.notes = value_ + self.notes_nsprefix_ = child_.prefix # validate type Notes self.validate_Notes(self.notes) - elif nodeName_ == 'q10Settings': + elif nodeName_ == "q10Settings": obj_ = Q10Settings.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.q10_settings = obj_ - obj_.original_tagname_ = 'q10Settings' - elif nodeName_ == 'timeCourse': + obj_.original_tagname_ = "q10Settings" + elif nodeName_ == "timeCourse": obj_ = HHTime.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.time_course = obj_ - obj_.original_tagname_ = 'timeCourse' - elif nodeName_ == 'steadyState': + obj_.original_tagname_ = "timeCourse" + elif nodeName_ == "steadyState": obj_ = HHVariable.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.steady_state = obj_ - obj_.original_tagname_ = 'steadyState' - super(GateHHTauInf, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "steadyState" + super(GateHHTauInf, self)._buildChildren(child_, node, nodeName_, True) + + # end class GateHHTauInf class GateHHRates(Base): + """GateHHRates -- Gate which follows the general Hodgkin Huxley formalism + \n + :param instances: + :type instances: none + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': u'required'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {u'type': 
u'Q10Settings', u'name': u'q10Settings', u'minOccurs': u'0'}, None), - MemberSpec_('forward_rate', 'HHRate', 0, 0, {u'type': u'HHRate', u'name': u'forwardRate', u'minOccurs': u'1'}, None), - MemberSpec_('reverse_rate', 'HHRate', 0, 0, {u'type': u'HHRate', u'name': u'reverseRate', u'minOccurs': u'1'}, None), + MemberSpec_( + "instances", + "PositiveInteger", + 0, + 0, + {"use": "required", "name": "instances"}, + ), + MemberSpec_( + "notes", + ["Notes", "xs:string"], + 0, + 1, + {"minOccurs": "0", "name": "notes", "type": "xs:string"}, + None, + ), + MemberSpec_( + "q10_settings", + "Q10Settings", + 0, + 1, + {"minOccurs": "0", "name": "q10Settings", "type": "Q10Settings"}, + None, + ), + MemberSpec_( + "forward_rate", + "HHRate", + 0, + 0, + {"minOccurs": "1", "name": "forwardRate", "type": "HHRate"}, + None, + ), + MemberSpec_( + "reverse_rate", + "HHRate", + 0, + 0, + {"minOccurs": "1", "name": "reverseRate", "type": "HHRate"}, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_settings=None, forward_rate=None, reverse_rate=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + instances=None, + notes=None, + q10_settings=None, + forward_rate=None, + reverse_rate=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(GateHHRates, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("GateHHRates"), self).__init__(neuro_lex_id, id, **kwargs_) self.instances = _cast(int, instances) + self.instances_nsprefix_ = None self.notes = notes self.validate_Notes(self.notes) + self.notes_nsprefix_ = None self.q10_settings = q10_settings + self.q10_settings_nsprefix_ = None self.forward_rate = forward_rate + 
self.forward_rate_nsprefix_ = None self.reverse_rate = reverse_rate + self.reverse_rate_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, GateHHRates) + subclass = getSubclassFromModule_(CurrentSubclassModule_, GateHHRates) if subclass is not None: return subclass(*args_, **kwargs_) if GateHHRates.subclass: return GateHHRates.subclass(*args_, **kwargs_) else: return GateHHRates(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Notes(self, value): + result = True # Validate type Notes, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False pass + return result + def validate_PositiveInteger(self, value): # Validate type PositiveInteger, a restriction on xs:positiveInteger. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' + % { + "value": value, + "lineno": lineno, + } + ) + return False pass - def hasContent_(self): + + def _hasContent(self): if ( - self.notes is not None or - self.q10_settings is not None or - self.forward_rate is not None or - self.reverse_rate is not None or - super(GateHHRates, self).hasContent_() + self.notes is not None + or self.q10_settings is not None + or self.forward_rate is not None + or self.reverse_rate is not None + or super(GateHHRates, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHRates', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('GateHHRates') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="GateHHRates", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("GateHHRates") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "GateHHRates": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHRates') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - 
self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateHHRates', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GateHHRates" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="GateHHRates", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateHHRates'): - super(GateHHRates, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHRates') - if self.instances is not None and 'instances' not in already_processed: - already_processed.add('instances') - outfile.write(' instances=%s' % (quote_attrib(self.instances), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHRates', fromsubclass_=False, pretty_print=True): - super(GateHHRates, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="GateHHRates", + ): + super(GateHHRates, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GateHHRates" + ) + if self.instances is not None and "instances" not in already_processed: + already_processed.add("instances") + outfile.write( + ' instances="%s"' + % self.gds_format_integer(self.instances, input_name="instances") + ) + + def _exportChildren( + self, + outfile, + level, 
+ namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="GateHHRates", + fromsubclass_=False, + pretty_print=True, + ): + super(GateHHRates, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.notes is not None: + namespaceprefix_ = ( + self.notes_nsprefix_ + ":" + if (UseCapturedNS_ and self.notes_nsprefix_) + else "" + ) showIndent(outfile, level, pretty_print) - outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) + outfile.write( + "<%snotes>%s%s" + % ( + namespaceprefix_, + self.gds_encode( + self.gds_format_string( + quote_xml(self.notes), input_name="notes" + ) + ), + namespaceprefix_, + eol_, + ) + ) if self.q10_settings is not None: - self.q10_settings.export(outfile, level, namespaceprefix_, namespacedef_='', name_='q10Settings', pretty_print=pretty_print) + namespaceprefix_ = ( + self.q10_settings_nsprefix_ + ":" + if (UseCapturedNS_ and self.q10_settings_nsprefix_) + else "" + ) + self.q10_settings.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="q10Settings", + pretty_print=pretty_print, + ) if self.forward_rate is not None: - self.forward_rate.export(outfile, level, namespaceprefix_, namespacedef_='', name_='forwardRate', pretty_print=pretty_print) + namespaceprefix_ = ( + self.forward_rate_nsprefix_ + ":" + if (UseCapturedNS_ and self.forward_rate_nsprefix_) + else "" + ) + self.forward_rate.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="forwardRate", + pretty_print=pretty_print, + ) if self.reverse_rate is not None: - self.reverse_rate.export(outfile, level, namespaceprefix_, namespacedef_='', name_='reverseRate', pretty_print=pretty_print) - def build(self, node): + 
namespaceprefix_ = ( + self.reverse_rate_nsprefix_ + ":" + if (UseCapturedNS_ and self.reverse_rate_nsprefix_) + else "" + ) + self.reverse_rate.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="reverseRate", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('instances', node) - if value is not None and 'instances' not in already_processed: - already_processed.add('instances') - try: - self.instances = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("instances", node) + if value is not None and "instances" not in already_processed: + already_processed.add("instances") + self.instances = self.gds_parse_integer(value, node, "instances") if self.instances <= 0: - raise_parse_error(node, 'Invalid PositiveInteger') - self.validate_PositiveInteger(self.instances) # validate type PositiveInteger - super(GateHHRates, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'notes': - notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') - self.notes = notes_ + raise_parse_error(node, "Invalid PositiveInteger") + self.validate_PositiveInteger( + self.instances + ) # validate type PositiveInteger + 
super(GateHHRates, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "notes": + value_ = child_.text + value_ = self.gds_parse_string(value_, node, "notes") + value_ = self.gds_validate_string(value_, node, "notes") + self.notes = value_ + self.notes_nsprefix_ = child_.prefix # validate type Notes self.validate_Notes(self.notes) - elif nodeName_ == 'q10Settings': + elif nodeName_ == "q10Settings": obj_ = Q10Settings.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.q10_settings = obj_ - obj_.original_tagname_ = 'q10Settings' - elif nodeName_ == 'forwardRate': + obj_.original_tagname_ = "q10Settings" + elif nodeName_ == "forwardRate": obj_ = HHRate.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.forward_rate = obj_ - obj_.original_tagname_ = 'forwardRate' - elif nodeName_ == 'reverseRate': + obj_.original_tagname_ = "forwardRate" + elif nodeName_ == "reverseRate": obj_ = HHRate.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.reverse_rate = obj_ - obj_.original_tagname_ = 'reverseRate' - super(GateHHRates, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "reverseRate" + super(GateHHRates, self)._buildChildren(child_, node, nodeName_, True) + + # end class GateHHRates class GateHHUndetermined(Base): - """Note all sub elements for gateHHrates, gateHHratesTau, - gateFractional etc. allowed here. Which are valid should be - constrained by what type is set""" + """GateHHUndetermined -- Note all sub elements for gateHHrates, gateHHratesTau, gateFractional etc. allowed here. 
Which are valid should be constrained by what type is set""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': u'required'}), - MemberSpec_('type', 'gateTypes', 0, 0, {'use': u'required'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {u'type': u'Q10Settings', u'name': u'q10Settings', u'minOccurs': u'0'}, None), - MemberSpec_('forward_rate', 'HHRate', 0, 1, {u'type': u'HHRate', u'name': u'forwardRate', u'minOccurs': u'0'}, None), - MemberSpec_('reverse_rate', 'HHRate', 0, 1, {u'type': u'HHRate', u'name': u'reverseRate', u'minOccurs': u'0'}, None), - MemberSpec_('time_course', 'HHTime', 0, 1, {u'type': u'HHTime', u'name': u'timeCourse', u'minOccurs': u'0'}, None), - MemberSpec_('steady_state', 'HHVariable', 0, 1, {u'type': u'HHVariable', u'name': u'steadyState', u'minOccurs': u'0'}, None), - MemberSpec_('sub_gates', 'GateFractionalSubgate', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GateFractionalSubgate', u'name': u'subGate', u'minOccurs': u'0'}, None), + MemberSpec_( + "instances", + "PositiveInteger", + 0, + 0, + {"use": "required", "name": "instances"}, + ), + MemberSpec_("type", "gateTypes", 0, 0, {"use": "required", "name": "type"}), + MemberSpec_( + "notes", + ["Notes", "xs:string"], + 0, + 1, + {"minOccurs": "0", "name": "notes", "type": "xs:string"}, + None, + ), + MemberSpec_( + "q10_settings", + "Q10Settings", + 0, + 1, + {"minOccurs": "0", "name": "q10Settings", "type": "Q10Settings"}, + None, + ), + MemberSpec_( + "forward_rate", + "HHRate", + 0, + 1, + {"minOccurs": "0", "name": "forwardRate", "type": "HHRate"}, + None, + ), + MemberSpec_( + "reverse_rate", + "HHRate", + 0, + 1, + {"minOccurs": "0", "name": "reverseRate", "type": "HHRate"}, + None, + ), + MemberSpec_( + "time_course", + "HHTime", + 0, + 1, + {"minOccurs": "0", "name": "timeCourse", 
"type": "HHTime"}, + None, + ), + MemberSpec_( + "steady_state", + "HHVariable", + 0, + 1, + {"minOccurs": "0", "name": "steadyState", "type": "HHVariable"}, + None, + ), + MemberSpec_( + "sub_gates", + "GateFractionalSubgate", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "subGate", + "type": "GateFractionalSubgate", + }, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, instances=None, type=None, notes=None, q10_settings=None, forward_rate=None, reverse_rate=None, time_course=None, steady_state=None, sub_gates=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + instances=None, + type=None, + notes=None, + q10_settings=None, + forward_rate=None, + reverse_rate=None, + time_course=None, + steady_state=None, + sub_gates=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(GateHHUndetermined, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("GateHHUndetermined"), self).__init__( + neuro_lex_id, id, **kwargs_ + ) self.instances = _cast(int, instances) + self.instances_nsprefix_ = None self.type = _cast(None, type) + self.type_nsprefix_ = None self.notes = notes self.validate_Notes(self.notes) + self.notes_nsprefix_ = None self.q10_settings = q10_settings + self.q10_settings_nsprefix_ = None self.forward_rate = forward_rate + self.forward_rate_nsprefix_ = None self.reverse_rate = reverse_rate + self.reverse_rate_nsprefix_ = None self.time_course = time_course + self.time_course_nsprefix_ = None self.steady_state = steady_state + self.steady_state_nsprefix_ = None if sub_gates is None: self.sub_gates = [] else: self.sub_gates = sub_gates + self.sub_gates_nsprefix_ = None + def factory(*args_, **kwargs_): if 
CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, GateHHUndetermined) + CurrentSubclassModule_, GateHHUndetermined + ) if subclass is not None: return subclass(*args_, **kwargs_) if GateHHUndetermined.subclass: return GateHHUndetermined.subclass(*args_, **kwargs_) else: return GateHHUndetermined(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Notes(self, value): + result = True # Validate type Notes, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False pass + return result + def validate_PositiveInteger(self, value): # Validate type PositiveInteger, a restriction on xs:positiveInteger. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' + % { + "value": value, + "lineno": lineno, + } + ) + return False pass + def validate_gateTypes(self, value): # Validate type gateTypes, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - value = str(value) - enumerations = ['gateHHrates', 'gateHHratesTau', 'gateHHtauInf', 'gateHHratesInf', 'gateHHratesTauInf', 'gateHHInstantaneous', 'gateKS', 'gateFractional'] - enumeration_respectee = False - for enum in enumerations: - if value == enum: - enumeration_respectee = True - break - if not enumeration_respectee: - warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on gateTypes' % {"value" : value.encode("utf-8")} ) - def hasContent_(self): - if ( - self.notes is not None or - self.q10_settings is not None or - self.forward_rate is not None or - self.reverse_rate is not None or - self.time_course is not None or - self.steady_state is not None or - self.sub_gates or - super(GateHHUndetermined, self).hasContent_() + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + value = value + enumerations = [ + "gateHHrates", + "gateHHratesTau", + "gateHHtauInf", + "gateHHratesInf", + "gateHHratesTauInf", + "gateHHInstantaneous", + "gateKS", + "gateFractional", + ] + if value not in enumerations: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on gateTypes' + % {"value": encode_str_2_3(value), "lineno": lineno} + ) + result = False + + def _hasContent(self): + if ( + self.notes is not None + or self.q10_settings is not None + or self.forward_rate is not None + or self.reverse_rate is not None + or self.time_course is not None + or self.steady_state is not None + or self.sub_gates + or super(GateHHUndetermined, self)._hasContent() ): return True else: return False - def export(self, outfile, level, 
namespaceprefix_='', namespacedef_='', name_='GateHHUndetermined', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('GateHHUndetermined') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="GateHHUndetermined", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("GateHHUndetermined") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "GateHHUndetermined": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHUndetermined') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateHHUndetermined', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="GateHHUndetermined", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="GateHHUndetermined", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateHHUndetermined'): - 
super(GateHHUndetermined, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateHHUndetermined') - if self.instances is not None and 'instances' not in already_processed: - already_processed.add('instances') - outfile.write(' instances=%s' % (quote_attrib(self.instances), )) - if self.type is not None and 'type' not in already_processed: - already_processed.add('type') - outfile.write(' type=%s' % (quote_attrib(self.type), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateHHUndetermined', fromsubclass_=False, pretty_print=True): - super(GateHHUndetermined, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="GateHHUndetermined", + ): + super(GateHHUndetermined, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="GateHHUndetermined", + ) + if self.instances is not None and "instances" not in already_processed: + already_processed.add("instances") + outfile.write( + ' instances="%s"' + % self.gds_format_integer(self.instances, input_name="instances") + ) + if self.type is not None and "type" not in already_processed: + already_processed.add("type") + outfile.write( + " type=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.type), input_name="type" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="GateHHUndetermined", + fromsubclass_=False, + pretty_print=True, + ): + super(GateHHUndetermined, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ 
= "\n" else: - eol_ = '' + eol_ = "" if self.notes is not None: + namespaceprefix_ = ( + self.notes_nsprefix_ + ":" + if (UseCapturedNS_ and self.notes_nsprefix_) + else "" + ) showIndent(outfile, level, pretty_print) - outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) + outfile.write( + "<%snotes>%s%s" + % ( + namespaceprefix_, + self.gds_encode( + self.gds_format_string( + quote_xml(self.notes), input_name="notes" + ) + ), + namespaceprefix_, + eol_, + ) + ) if self.q10_settings is not None: - self.q10_settings.export(outfile, level, namespaceprefix_, namespacedef_='', name_='q10Settings', pretty_print=pretty_print) + namespaceprefix_ = ( + self.q10_settings_nsprefix_ + ":" + if (UseCapturedNS_ and self.q10_settings_nsprefix_) + else "" + ) + self.q10_settings.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="q10Settings", + pretty_print=pretty_print, + ) if self.forward_rate is not None: - self.forward_rate.export(outfile, level, namespaceprefix_, namespacedef_='', name_='forwardRate', pretty_print=pretty_print) + namespaceprefix_ = ( + self.forward_rate_nsprefix_ + ":" + if (UseCapturedNS_ and self.forward_rate_nsprefix_) + else "" + ) + self.forward_rate.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="forwardRate", + pretty_print=pretty_print, + ) if self.reverse_rate is not None: - self.reverse_rate.export(outfile, level, namespaceprefix_, namespacedef_='', name_='reverseRate', pretty_print=pretty_print) + namespaceprefix_ = ( + self.reverse_rate_nsprefix_ + ":" + if (UseCapturedNS_ and self.reverse_rate_nsprefix_) + else "" + ) + self.reverse_rate.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="reverseRate", + pretty_print=pretty_print, + ) if self.time_course is not None: - self.time_course.export(outfile, level, namespaceprefix_, namespacedef_='', 
name_='timeCourse', pretty_print=pretty_print) + namespaceprefix_ = ( + self.time_course_nsprefix_ + ":" + if (UseCapturedNS_ and self.time_course_nsprefix_) + else "" + ) + self.time_course.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="timeCourse", + pretty_print=pretty_print, + ) if self.steady_state is not None: - self.steady_state.export(outfile, level, namespaceprefix_, namespacedef_='', name_='steadyState', pretty_print=pretty_print) + namespaceprefix_ = ( + self.steady_state_nsprefix_ + ":" + if (UseCapturedNS_ and self.steady_state_nsprefix_) + else "" + ) + self.steady_state.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="steadyState", + pretty_print=pretty_print, + ) for subGate_ in self.sub_gates: - subGate_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='subGate', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.sub_gates_nsprefix_ + ":" + if (UseCapturedNS_ and self.sub_gates_nsprefix_) + else "" + ) + subGate_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="subGate", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('instances', node) - if value is not None and 'instances' not in already_processed: - already_processed.add('instances') - try: - self.instances = int(value) - except ValueError as exp: - 
raise_parse_error(node, 'Bad integer attribute: %s' % exp) + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("instances", node) + if value is not None and "instances" not in already_processed: + already_processed.add("instances") + self.instances = self.gds_parse_integer(value, node, "instances") if self.instances <= 0: - raise_parse_error(node, 'Invalid PositiveInteger') - self.validate_PositiveInteger(self.instances) # validate type PositiveInteger - value = find_attr_value_('type', node) - if value is not None and 'type' not in already_processed: - already_processed.add('type') + raise_parse_error(node, "Invalid PositiveInteger") + self.validate_PositiveInteger( + self.instances + ) # validate type PositiveInteger + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") self.type = value - self.validate_gateTypes(self.type) # validate type gateTypes - super(GateHHUndetermined, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'notes': - notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') - self.notes = notes_ + self.validate_gateTypes(self.type) # validate type gateTypes + super(GateHHUndetermined, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "notes": + value_ = child_.text + value_ = self.gds_parse_string(value_, node, "notes") + value_ = self.gds_validate_string(value_, node, "notes") + self.notes = value_ + self.notes_nsprefix_ = child_.prefix # validate type Notes self.validate_Notes(self.notes) - elif nodeName_ == 'q10Settings': + elif nodeName_ == "q10Settings": obj_ = Q10Settings.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) 
self.q10_settings = obj_ - obj_.original_tagname_ = 'q10Settings' - elif nodeName_ == 'forwardRate': + obj_.original_tagname_ = "q10Settings" + elif nodeName_ == "forwardRate": obj_ = HHRate.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.forward_rate = obj_ - obj_.original_tagname_ = 'forwardRate' - elif nodeName_ == 'reverseRate': + obj_.original_tagname_ = "forwardRate" + elif nodeName_ == "reverseRate": obj_ = HHRate.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.reverse_rate = obj_ - obj_.original_tagname_ = 'reverseRate' - elif nodeName_ == 'timeCourse': + obj_.original_tagname_ = "reverseRate" + elif nodeName_ == "timeCourse": obj_ = HHTime.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.time_course = obj_ - obj_.original_tagname_ = 'timeCourse' - elif nodeName_ == 'steadyState': + obj_.original_tagname_ = "timeCourse" + elif nodeName_ == "steadyState": obj_ = HHVariable.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.steady_state = obj_ - obj_.original_tagname_ = 'steadyState' - elif nodeName_ == 'subGate': + obj_.original_tagname_ = "steadyState" + elif nodeName_ == "subGate": obj_ = GateFractionalSubgate.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.sub_gates.append(obj_) - obj_.original_tagname_ = 'subGate' - super(GateHHUndetermined, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "subGate" + super(GateHHUndetermined, self)._buildChildren(child_, node, nodeName_, True) + + # end class GateHHUndetermined class GateKS(Base): + """GateKS -- A gate which consists of multiple **KSState** s and **KSTransition** s giving the rates of transition between them + \n + :param instances: + :type instances: none + + """ + + __hash__ = 
GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('instances', 'PositiveInteger', 0, 0, {'use': u'required'}), - MemberSpec_('notes', ['Notes', 'xs:string'], 0, 1, {u'type': u'xs:string', u'name': u'notes', u'minOccurs': u'0'}, None), - MemberSpec_('q10_settings', 'Q10Settings', 0, 1, {u'type': u'Q10Settings', u'name': u'q10Settings', u'minOccurs': u'0'}, None), - MemberSpec_('closed_states', 'ClosedState', 1, 0, {u'maxOccurs': u'unbounded', u'type': u'ClosedState', u'name': u'closedState', u'minOccurs': u'1'}, None), - MemberSpec_('open_states', 'OpenState', 1, 0, {u'maxOccurs': u'unbounded', u'type': u'OpenState', u'name': u'openState', u'minOccurs': u'1'}, None), - MemberSpec_('forward_transition', 'ForwardTransition', 1, 0, {u'type': u'ForwardTransition', u'name': u'forwardTransition'}, 2), - MemberSpec_('reverse_transition', 'ReverseTransition', 1, 0, {u'type': u'ReverseTransition', u'name': u'reverseTransition'}, 2), - MemberSpec_('tau_inf_transition', 'TauInfTransition', 1, 0, {u'type': u'TauInfTransition', u'name': u'tauInfTransition'}, 2), + MemberSpec_( + "instances", + "PositiveInteger", + 0, + 0, + {"use": "required", "name": "instances"}, + ), + MemberSpec_( + "notes", + ["Notes", "xs:string"], + 0, + 1, + {"minOccurs": "0", "name": "notes", "type": "xs:string"}, + None, + ), + MemberSpec_( + "q10_settings", + "Q10Settings", + 0, + 1, + {"minOccurs": "0", "name": "q10Settings", "type": "Q10Settings"}, + None, + ), + MemberSpec_( + "closed_states", + "ClosedState", + 1, + 0, + { + "maxOccurs": "unbounded", + "minOccurs": "1", + "name": "closedState", + "type": "ClosedState", + }, + None, + ), + MemberSpec_( + "open_states", + "OpenState", + 1, + 0, + { + "maxOccurs": "unbounded", + "minOccurs": "1", + "name": "openState", + "type": "OpenState", + }, + None, + ), + MemberSpec_( + "forward_transition", + "ForwardTransition", + 1, + 0, + {"name": "forwardTransition", "type": "ForwardTransition"}, + 2, + ), + MemberSpec_( + "reverse_transition", 
+ "ReverseTransition", + 1, + 0, + {"name": "reverseTransition", "type": "ReverseTransition"}, + 2, + ), + MemberSpec_( + "tau_inf_transition", + "TauInfTransition", + 1, + 0, + {"name": "tauInfTransition", "type": "TauInfTransition"}, + 2, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, instances=None, notes=None, q10_settings=None, closed_states=None, open_states=None, forward_transition=None, reverse_transition=None, tau_inf_transition=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + instances=None, + notes=None, + q10_settings=None, + closed_states=None, + open_states=None, + forward_transition=None, + reverse_transition=None, + tau_inf_transition=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(GateKS, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("GateKS"), self).__init__(neuro_lex_id, id, **kwargs_) self.instances = _cast(int, instances) + self.instances_nsprefix_ = None self.notes = notes self.validate_Notes(self.notes) + self.notes_nsprefix_ = None self.q10_settings = q10_settings + self.q10_settings_nsprefix_ = None if closed_states is None: self.closed_states = [] else: self.closed_states = closed_states + self.closed_states_nsprefix_ = None if open_states is None: self.open_states = [] else: self.open_states = open_states + self.open_states_nsprefix_ = None if forward_transition is None: self.forward_transition = [] else: self.forward_transition = forward_transition + self.forward_transition_nsprefix_ = None if reverse_transition is None: self.reverse_transition = [] else: self.reverse_transition = reverse_transition + self.reverse_transition_nsprefix_ = None if tau_inf_transition is None: self.tau_inf_transition = [] else: 
self.tau_inf_transition = tau_inf_transition + self.tau_inf_transition_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, GateKS) + subclass = getSubclassFromModule_(CurrentSubclassModule_, GateKS) if subclass is not None: return subclass(*args_, **kwargs_) if GateKS.subclass: return GateKS.subclass(*args_, **kwargs_) else: return GateKS(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Notes(self, value): + result = True # Validate type Notes, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False pass + return result + def validate_PositiveInteger(self, value): # Validate type PositiveInteger, a restriction on xs:positiveInteger. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' + % { + "value": value, + "lineno": lineno, + } + ) + return False pass - def hasContent_(self): + + def _hasContent(self): if ( - self.notes is not None or - self.q10_settings is not None or - self.closed_states or - self.open_states or - self.forward_transition or - self.reverse_transition or - self.tau_inf_transition or - super(GateKS, self).hasContent_() + self.notes is not None + or self.q10_settings is not None + or self.closed_states + or self.open_states + or self.forward_transition + or self.reverse_transition + or self.tau_inf_transition + or super(GateKS, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateKS', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('GateKS') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="GateKS", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("GateKS") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "GateKS": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateKS') - 
if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GateKS', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GateKS" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="GateKS", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GateKS'): - super(GateKS, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GateKS') - if self.instances is not None and 'instances' not in already_processed: - already_processed.add('instances') - outfile.write(' instances=%s' % (quote_attrib(self.instances), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GateKS', fromsubclass_=False, pretty_print=True): - super(GateKS, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="GateKS" + ): + super(GateKS, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GateKS" + ) + if self.instances is not None and "instances" not in already_processed: + already_processed.add("instances") + outfile.write( + ' instances="%s"' + % self.gds_format_integer(self.instances, input_name="instances") + ) + + def _exportChildren( + self, + outfile, + level, + 
namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="GateKS", + fromsubclass_=False, + pretty_print=True, + ): + super(GateKS, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.notes is not None: + namespaceprefix_ = ( + self.notes_nsprefix_ + ":" + if (UseCapturedNS_ and self.notes_nsprefix_) + else "" + ) showIndent(outfile, level, pretty_print) - outfile.write('<%snotes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.notes), input_name='notes')), namespaceprefix_ , eol_)) + outfile.write( + "<%snotes>%s%s" + % ( + namespaceprefix_, + self.gds_encode( + self.gds_format_string( + quote_xml(self.notes), input_name="notes" + ) + ), + namespaceprefix_, + eol_, + ) + ) if self.q10_settings is not None: - self.q10_settings.export(outfile, level, namespaceprefix_, namespacedef_='', name_='q10Settings', pretty_print=pretty_print) + namespaceprefix_ = ( + self.q10_settings_nsprefix_ + ":" + if (UseCapturedNS_ and self.q10_settings_nsprefix_) + else "" + ) + self.q10_settings.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="q10Settings", + pretty_print=pretty_print, + ) for closedState_ in self.closed_states: - closedState_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='closedState', pretty_print=pretty_print) + namespaceprefix_ = ( + self.closed_states_nsprefix_ + ":" + if (UseCapturedNS_ and self.closed_states_nsprefix_) + else "" + ) + closedState_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="closedState", + pretty_print=pretty_print, + ) for openState_ in self.open_states: - openState_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='openState', pretty_print=pretty_print) + namespaceprefix_ = ( + self.open_states_nsprefix_ + ":" + 
if (UseCapturedNS_ and self.open_states_nsprefix_) + else "" + ) + openState_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="openState", + pretty_print=pretty_print, + ) for forwardTransition_ in self.forward_transition: - forwardTransition_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='forwardTransition', pretty_print=pretty_print) + namespaceprefix_ = ( + self.forward_transition_nsprefix_ + ":" + if (UseCapturedNS_ and self.forward_transition_nsprefix_) + else "" + ) + forwardTransition_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="forwardTransition", + pretty_print=pretty_print, + ) for reverseTransition_ in self.reverse_transition: - reverseTransition_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='reverseTransition', pretty_print=pretty_print) + namespaceprefix_ = ( + self.reverse_transition_nsprefix_ + ":" + if (UseCapturedNS_ and self.reverse_transition_nsprefix_) + else "" + ) + reverseTransition_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="reverseTransition", + pretty_print=pretty_print, + ) for tauInfTransition_ in self.tau_inf_transition: - tauInfTransition_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='tauInfTransition', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.tau_inf_transition_nsprefix_ + ":" + if (UseCapturedNS_ and self.tau_inf_transition_nsprefix_) + else "" + ) + tauInfTransition_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="tauInfTransition", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: 
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('instances', node) - if value is not None and 'instances' not in already_processed: - already_processed.add('instances') - try: - self.instances = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("instances", node) + if value is not None and "instances" not in already_processed: + already_processed.add("instances") + self.instances = self.gds_parse_integer(value, node, "instances") if self.instances <= 0: - raise_parse_error(node, 'Invalid PositiveInteger') - self.validate_PositiveInteger(self.instances) # validate type PositiveInteger - super(GateKS, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'notes': - notes_ = child_.text - notes_ = self.gds_validate_string(notes_, node, 'notes') - self.notes = notes_ + raise_parse_error(node, "Invalid PositiveInteger") + self.validate_PositiveInteger( + self.instances + ) # validate type PositiveInteger + super(GateKS, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "notes": + value_ = child_.text + value_ = self.gds_parse_string(value_, node, "notes") + value_ = self.gds_validate_string(value_, node, "notes") + self.notes = value_ + self.notes_nsprefix_ = child_.prefix # validate type Notes self.validate_Notes(self.notes) - elif nodeName_ == 'q10Settings': + elif nodeName_ == "q10Settings": obj_ = Q10Settings.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, 
gds_collector_=gds_collector_) self.q10_settings = obj_ - obj_.original_tagname_ = 'q10Settings' - elif nodeName_ == 'closedState': + obj_.original_tagname_ = "q10Settings" + elif nodeName_ == "closedState": obj_ = ClosedState.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.closed_states.append(obj_) - obj_.original_tagname_ = 'closedState' - elif nodeName_ == 'openState': + obj_.original_tagname_ = "closedState" + elif nodeName_ == "openState": obj_ = OpenState.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.open_states.append(obj_) - obj_.original_tagname_ = 'openState' - elif nodeName_ == 'forwardTransition': + obj_.original_tagname_ = "openState" + elif nodeName_ == "forwardTransition": obj_ = ForwardTransition.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.forward_transition.append(obj_) - obj_.original_tagname_ = 'forwardTransition' - elif nodeName_ == 'reverseTransition': + obj_.original_tagname_ = "forwardTransition" + elif nodeName_ == "reverseTransition": obj_ = ReverseTransition.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.reverse_transition.append(obj_) - obj_.original_tagname_ = 'reverseTransition' - elif nodeName_ == 'tauInfTransition': + obj_.original_tagname_ = "reverseTransition" + elif nodeName_ == "tauInfTransition": obj_ = TauInfTransition.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.tau_inf_transition.append(obj_) - obj_.original_tagname_ = 'tauInfTransition' - super(GateKS, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "tauInfTransition" + super(GateKS, self)._buildChildren(child_, node, nodeName_, True) + + # end class GateKS class TauInfTransition(Base): + """TauInfTransition -- KS Transition specified in terms of 
time constant **tau** and steady state **inf**""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('from_', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('to', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('steady_state', 'HHVariable', 0, 0, {u'type': u'HHVariable', u'name': u'steadyState'}, None), - MemberSpec_('time_course', 'HHTime', 0, 0, {u'type': u'HHTime', u'name': u'timeCourse'}, None), + MemberSpec_("from_", "NmlId", 0, 0, {"use": "required", "name": "from_"}), + MemberSpec_("to", "NmlId", 0, 0, {"use": "required", "name": "to"}), + MemberSpec_( + "steady_state", + "HHVariable", + 0, + 0, + {"name": "steadyState", "type": "HHVariable"}, + None, + ), + MemberSpec_( + "time_course", + "HHTime", + 0, + 0, + {"name": "timeCourse", "type": "HHTime"}, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, from_=None, to=None, steady_state=None, time_course=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + from_=None, + to=None, + steady_state=None, + time_course=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(TauInfTransition, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("TauInfTransition"), self).__init__( + neuro_lex_id, id, **kwargs_ + ) self.from_ = _cast(None, from_) + self.from__nsprefix_ = None self.to = _cast(None, to) + self.to_nsprefix_ = None self.steady_state = steady_state + self.steady_state_nsprefix_ = None self.time_course = time_course + self.time_course_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, TauInfTransition) + subclass = 
getSubclassFromModule_(CurrentSubclassModule_, TauInfTransition) if subclass is not None: return subclass(*args_, **kwargs_) if TauInfTransition.subclass: return TauInfTransition.subclass(*args_, **kwargs_) else: return TauInfTransition(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + + def _hasContent(self): if ( - self.steady_state is not None or - self.time_course is not None or - super(TauInfTransition, self).hasContent_() + self.steady_state is not None + or self.time_course is not None + or super(TauInfTransition, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TauInfTransition', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('TauInfTransition') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + 
name_="TauInfTransition", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("TauInfTransition") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "TauInfTransition": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TauInfTransition') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TauInfTransition', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="TauInfTransition", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="TauInfTransition", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TauInfTransition'): - super(TauInfTransition, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TauInfTransition') - if self.from_ is not None and 'from_' not in already_processed: - already_processed.add('from_') - outfile.write(' from=%s' % (quote_attrib(self.from_), )) - if self.to is not None and 'to' not 
in already_processed: - already_processed.add('to') - outfile.write(' to=%s' % (quote_attrib(self.to), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TauInfTransition', fromsubclass_=False, pretty_print=True): - super(TauInfTransition, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="TauInfTransition", + ): + super(TauInfTransition, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="TauInfTransition", + ) + if self.from_ is not None and "from_" not in already_processed: + already_processed.add("from_") + outfile.write( + " from=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.from_), input_name="from" + ) + ), + ) + ) + if self.to is not None and "to" not in already_processed: + already_processed.add("to") + outfile.write( + " to=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.to), input_name="to") + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="TauInfTransition", + fromsubclass_=False, + pretty_print=True, + ): + super(TauInfTransition, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.steady_state is not None: - self.steady_state.export(outfile, level, namespaceprefix_, namespacedef_='', name_='steadyState', pretty_print=pretty_print) + namespaceprefix_ = ( + self.steady_state_nsprefix_ + ":" + if (UseCapturedNS_ and self.steady_state_nsprefix_) + else "" + ) + self.steady_state.export( + outfile, + 
level, + namespaceprefix_, + namespacedef_="", + name_="steadyState", + pretty_print=pretty_print, + ) if self.time_course is not None: - self.time_course.export(outfile, level, namespaceprefix_, namespacedef_='', name_='timeCourse', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.time_course_nsprefix_ + ":" + if (UseCapturedNS_ and self.time_course_nsprefix_) + else "" + ) + self.time_course.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="timeCourse", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('from', node) - if value is not None and 'from' not in already_processed: - already_processed.add('from') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("from", node) + if value is not None and "from" not in already_processed: + already_processed.add("from") self.from_ = value - self.validate_NmlId(self.from_) # validate type NmlId - value = find_attr_value_('to', node) - if value is not None and 'to' not in already_processed: - already_processed.add('to') + self.validate_NmlId(self.from_) # validate type NmlId + value = find_attr_value_("to", node) + if value is not None and "to" not in already_processed: + already_processed.add("to") self.to = value - self.validate_NmlId(self.to) # validate type NmlId - super(TauInfTransition, self).buildAttributes(node, attrs, 
already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'steadyState': + self.validate_NmlId(self.to) # validate type NmlId + super(TauInfTransition, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "steadyState": obj_ = HHVariable.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.steady_state = obj_ - obj_.original_tagname_ = 'steadyState' - elif nodeName_ == 'timeCourse': + obj_.original_tagname_ = "steadyState" + elif nodeName_ == "timeCourse": obj_ = HHTime.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.time_course = obj_ - obj_.original_tagname_ = 'timeCourse' - super(TauInfTransition, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "timeCourse" + super(TauInfTransition, self)._buildChildren(child_, node, nodeName_, True) + + # end class TauInfTransition class ReverseTransition(Base): + """ReverseTransition -- A reverse only **KSTransition** for a **gateKS** which specifies a **rate** ( type **baseHHRate** ) which follows one of the standard Hodgkin Huxley forms ( e. g. 
**HHExpRate** , **HHSigmoidRate** , **HHExpLinearRate**""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('from_', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('to', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('__ANY__', '__ANY__', 1, 1, {u'maxOccurs': u'unbounded', u'processContents': u'skip', u'minOccurs': u'0'}, None), + MemberSpec_("from_", "NmlId", 0, 0, {"use": "required", "name": "from_"}), + MemberSpec_("to", "NmlId", 0, 0, {"use": "required", "name": "to"}), + MemberSpec_( + "__ANY__", + "__ANY__", + 1, + 1, + {"maxOccurs": "unbounded", "minOccurs": "0", "processContents": "skip"}, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, from_=None, to=None, anytypeobjs_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + from_=None, + to=None, + anytypeobjs_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ReverseTransition, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ReverseTransition"), self).__init__( + neuro_lex_id, id, **kwargs_ + ) self.from_ = _cast(None, from_) + self.from__nsprefix_ = None self.to = _cast(None, to) + self.to_nsprefix_ = None if anytypeobjs_ is None: self.anytypeobjs_ = [] else: self.anytypeobjs_ = anytypeobjs_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ReverseTransition) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ReverseTransition) if subclass is not None: return subclass(*args_, **kwargs_) if ReverseTransition.subclass: return ReverseTransition.subclass(*args_, **kwargs_) else: return ReverseTransition(*args_, **kwargs_) + factory = 
staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): if ( - self.anytypeobjs_ or - super(ReverseTransition, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + + def _hasContent(self): + if self.anytypeobjs_ or super(ReverseTransition, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ReverseTransition', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ReverseTransition') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="ReverseTransition", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ReverseTransition") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is 
not None and name_ == "ReverseTransition": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ReverseTransition') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ReverseTransition', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ReverseTransition", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ReverseTransition", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ReverseTransition'): - super(ReverseTransition, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ReverseTransition') - if self.from_ is not None and 'from_' not in already_processed: - already_processed.add('from_') - outfile.write(' from=%s' % (quote_attrib(self.from_), )) - if self.to is not None and 'to' not in already_processed: - already_processed.add('to') - outfile.write(' to=%s' % (quote_attrib(self.to), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ReverseTransition', fromsubclass_=False, pretty_print=True): - super(ReverseTransition, self).exportChildren(outfile, level, 
namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ReverseTransition", + ): + super(ReverseTransition, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ReverseTransition", + ) + if self.from_ is not None and "from_" not in already_processed: + already_processed.add("from_") + outfile.write( + " from=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.from_), input_name="from" + ) + ), + ) + ) + if self.to is not None and "to" not in already_processed: + already_processed.add("to") + outfile.write( + " to=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.to), input_name="to") + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="ReverseTransition", + fromsubclass_=False, + pretty_print=True, + ): + super(ReverseTransition, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' - else: - eol_ = '' - for obj_ in self.anytypeobjs_: - obj_.export(outfile, level, namespaceprefix_, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + eol_ = "\n" + else: + eol_ = "" + if not fromsubclass_: + for obj_ in self.anytypeobjs_: + showIndent(outfile, level, pretty_print) + outfile.write(obj_) + outfile.write("\n") + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, 
already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('from', node) - if value is not None and 'from' not in already_processed: - already_processed.add('from') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("from", node) + if value is not None and "from" not in already_processed: + already_processed.add("from") self.from_ = value - self.validate_NmlId(self.from_) # validate type NmlId - value = find_attr_value_('to', node) - if value is not None and 'to' not in already_processed: - already_processed.add('to') + self.validate_NmlId(self.from_) # validate type NmlId + value = find_attr_value_("to", node) + if value is not None and "to" not in already_processed: + already_processed.add("to") self.to = value - self.validate_NmlId(self.to) # validate type NmlId - super(ReverseTransition, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - obj_ = self.gds_build_any(child_, 'ReverseTransition') - if obj_ is not None: - self.add_anytypeobjs_(obj_) - super(ReverseTransition, self).buildChildren(child_, node, nodeName_, True) + self.validate_NmlId(self.to) # validate type NmlId + super(ReverseTransition, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + content_ = self.gds_build_any(child_, "ReverseTransition") + self.anytypeobjs_.append(content_) + super(ReverseTransition, self)._buildChildren(child_, node, nodeName_, True) + + # end class ReverseTransition class ForwardTransition(Base): + """ForwardTransition -- A forward only **KSTransition** for a **gateKS** which specifies a **rate** ( 
type **baseHHRate** ) which follows one of the standard Hodgkin Huxley forms ( e. g. **HHExpRate** , **HHSigmoidRate** , **HHExpLinearRate**""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('from_', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('to', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('__ANY__', '__ANY__', 1, 1, {u'maxOccurs': u'unbounded', u'processContents': u'skip', u'minOccurs': u'0'}, None), + MemberSpec_("from_", "NmlId", 0, 0, {"use": "required", "name": "from_"}), + MemberSpec_("to", "NmlId", 0, 0, {"use": "required", "name": "to"}), + MemberSpec_( + "__ANY__", + "__ANY__", + 1, + 1, + {"maxOccurs": "unbounded", "minOccurs": "0", "processContents": "skip"}, + None, + ), ] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, from_=None, to=None, anytypeobjs_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + from_=None, + to=None, + anytypeobjs_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ForwardTransition, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ForwardTransition"), self).__init__( + neuro_lex_id, id, **kwargs_ + ) self.from_ = _cast(None, from_) + self.from__nsprefix_ = None self.to = _cast(None, to) + self.to_nsprefix_ = None if anytypeobjs_ is None: self.anytypeobjs_ = [] else: self.anytypeobjs_ = anytypeobjs_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ForwardTransition) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ForwardTransition) if subclass is not None: return subclass(*args_, **kwargs_) if ForwardTransition.subclass: return ForwardTransition.subclass(*args_, 
**kwargs_) else: return ForwardTransition(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): if ( - self.anytypeobjs_ or - super(ForwardTransition, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + + def _hasContent(self): + if self.anytypeobjs_ or super(ForwardTransition, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ForwardTransition', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ForwardTransition') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="ForwardTransition", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ForwardTransition") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if 
self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ForwardTransition": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ForwardTransition') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ForwardTransition', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ForwardTransition", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ForwardTransition", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ForwardTransition'): - super(ForwardTransition, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ForwardTransition') - if self.from_ is not None and 'from_' not in already_processed: - already_processed.add('from_') - outfile.write(' from=%s' % (quote_attrib(self.from_), )) - if self.to is not None and 'to' not in already_processed: - already_processed.add('to') - outfile.write(' to=%s' % (quote_attrib(self.to), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ForwardTransition', fromsubclass_=False, 
pretty_print=True): - super(ForwardTransition, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ForwardTransition", + ): + super(ForwardTransition, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ForwardTransition", + ) + if self.from_ is not None and "from_" not in already_processed: + already_processed.add("from_") + outfile.write( + " from=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.from_), input_name="from" + ) + ), + ) + ) + if self.to is not None and "to" not in already_processed: + already_processed.add("to") + outfile.write( + " to=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.to), input_name="to") + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="ForwardTransition", + fromsubclass_=False, + pretty_print=True, + ): + super(ForwardTransition, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' - else: - eol_ = '' - for obj_ in self.anytypeobjs_: - obj_.export(outfile, level, namespaceprefix_, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + eol_ = "\n" + else: + eol_ = "" + if not fromsubclass_: + for obj_ in self.anytypeobjs_: + showIndent(outfile, level, pretty_print) + outfile.write(obj_) + outfile.write("\n") + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + 
self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('from', node) - if value is not None and 'from' not in already_processed: - already_processed.add('from') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("from", node) + if value is not None and "from" not in already_processed: + already_processed.add("from") self.from_ = value - self.validate_NmlId(self.from_) # validate type NmlId - value = find_attr_value_('to', node) - if value is not None and 'to' not in already_processed: - already_processed.add('to') + self.validate_NmlId(self.from_) # validate type NmlId + value = find_attr_value_("to", node) + if value is not None and "to" not in already_processed: + already_processed.add("to") self.to = value - self.validate_NmlId(self.to) # validate type NmlId - super(ForwardTransition, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - obj_ = self.gds_build_any(child_, 'ForwardTransition') - if obj_ is not None: - self.add_anytypeobjs_(obj_) - super(ForwardTransition, self).buildChildren(child_, node, nodeName_, True) + self.validate_NmlId(self.to) # validate type NmlId + super(ForwardTransition, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + content_ = self.gds_build_any(child_, "ForwardTransition") + self.anytypeobjs_.append(content_) + super(ForwardTransition, self)._buildChildren(child_, node, nodeName_, True) + + # end class ForwardTransition class OpenState(Base): - member_data_items_ = [ - ] + 
"""OpenState -- A **KSState** with **relativeConductance** of 1 + \n + :param relativeConductance: + :type relativeConductance: none + + """ + + __hash__ = GeneratedsSuper.__hash__ + member_data_items_ = [] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, **kwargs_): + + def __init__(self, neuro_lex_id=None, id=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(OpenState, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("OpenState"), self).__init__(neuro_lex_id, id, **kwargs_) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, OpenState) + subclass = getSubclassFromModule_(CurrentSubclassModule_, OpenState) if subclass is not None: return subclass(*args_, **kwargs_) if OpenState.subclass: return OpenState.subclass(*args_, **kwargs_) else: return OpenState(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(OpenState, self).hasContent_() - ): + + def _hasContent(self): + if super(OpenState, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='OpenState', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('OpenState') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="OpenState", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("OpenState") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "OpenState": name_ = 
self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='OpenState') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='OpenState', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='OpenState'): - super(OpenState, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='OpenState') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='OpenState', fromsubclass_=False, pretty_print=True): - super(OpenState, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="OpenState" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="OpenState", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="OpenState" + ): + super(OpenState, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="OpenState" + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + 
namespacedef_="", + name_="OpenState", + fromsubclass_=False, + pretty_print=True, + ): + super(OpenState, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(OpenState, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(OpenState, self).buildChildren(child_, node, nodeName_, True) + + def _buildAttributes(self, node, attrs, already_processed): + super(OpenState, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(OpenState, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class OpenState class ClosedState(Base): - member_data_items_ = [ - ] + """ClosedState -- A **KSState** with **relativeConductance** of 0 + \n + :param relativeConductance: + :type relativeConductance: none + + """ + + __hash__ = GeneratedsSuper.__hash__ + member_data_items_ = [] subclass = None superclass = Base - def __init__(self, neuro_lex_id=None, id=None, **kwargs_): + + def __init__(self, neuro_lex_id=None, id=None, gds_collector_=None, **kwargs_): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = 
kwargs_.get('parent_object_') - super(ClosedState, self).__init__(neuro_lex_id, id, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ClosedState"), self).__init__(neuro_lex_id, id, **kwargs_) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ClosedState) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ClosedState) if subclass is not None: return subclass(*args_, **kwargs_) if ClosedState.subclass: return ClosedState.subclass(*args_, **kwargs_) else: return ClosedState(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(ClosedState, self).hasContent_() - ): + + def _hasContent(self): + if super(ClosedState, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ClosedState', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ClosedState') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ClosedState", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ClosedState") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ClosedState": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ClosedState') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, 
namespaceprefix_, namespacedef_, name_='ClosedState', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ClosedState'): - super(ClosedState, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ClosedState') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ClosedState', fromsubclass_=False, pretty_print=True): - super(ClosedState, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ClosedState" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ClosedState", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ClosedState", + ): + super(ClosedState, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ClosedState" + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ClosedState", + fromsubclass_=False, + pretty_print=True, + ): + super(ClosedState, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - 
self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(ClosedState, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ClosedState, self).buildChildren(child_, node, nodeName_, True) + + def _buildAttributes(self, node, attrs, already_processed): + super(ClosedState, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(ClosedState, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class ClosedState class IonChannelKS(Standalone): - """Kinetic scheme based ion channel.""" + """A kinetic scheme based ion channel with multiple **gateKS** s, each of which consists of multiple **KSState** s and **KSTransition** s giving the rates of transition between them + IonChannelKS -- A kinetic scheme based ion channel with multiple **gateKS** s, each of which consists of multiple **KSState** s and **KSTransition** s giving the rates of transition between them + \n + :param conductance: + :type conductance: conductance + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('species', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('conductance', 'Nml2Quantity_conductance', 0, 1, {'use': u'optional'}), - MemberSpec_('gate_kses', 'GateKS', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GateKS', u'name': u'gateKS', u'minOccurs': u'0'}, None), + MemberSpec_("species", "NmlId", 0, 1, {"use": "optional", "name": "species"}), + MemberSpec_( + "conductance", + 
"Nml2Quantity_conductance", + 0, + 1, + {"use": "optional", "name": "conductance"}, + ), + MemberSpec_( + "gate_kses", + "GateKS", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "gateKS", + "type": "GateKS", + }, + None, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, species=None, conductance=None, gate_kses=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + species=None, + conductance=None, + gate_kses=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(IonChannelKS, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("IonChannelKS"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.species = _cast(None, species) + self.species_nsprefix_ = None self.conductance = _cast(None, conductance) + self.conductance_nsprefix_ = None if gate_kses is None: self.gate_kses = [] else: self.gate_kses = gate_kses + self.gate_kses_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, IonChannelKS) + subclass = getSubclassFromModule_(CurrentSubclassModule_, IonChannelKS) if subclass is not None: return subclass(*args_, **kwargs_) if IonChannelKS.subclass: return IonChannelKS.subclass(*args_, **kwargs_) else: return IonChannelKS(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_conductance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductance_patterns_, )) - validate_Nml2Quantity_conductance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$']] - def hasContent_(self): if ( - self.gate_kses or - super(IonChannelKS, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_conductance_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_conductance_patterns_, + ) + ) + + validate_Nml2Quantity_conductance_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS))$"] + ] + + def _hasContent(self): + if self.gate_kses or super(IonChannelKS, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IonChannelKS', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IonChannelKS') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="IonChannelKS", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IonChannelKS") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not 
None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "IonChannelKS": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelKS') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IonChannelKS', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IonChannelKS" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IonChannelKS", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IonChannelKS'): - super(IonChannelKS, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelKS') - if self.species is not None and 'species' not in already_processed: - already_processed.add('species') - outfile.write(' species=%s' % (quote_attrib(self.species), )) - if self.conductance is not None and 'conductance' not in already_processed: - already_processed.add('conductance') - outfile.write(' conductance=%s' % (quote_attrib(self.conductance), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IonChannelKS', fromsubclass_=False, pretty_print=True): - 
super(IonChannelKS, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="IonChannelKS", + ): + super(IonChannelKS, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IonChannelKS" + ) + if self.species is not None and "species" not in already_processed: + already_processed.add("species") + outfile.write( + " species=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.species), input_name="species" + ) + ), + ) + ) + if self.conductance is not None and "conductance" not in already_processed: + already_processed.add("conductance") + outfile.write( + " conductance=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.conductance), input_name="conductance" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="IonChannelKS", + fromsubclass_=False, + pretty_print=True, + ): + super(IonChannelKS, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for gateKS_ in self.gate_kses: - gateKS_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gateKS', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.gate_kses_nsprefix_ + ":" + if (UseCapturedNS_ and self.gate_kses_nsprefix_) + else "" + ) + gateKS_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="gateKS", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + 
self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('species', node) - if value is not None and 'species' not in already_processed: - already_processed.add('species') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("species", node) + if value is not None and "species" not in already_processed: + already_processed.add("species") self.species = value - self.validate_NmlId(self.species) # validate type NmlId - value = find_attr_value_('conductance', node) - if value is not None and 'conductance' not in already_processed: - already_processed.add('conductance') + self.validate_NmlId(self.species) # validate type NmlId + value = find_attr_value_("conductance", node) + if value is not None and "conductance" not in already_processed: + already_processed.add("conductance") self.conductance = value - self.validate_Nml2Quantity_conductance(self.conductance) # validate type Nml2Quantity_conductance - super(IonChannelKS, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'gateKS': + self.validate_Nml2Quantity_conductance( + self.conductance + ) # validate type Nml2Quantity_conductance + super(IonChannelKS, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "gateKS": obj_ = GateKS.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, 
gds_collector_=gds_collector_) self.gate_kses.append(obj_) - obj_.original_tagname_ = 'gateKS' - super(IonChannelKS, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "gateKS" + super(IonChannelKS, self)._buildChildren(child_, node, nodeName_, True) + + # end class IonChannelKS class IonChannelScalable(Standalone): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('q10_conductance_scalings', 'Q10ConductanceScaling', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Q10ConductanceScaling', u'name': u'q10ConductanceScaling', u'minOccurs': u'0'}, None), + MemberSpec_( + "q10_conductance_scalings", + "Q10ConductanceScaling", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "q10ConductanceScaling", + "type": "Q10ConductanceScaling", + }, + None, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, q10_conductance_scalings=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + q10_conductance_scalings=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(IonChannelScalable, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("IonChannelScalable"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + extensiontype_, + **kwargs_ + ) if q10_conductance_scalings is None: self.q10_conductance_scalings = [] else: self.q10_conductance_scalings = q10_conductance_scalings + self.q10_conductance_scalings_nsprefix_ = None self.extensiontype_ = 
extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, IonChannelScalable) + CurrentSubclassModule_, IonChannelScalable + ) if subclass is not None: return subclass(*args_, **kwargs_) if IonChannelScalable.subclass: return IonChannelScalable.subclass(*args_, **kwargs_) else: return IonChannelScalable(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): + + def _hasContent(self): if ( - self.q10_conductance_scalings or - super(IonChannelScalable, self).hasContent_() + self.q10_conductance_scalings + or super(IonChannelScalable, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IonChannelScalable', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IonChannelScalable') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="IonChannelScalable", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IonChannelScalable") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "IonChannelScalable": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelScalable') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IonChannelScalable', 
pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="IonChannelScalable", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IonChannelScalable", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IonChannelScalable'): - super(IonChannelScalable, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelScalable') - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="IonChannelScalable", + ): + super(IonChannelScalable, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="IonChannelScalable", + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IonChannelScalable', fromsubclass_=False, pretty_print=True): - super(IonChannelScalable, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = 
GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="IonChannelScalable", + fromsubclass_=False, + pretty_print=True, + ): + super(IonChannelScalable, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for q10ConductanceScaling_ in self.q10_conductance_scalings: - q10ConductanceScaling_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='q10ConductanceScaling', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.q10_conductance_scalings_nsprefix_ + ":" + if (UseCapturedNS_ and self.q10_conductance_scalings_nsprefix_) + else "" + ) + q10ConductanceScaling_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="q10ConductanceScaling", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + + def _buildAttributes(self, 
node, attrs, already_processed): + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(IonChannelScalable, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'q10ConductanceScaling': + super(IonChannelScalable, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "q10ConductanceScaling": obj_ = Q10ConductanceScaling.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.q10_conductance_scalings.append(obj_) - obj_.original_tagname_ = 'q10ConductanceScaling' - super(IonChannelScalable, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "q10ConductanceScaling" + super(IonChannelScalable, self)._buildChildren(child_, node, nodeName_, True) + + # end class IonChannelScalable class NeuroMLDocument(Standalone): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('includes', 'IncludeType', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IncludeType', u'name': u'include', u'minOccurs': u'0'}, None), - MemberSpec_('extracellular_properties', 'ExtracellularProperties', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ExtracellularProperties', u'name': u'extracellularProperties', u'minOccurs': u'0'}, None), - MemberSpec_('intracellular_properties', 'IntracellularProperties', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IntracellularProperties', u'name': u'intracellularProperties', u'minOccurs': u'0'}, None), - MemberSpec_('morphology', 'Morphology', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Morphology', u'name': u'morphology', u'minOccurs': u'0'}, None), - MemberSpec_('ion_channel', 'IonChannel', 1, 1, {u'maxOccurs': u'unbounded', 
u'type': u'IonChannel', u'name': u'ionChannel', u'minOccurs': u'0'}, None), - MemberSpec_('ion_channel_hhs', 'IonChannelHH', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IonChannelHH', u'name': u'ionChannelHH', u'minOccurs': u'0'}, None), - MemberSpec_('ion_channel_v_shifts', 'IonChannelVShift', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IonChannelVShift', u'name': u'ionChannelVShift', u'minOccurs': u'0'}, None), - MemberSpec_('ion_channel_kses', 'IonChannelKS', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IonChannelKS', u'name': u'ionChannelKS', u'minOccurs': u'0'}, None), - MemberSpec_('decaying_pool_concentration_models', 'DecayingPoolConcentrationModel', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'DecayingPoolConcentrationModel', u'name': u'decayingPoolConcentrationModel', u'minOccurs': u'0'}, None), - MemberSpec_('fixed_factor_concentration_models', 'FixedFactorConcentrationModel', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'FixedFactorConcentrationModel', u'name': u'fixedFactorConcentrationModel', u'minOccurs': u'0'}, None), - MemberSpec_('alpha_current_synapses', 'AlphaCurrentSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'AlphaCurrentSynapse', u'name': u'alphaCurrentSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('alpha_synapses', 'AlphaSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'AlphaSynapse', u'name': u'alphaSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('exp_one_synapses', 'ExpOneSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ExpOneSynapse', u'name': u'expOneSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('exp_two_synapses', 'ExpTwoSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ExpTwoSynapse', u'name': u'expTwoSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('exp_three_synapses', 'ExpThreeSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ExpThreeSynapse', u'name': u'expThreeSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('blocking_plastic_synapses', 'BlockingPlasticSynapse', 1, 1, 
{u'maxOccurs': u'unbounded', u'type': u'BlockingPlasticSynapse', u'name': u'blockingPlasticSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('double_synapses', 'DoubleSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'DoubleSynapse', u'name': u'doubleSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('gap_junctions', 'GapJunction', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GapJunction', u'name': u'gapJunction', u'minOccurs': u'0'}, None), - MemberSpec_('silent_synapses', 'SilentSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SilentSynapse', u'name': u'silentSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('linear_graded_synapses', 'LinearGradedSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'LinearGradedSynapse', u'name': u'linearGradedSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('graded_synapses', 'GradedSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GradedSynapse', u'name': u'gradedSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('biophysical_properties', 'BiophysicalProperties', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'BiophysicalProperties', u'name': u'biophysicalProperties', u'minOccurs': u'0'}, None), - MemberSpec_('cells', 'Cell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Cell', u'name': u'cell', u'minOccurs': u'0'}, None), - MemberSpec_('cell2_ca_poolses', 'Cell2CaPools', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Cell2CaPools', u'name': u'cell2CaPools', u'minOccurs': u'0'}, None), - MemberSpec_('base_cells', 'BaseCell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'BaseCell', u'name': u'baseCell', u'minOccurs': u'0'}, None), - MemberSpec_('iaf_tau_cells', 'IafTauCell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IafTauCell', u'name': u'iafTauCell', u'minOccurs': u'0'}, None), - MemberSpec_('iaf_tau_ref_cells', 'IafTauRefCell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IafTauRefCell', u'name': u'iafTauRefCell', u'minOccurs': u'0'}, None), - MemberSpec_('iaf_cells', 'IafCell', 1, 1, 
{u'maxOccurs': u'unbounded', u'type': u'IafCell', u'name': u'iafCell', u'minOccurs': u'0'}, None), - MemberSpec_('iaf_ref_cells', 'IafRefCell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IafRefCell', u'name': u'iafRefCell', u'minOccurs': u'0'}, None), - MemberSpec_('izhikevich_cells', 'IzhikevichCell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IzhikevichCell', u'name': u'izhikevichCell', u'minOccurs': u'0'}, None), - MemberSpec_('izhikevich2007_cells', 'Izhikevich2007Cell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Izhikevich2007Cell', u'name': u'izhikevich2007Cell', u'minOccurs': u'0'}, None), - MemberSpec_('ad_ex_ia_f_cells', 'AdExIaFCell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'AdExIaFCell', u'name': u'adExIaFCell', u'minOccurs': u'0'}, None), - MemberSpec_('fitz_hugh_nagumo_cells', 'FitzHughNagumoCell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'FitzHughNagumoCell', u'name': u'fitzHughNagumoCell', u'minOccurs': u'0'}, None), - MemberSpec_('fitz_hugh_nagumo1969_cells', 'FitzHughNagumo1969Cell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'FitzHughNagumo1969Cell', u'name': u'fitzHughNagumo1969Cell', u'minOccurs': u'0'}, None), - MemberSpec_('pinsky_rinzel_ca3_cells', 'PinskyRinzelCA3Cell', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'PinskyRinzelCA3Cell', u'name': u'pinskyRinzelCA3Cell', u'minOccurs': u'0'}, None), - MemberSpec_('pulse_generators', 'PulseGenerator', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'PulseGenerator', u'name': u'pulseGenerator', u'minOccurs': u'0'}, None), - MemberSpec_('pulse_generator_dls', 'PulseGeneratorDL', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'PulseGeneratorDL', u'name': u'pulseGeneratorDL', u'minOccurs': u'0'}, None), - MemberSpec_('sine_generators', 'SineGenerator', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SineGenerator', u'name': u'sineGenerator', u'minOccurs': u'0'}, None), - MemberSpec_('sine_generator_dls', 'SineGeneratorDL', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SineGeneratorDL', 
u'name': u'sineGeneratorDL', u'minOccurs': u'0'}, None), - MemberSpec_('ramp_generators', 'RampGenerator', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'RampGenerator', u'name': u'rampGenerator', u'minOccurs': u'0'}, None), - MemberSpec_('ramp_generator_dls', 'RampGeneratorDL', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'RampGeneratorDL', u'name': u'rampGeneratorDL', u'minOccurs': u'0'}, None), - MemberSpec_('compound_inputs', 'CompoundInput', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'CompoundInput', u'name': u'compoundInput', u'minOccurs': u'0'}, None), - MemberSpec_('compound_input_dls', 'CompoundInputDL', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'CompoundInputDL', u'name': u'compoundInputDL', u'minOccurs': u'0'}, None), - MemberSpec_('voltage_clamps', 'VoltageClamp', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'VoltageClamp', u'name': u'voltageClamp', u'minOccurs': u'0'}, None), - MemberSpec_('voltage_clamp_triples', 'VoltageClampTriple', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'VoltageClampTriple', u'name': u'voltageClampTriple', u'minOccurs': u'0'}, None), - MemberSpec_('spike_arrays', 'SpikeArray', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SpikeArray', u'name': u'spikeArray', u'minOccurs': u'0'}, None), - MemberSpec_('timed_synaptic_inputs', 'TimedSynapticInput', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'TimedSynapticInput', u'name': u'timedSynapticInput', u'minOccurs': u'0'}, None), - MemberSpec_('spike_generators', 'SpikeGenerator', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SpikeGenerator', u'name': u'spikeGenerator', u'minOccurs': u'0'}, None), - MemberSpec_('spike_generator_randoms', 'SpikeGeneratorRandom', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SpikeGeneratorRandom', u'name': u'spikeGeneratorRandom', u'minOccurs': u'0'}, None), - MemberSpec_('spike_generator_poissons', 'SpikeGeneratorPoisson', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SpikeGeneratorPoisson', u'name': u'spikeGeneratorPoisson', u'minOccurs': u'0'}, 
None), - MemberSpec_('spike_generator_ref_poissons', 'SpikeGeneratorRefPoisson', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SpikeGeneratorRefPoisson', u'name': u'spikeGeneratorRefPoisson', u'minOccurs': u'0'}, None), - MemberSpec_('poisson_firing_synapses', 'PoissonFiringSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'PoissonFiringSynapse', u'name': u'poissonFiringSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('transient_poisson_firing_synapses', 'TransientPoissonFiringSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'TransientPoissonFiringSynapse', u'name': u'transientPoissonFiringSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('IF_curr_alpha', 'IF_curr_alpha', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IF_curr_alpha', u'name': u'IF_curr_alpha', u'minOccurs': u'0'}, None), - MemberSpec_('IF_curr_exp', 'IF_curr_exp', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IF_curr_exp', u'name': u'IF_curr_exp', u'minOccurs': u'0'}, None), - MemberSpec_('IF_cond_alpha', 'IF_cond_alpha', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IF_cond_alpha', u'name': u'IF_cond_alpha', u'minOccurs': u'0'}, None), - MemberSpec_('IF_cond_exp', 'IF_cond_exp', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'IF_cond_exp', u'name': u'IF_cond_exp', u'minOccurs': u'0'}, None), - MemberSpec_('EIF_cond_exp_isfa_ista', 'EIF_cond_exp_isfa_ista', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'EIF_cond_exp_isfa_ista', u'name': u'EIF_cond_exp_isfa_ista', u'minOccurs': u'0'}, None), - MemberSpec_('EIF_cond_alpha_isfa_ista', 'EIF_cond_alpha_isfa_ista', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'EIF_cond_alpha_isfa_ista', u'name': u'EIF_cond_alpha_isfa_ista', u'minOccurs': u'0'}, None), - MemberSpec_('HH_cond_exp', 'HH_cond_exp', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'HH_cond_exp', u'name': u'HH_cond_exp', u'minOccurs': u'0'}, None), - MemberSpec_('exp_cond_synapses', 'ExpCondSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ExpCondSynapse', u'name': 
u'expCondSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('alpha_cond_synapses', 'AlphaCondSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'AlphaCondSynapse', u'name': u'alphaCondSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('exp_curr_synapses', 'ExpCurrSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ExpCurrSynapse', u'name': u'expCurrSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('alpha_curr_synapses', 'AlphaCurrSynapse', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'AlphaCurrSynapse', u'name': u'alphaCurrSynapse', u'minOccurs': u'0'}, None), - MemberSpec_('SpikeSourcePoisson', 'SpikeSourcePoisson', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'SpikeSourcePoisson', u'name': u'SpikeSourcePoisson', u'minOccurs': u'0'}, None), - MemberSpec_('networks', 'Network', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Network', u'name': u'network', u'minOccurs': u'0'}, None), - MemberSpec_('ComponentType', 'ComponentType', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ComponentType', u'name': u'ComponentType', u'minOccurs': u'0'}, None), + MemberSpec_( + "includes", + "IncludeType", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "include", + "type": "IncludeType", + }, + None, + ), + MemberSpec_( + "extracellular_properties", + "ExtracellularProperties", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "extracellularProperties", + "type": "ExtracellularProperties", + }, + None, + ), + MemberSpec_( + "intracellular_properties", + "IntracellularProperties", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "intracellularProperties", + "type": "IntracellularProperties", + }, + None, + ), + MemberSpec_( + "morphology", + "Morphology", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "morphology", + "type": "Morphology", + }, + None, + ), + MemberSpec_( + "ion_channel", + "IonChannel", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": 
"ionChannel", + "type": "IonChannel", + }, + None, + ), + MemberSpec_( + "ion_channel_hhs", + "IonChannelHH", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "ionChannelHH", + "type": "IonChannelHH", + }, + None, + ), + MemberSpec_( + "ion_channel_v_shifts", + "IonChannelVShift", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "ionChannelVShift", + "type": "IonChannelVShift", + }, + None, + ), + MemberSpec_( + "ion_channel_kses", + "IonChannelKS", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "ionChannelKS", + "type": "IonChannelKS", + }, + None, + ), + MemberSpec_( + "decaying_pool_concentration_models", + "DecayingPoolConcentrationModel", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "decayingPoolConcentrationModel", + "type": "DecayingPoolConcentrationModel", + }, + None, + ), + MemberSpec_( + "fixed_factor_concentration_models", + "FixedFactorConcentrationModel", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "fixedFactorConcentrationModel", + "type": "FixedFactorConcentrationModel", + }, + None, + ), + MemberSpec_( + "alpha_current_synapses", + "AlphaCurrentSynapse", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "alphaCurrentSynapse", + "type": "AlphaCurrentSynapse", + }, + None, + ), + MemberSpec_( + "alpha_synapses", + "AlphaSynapse", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "alphaSynapse", + "type": "AlphaSynapse", + }, + None, + ), + MemberSpec_( + "exp_one_synapses", + "ExpOneSynapse", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "expOneSynapse", + "type": "ExpOneSynapse", + }, + None, + ), + MemberSpec_( + "exp_two_synapses", + "ExpTwoSynapse", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "expTwoSynapse", + "type": "ExpTwoSynapse", + }, + None, + ), + MemberSpec_( + "exp_three_synapses", + "ExpThreeSynapse", + 1, + 1, + { 
+ "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "expThreeSynapse", + "type": "ExpThreeSynapse", + }, + None, + ), + MemberSpec_( + "blocking_plastic_synapses", + "BlockingPlasticSynapse", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "blockingPlasticSynapse", + "type": "BlockingPlasticSynapse", + }, + None, + ), + MemberSpec_( + "double_synapses", + "DoubleSynapse", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "doubleSynapse", + "type": "DoubleSynapse", + }, + None, + ), + MemberSpec_( + "gap_junctions", + "GapJunction", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "gapJunction", + "type": "GapJunction", + }, + None, + ), + MemberSpec_( + "silent_synapses", + "SilentSynapse", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "silentSynapse", + "type": "SilentSynapse", + }, + None, + ), + MemberSpec_( + "linear_graded_synapses", + "LinearGradedSynapse", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "linearGradedSynapse", + "type": "LinearGradedSynapse", + }, + None, + ), + MemberSpec_( + "graded_synapses", + "GradedSynapse", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "gradedSynapse", + "type": "GradedSynapse", + }, + None, + ), + MemberSpec_( + "biophysical_properties", + "BiophysicalProperties", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "biophysicalProperties", + "type": "BiophysicalProperties", + }, + None, + ), + MemberSpec_( + "cells", + "Cell", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "cell", + "type": "Cell", + }, + None, + ), + MemberSpec_( + "cell2_ca_poolses", + "Cell2CaPools", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "cell2CaPools", + "type": "Cell2CaPools", + }, + None, + ), + MemberSpec_( + "base_cells", + "BaseCell", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "baseCell", 
+ "type": "BaseCell", + }, + None, + ), + MemberSpec_( + "iaf_tau_cells", + "IafTauCell", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "iafTauCell", + "type": "IafTauCell", + }, + None, + ), + MemberSpec_( + "iaf_tau_ref_cells", + "IafTauRefCell", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "iafTauRefCell", + "type": "IafTauRefCell", + }, + None, + ), + MemberSpec_( + "iaf_cells", + "IafCell", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "iafCell", + "type": "IafCell", + }, + None, + ), + MemberSpec_( + "iaf_ref_cells", + "IafRefCell", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "iafRefCell", + "type": "IafRefCell", + }, + None, + ), + MemberSpec_( + "izhikevich_cells", + "IzhikevichCell", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "izhikevichCell", + "type": "IzhikevichCell", + }, + None, + ), + MemberSpec_( + "izhikevich2007_cells", + "Izhikevich2007Cell", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "izhikevich2007Cell", + "type": "Izhikevich2007Cell", + }, + None, + ), + MemberSpec_( + "ad_ex_ia_f_cells", + "AdExIaFCell", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "adExIaFCell", + "type": "AdExIaFCell", + }, + None, + ), + MemberSpec_( + "fitz_hugh_nagumo_cells", + "FitzHughNagumoCell", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "fitzHughNagumoCell", + "type": "FitzHughNagumoCell", + }, + None, + ), + MemberSpec_( + "fitz_hugh_nagumo1969_cells", + "FitzHughNagumo1969Cell", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "fitzHughNagumo1969Cell", + "type": "FitzHughNagumo1969Cell", + }, + None, + ), + MemberSpec_( + "pinsky_rinzel_ca3_cells", + "PinskyRinzelCA3Cell", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "pinskyRinzelCA3Cell", + "type": "PinskyRinzelCA3Cell", + }, + None, + ), + 
MemberSpec_( + "pulse_generators", + "PulseGenerator", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "pulseGenerator", + "type": "PulseGenerator", + }, + None, + ), + MemberSpec_( + "pulse_generator_dls", + "PulseGeneratorDL", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "pulseGeneratorDL", + "type": "PulseGeneratorDL", + }, + None, + ), + MemberSpec_( + "sine_generators", + "SineGenerator", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "sineGenerator", + "type": "SineGenerator", + }, + None, + ), + MemberSpec_( + "sine_generator_dls", + "SineGeneratorDL", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "sineGeneratorDL", + "type": "SineGeneratorDL", + }, + None, + ), + MemberSpec_( + "ramp_generators", + "RampGenerator", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "rampGenerator", + "type": "RampGenerator", + }, + None, + ), + MemberSpec_( + "ramp_generator_dls", + "RampGeneratorDL", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "rampGeneratorDL", + "type": "RampGeneratorDL", + }, + None, + ), + MemberSpec_( + "compound_inputs", + "CompoundInput", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "compoundInput", + "type": "CompoundInput", + }, + None, + ), + MemberSpec_( + "compound_input_dls", + "CompoundInputDL", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "compoundInputDL", + "type": "CompoundInputDL", + }, + None, + ), + MemberSpec_( + "voltage_clamps", + "VoltageClamp", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "voltageClamp", + "type": "VoltageClamp", + }, + None, + ), + MemberSpec_( + "voltage_clamp_triples", + "VoltageClampTriple", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "voltageClampTriple", + "type": "VoltageClampTriple", + }, + None, + ), + MemberSpec_( + "spike_arrays", + "SpikeArray", + 
1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "spikeArray", + "type": "SpikeArray", + }, + None, + ), + MemberSpec_( + "timed_synaptic_inputs", + "TimedSynapticInput", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "timedSynapticInput", + "type": "TimedSynapticInput", + }, + None, + ), + MemberSpec_( + "spike_generators", + "SpikeGenerator", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "spikeGenerator", + "type": "SpikeGenerator", + }, + None, + ), + MemberSpec_( + "spike_generator_randoms", + "SpikeGeneratorRandom", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "spikeGeneratorRandom", + "type": "SpikeGeneratorRandom", + }, + None, + ), + MemberSpec_( + "spike_generator_poissons", + "SpikeGeneratorPoisson", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "spikeGeneratorPoisson", + "type": "SpikeGeneratorPoisson", + }, + None, + ), + MemberSpec_( + "spike_generator_ref_poissons", + "SpikeGeneratorRefPoisson", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "spikeGeneratorRefPoisson", + "type": "SpikeGeneratorRefPoisson", + }, + None, + ), + MemberSpec_( + "poisson_firing_synapses", + "PoissonFiringSynapse", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "poissonFiringSynapse", + "type": "PoissonFiringSynapse", + }, + None, + ), + MemberSpec_( + "transient_poisson_firing_synapses", + "TransientPoissonFiringSynapse", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "transientPoissonFiringSynapse", + "type": "TransientPoissonFiringSynapse", + }, + None, + ), + MemberSpec_( + "IF_curr_alpha", + "IF_curr_alpha", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "IF_curr_alpha", + "type": "IF_curr_alpha", + }, + None, + ), + MemberSpec_( + "IF_curr_exp", + "IF_curr_exp", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "IF_curr_exp", 
+ "type": "IF_curr_exp", + }, + None, + ), + MemberSpec_( + "IF_cond_alpha", + "IF_cond_alpha", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "IF_cond_alpha", + "type": "IF_cond_alpha", + }, + None, + ), + MemberSpec_( + "IF_cond_exp", + "IF_cond_exp", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "IF_cond_exp", + "type": "IF_cond_exp", + }, + None, + ), + MemberSpec_( + "EIF_cond_exp_isfa_ista", + "EIF_cond_exp_isfa_ista", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "EIF_cond_exp_isfa_ista", + "type": "EIF_cond_exp_isfa_ista", + }, + None, + ), + MemberSpec_( + "EIF_cond_alpha_isfa_ista", + "EIF_cond_alpha_isfa_ista", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "EIF_cond_alpha_isfa_ista", + "type": "EIF_cond_alpha_isfa_ista", + }, + None, + ), + MemberSpec_( + "HH_cond_exp", + "HH_cond_exp", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "HH_cond_exp", + "type": "HH_cond_exp", + }, + None, + ), + MemberSpec_( + "exp_cond_synapses", + "ExpCondSynapse", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "expCondSynapse", + "type": "ExpCondSynapse", + }, + None, + ), + MemberSpec_( + "alpha_cond_synapses", + "AlphaCondSynapse", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "alphaCondSynapse", + "type": "AlphaCondSynapse", + }, + None, + ), + MemberSpec_( + "exp_curr_synapses", + "ExpCurrSynapse", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "expCurrSynapse", + "type": "ExpCurrSynapse", + }, + None, + ), + MemberSpec_( + "alpha_curr_synapses", + "AlphaCurrSynapse", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "alphaCurrSynapse", + "type": "AlphaCurrSynapse", + }, + None, + ), + MemberSpec_( + "SpikeSourcePoisson", + "SpikeSourcePoisson", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "SpikeSourcePoisson", + 
"type": "SpikeSourcePoisson", + }, + None, + ), + MemberSpec_( + "networks", + "Network", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "network", + "type": "Network", + }, + None, + ), + MemberSpec_( + "ComponentType", + "ComponentType", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "ComponentType", + "type": "ComponentType", + }, + None, + ), ] subclass = None superclass = Standalone - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, includes=None, extracellular_properties=None, intracellular_properties=None, morphology=None, ion_channel=None, ion_channel_hhs=None, ion_channel_v_shifts=None, ion_channel_kses=None, decaying_pool_concentration_models=None, fixed_factor_concentration_models=None, alpha_current_synapses=None, alpha_synapses=None, exp_one_synapses=None, exp_two_synapses=None, exp_three_synapses=None, blocking_plastic_synapses=None, double_synapses=None, gap_junctions=None, silent_synapses=None, linear_graded_synapses=None, graded_synapses=None, biophysical_properties=None, cells=None, cell2_ca_poolses=None, base_cells=None, iaf_tau_cells=None, iaf_tau_ref_cells=None, iaf_cells=None, iaf_ref_cells=None, izhikevich_cells=None, izhikevich2007_cells=None, ad_ex_ia_f_cells=None, fitz_hugh_nagumo_cells=None, fitz_hugh_nagumo1969_cells=None, pinsky_rinzel_ca3_cells=None, pulse_generators=None, pulse_generator_dls=None, sine_generators=None, sine_generator_dls=None, ramp_generators=None, ramp_generator_dls=None, compound_inputs=None, compound_input_dls=None, voltage_clamps=None, voltage_clamp_triples=None, spike_arrays=None, timed_synaptic_inputs=None, spike_generators=None, spike_generator_randoms=None, spike_generator_poissons=None, spike_generator_ref_poissons=None, poisson_firing_synapses=None, transient_poisson_firing_synapses=None, IF_curr_alpha=None, IF_curr_exp=None, IF_cond_alpha=None, IF_cond_exp=None, EIF_cond_exp_isfa_ista=None, 
EIF_cond_alpha_isfa_ista=None, HH_cond_exp=None, exp_cond_synapses=None, alpha_cond_synapses=None, exp_curr_synapses=None, alpha_curr_synapses=None, SpikeSourcePoisson=None, networks=None, ComponentType=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + includes=None, + extracellular_properties=None, + intracellular_properties=None, + morphology=None, + ion_channel=None, + ion_channel_hhs=None, + ion_channel_v_shifts=None, + ion_channel_kses=None, + decaying_pool_concentration_models=None, + fixed_factor_concentration_models=None, + alpha_current_synapses=None, + alpha_synapses=None, + exp_one_synapses=None, + exp_two_synapses=None, + exp_three_synapses=None, + blocking_plastic_synapses=None, + double_synapses=None, + gap_junctions=None, + silent_synapses=None, + linear_graded_synapses=None, + graded_synapses=None, + biophysical_properties=None, + cells=None, + cell2_ca_poolses=None, + base_cells=None, + iaf_tau_cells=None, + iaf_tau_ref_cells=None, + iaf_cells=None, + iaf_ref_cells=None, + izhikevich_cells=None, + izhikevich2007_cells=None, + ad_ex_ia_f_cells=None, + fitz_hugh_nagumo_cells=None, + fitz_hugh_nagumo1969_cells=None, + pinsky_rinzel_ca3_cells=None, + pulse_generators=None, + pulse_generator_dls=None, + sine_generators=None, + sine_generator_dls=None, + ramp_generators=None, + ramp_generator_dls=None, + compound_inputs=None, + compound_input_dls=None, + voltage_clamps=None, + voltage_clamp_triples=None, + spike_arrays=None, + timed_synaptic_inputs=None, + spike_generators=None, + spike_generator_randoms=None, + spike_generator_poissons=None, + spike_generator_ref_poissons=None, + poisson_firing_synapses=None, + transient_poisson_firing_synapses=None, + IF_curr_alpha=None, + IF_curr_exp=None, + IF_cond_alpha=None, + IF_cond_exp=None, + EIF_cond_exp_isfa_ista=None, + EIF_cond_alpha_isfa_ista=None, + HH_cond_exp=None, + exp_cond_synapses=None, + 
alpha_cond_synapses=None, + exp_curr_synapses=None, + alpha_curr_synapses=None, + SpikeSourcePoisson=None, + networks=None, + ComponentType=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(NeuroMLDocument, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("NeuroMLDocument"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) if includes is None: self.includes = [] else: self.includes = includes + self.includes_nsprefix_ = None if extracellular_properties is None: self.extracellular_properties = [] else: self.extracellular_properties = extracellular_properties + self.extracellular_properties_nsprefix_ = None if intracellular_properties is None: self.intracellular_properties = [] else: self.intracellular_properties = intracellular_properties + self.intracellular_properties_nsprefix_ = None if morphology is None: self.morphology = [] else: self.morphology = morphology + self.morphology_nsprefix_ = None if ion_channel is None: self.ion_channel = [] else: self.ion_channel = ion_channel + self.ion_channel_nsprefix_ = None if ion_channel_hhs is None: self.ion_channel_hhs = [] else: self.ion_channel_hhs = ion_channel_hhs + self.ion_channel_hhs_nsprefix_ = None if ion_channel_v_shifts is None: self.ion_channel_v_shifts = [] else: self.ion_channel_v_shifts = ion_channel_v_shifts + self.ion_channel_v_shifts_nsprefix_ = None if ion_channel_kses is None: self.ion_channel_kses = [] else: self.ion_channel_kses = ion_channel_kses + self.ion_channel_kses_nsprefix_ = None if decaying_pool_concentration_models is None: self.decaying_pool_concentration_models = [] else: self.decaying_pool_concentration_models = decaying_pool_concentration_models + 
self.decaying_pool_concentration_models_nsprefix_ = None if fixed_factor_concentration_models is None: self.fixed_factor_concentration_models = [] else: self.fixed_factor_concentration_models = fixed_factor_concentration_models + self.fixed_factor_concentration_models_nsprefix_ = None if alpha_current_synapses is None: self.alpha_current_synapses = [] else: self.alpha_current_synapses = alpha_current_synapses + self.alpha_current_synapses_nsprefix_ = None if alpha_synapses is None: self.alpha_synapses = [] else: self.alpha_synapses = alpha_synapses + self.alpha_synapses_nsprefix_ = None if exp_one_synapses is None: self.exp_one_synapses = [] else: self.exp_one_synapses = exp_one_synapses + self.exp_one_synapses_nsprefix_ = None if exp_two_synapses is None: self.exp_two_synapses = [] else: self.exp_two_synapses = exp_two_synapses + self.exp_two_synapses_nsprefix_ = None if exp_three_synapses is None: self.exp_three_synapses = [] else: self.exp_three_synapses = exp_three_synapses + self.exp_three_synapses_nsprefix_ = None if blocking_plastic_synapses is None: self.blocking_plastic_synapses = [] else: self.blocking_plastic_synapses = blocking_plastic_synapses + self.blocking_plastic_synapses_nsprefix_ = None if double_synapses is None: self.double_synapses = [] else: self.double_synapses = double_synapses + self.double_synapses_nsprefix_ = None if gap_junctions is None: self.gap_junctions = [] else: self.gap_junctions = gap_junctions + self.gap_junctions_nsprefix_ = None if silent_synapses is None: self.silent_synapses = [] else: self.silent_synapses = silent_synapses + self.silent_synapses_nsprefix_ = None if linear_graded_synapses is None: self.linear_graded_synapses = [] else: self.linear_graded_synapses = linear_graded_synapses + self.linear_graded_synapses_nsprefix_ = None if graded_synapses is None: self.graded_synapses = [] else: self.graded_synapses = graded_synapses + self.graded_synapses_nsprefix_ = None if biophysical_properties is None: 
self.biophysical_properties = [] else: self.biophysical_properties = biophysical_properties + self.biophysical_properties_nsprefix_ = None if cells is None: self.cells = [] else: self.cells = cells + self.cells_nsprefix_ = None if cell2_ca_poolses is None: self.cell2_ca_poolses = [] else: self.cell2_ca_poolses = cell2_ca_poolses + self.cell2_ca_poolses_nsprefix_ = None if base_cells is None: self.base_cells = [] else: self.base_cells = base_cells + self.base_cells_nsprefix_ = None if iaf_tau_cells is None: self.iaf_tau_cells = [] else: self.iaf_tau_cells = iaf_tau_cells + self.iaf_tau_cells_nsprefix_ = None if iaf_tau_ref_cells is None: self.iaf_tau_ref_cells = [] else: self.iaf_tau_ref_cells = iaf_tau_ref_cells + self.iaf_tau_ref_cells_nsprefix_ = None if iaf_cells is None: self.iaf_cells = [] else: self.iaf_cells = iaf_cells + self.iaf_cells_nsprefix_ = None if iaf_ref_cells is None: self.iaf_ref_cells = [] else: self.iaf_ref_cells = iaf_ref_cells + self.iaf_ref_cells_nsprefix_ = None if izhikevich_cells is None: self.izhikevich_cells = [] else: self.izhikevich_cells = izhikevich_cells + self.izhikevich_cells_nsprefix_ = None if izhikevich2007_cells is None: self.izhikevich2007_cells = [] else: self.izhikevich2007_cells = izhikevich2007_cells + self.izhikevich2007_cells_nsprefix_ = None if ad_ex_ia_f_cells is None: self.ad_ex_ia_f_cells = [] else: self.ad_ex_ia_f_cells = ad_ex_ia_f_cells + self.ad_ex_ia_f_cells_nsprefix_ = None if fitz_hugh_nagumo_cells is None: self.fitz_hugh_nagumo_cells = [] else: self.fitz_hugh_nagumo_cells = fitz_hugh_nagumo_cells + self.fitz_hugh_nagumo_cells_nsprefix_ = None if fitz_hugh_nagumo1969_cells is None: self.fitz_hugh_nagumo1969_cells = [] else: self.fitz_hugh_nagumo1969_cells = fitz_hugh_nagumo1969_cells + self.fitz_hugh_nagumo1969_cells_nsprefix_ = None if pinsky_rinzel_ca3_cells is None: self.pinsky_rinzel_ca3_cells = [] else: self.pinsky_rinzel_ca3_cells = pinsky_rinzel_ca3_cells + self.pinsky_rinzel_ca3_cells_nsprefix_ = 
None if pulse_generators is None: self.pulse_generators = [] else: self.pulse_generators = pulse_generators + self.pulse_generators_nsprefix_ = None if pulse_generator_dls is None: self.pulse_generator_dls = [] else: self.pulse_generator_dls = pulse_generator_dls + self.pulse_generator_dls_nsprefix_ = None if sine_generators is None: self.sine_generators = [] else: self.sine_generators = sine_generators + self.sine_generators_nsprefix_ = None if sine_generator_dls is None: self.sine_generator_dls = [] else: self.sine_generator_dls = sine_generator_dls + self.sine_generator_dls_nsprefix_ = None if ramp_generators is None: self.ramp_generators = [] else: self.ramp_generators = ramp_generators + self.ramp_generators_nsprefix_ = None if ramp_generator_dls is None: self.ramp_generator_dls = [] else: self.ramp_generator_dls = ramp_generator_dls + self.ramp_generator_dls_nsprefix_ = None if compound_inputs is None: self.compound_inputs = [] else: self.compound_inputs = compound_inputs + self.compound_inputs_nsprefix_ = None if compound_input_dls is None: self.compound_input_dls = [] else: self.compound_input_dls = compound_input_dls + self.compound_input_dls_nsprefix_ = None if voltage_clamps is None: self.voltage_clamps = [] else: self.voltage_clamps = voltage_clamps + self.voltage_clamps_nsprefix_ = None if voltage_clamp_triples is None: self.voltage_clamp_triples = [] else: self.voltage_clamp_triples = voltage_clamp_triples + self.voltage_clamp_triples_nsprefix_ = None if spike_arrays is None: self.spike_arrays = [] else: self.spike_arrays = spike_arrays + self.spike_arrays_nsprefix_ = None if timed_synaptic_inputs is None: self.timed_synaptic_inputs = [] else: self.timed_synaptic_inputs = timed_synaptic_inputs + self.timed_synaptic_inputs_nsprefix_ = None if spike_generators is None: self.spike_generators = [] else: self.spike_generators = spike_generators + self.spike_generators_nsprefix_ = None if spike_generator_randoms is None: self.spike_generator_randoms = [] 
else: self.spike_generator_randoms = spike_generator_randoms + self.spike_generator_randoms_nsprefix_ = None if spike_generator_poissons is None: self.spike_generator_poissons = [] else: self.spike_generator_poissons = spike_generator_poissons + self.spike_generator_poissons_nsprefix_ = None if spike_generator_ref_poissons is None: self.spike_generator_ref_poissons = [] else: self.spike_generator_ref_poissons = spike_generator_ref_poissons + self.spike_generator_ref_poissons_nsprefix_ = None if poisson_firing_synapses is None: self.poisson_firing_synapses = [] else: self.poisson_firing_synapses = poisson_firing_synapses + self.poisson_firing_synapses_nsprefix_ = None if transient_poisson_firing_synapses is None: self.transient_poisson_firing_synapses = [] else: self.transient_poisson_firing_synapses = transient_poisson_firing_synapses + self.transient_poisson_firing_synapses_nsprefix_ = None if IF_curr_alpha is None: self.IF_curr_alpha = [] else: self.IF_curr_alpha = IF_curr_alpha + self.IF_curr_alpha_nsprefix_ = None if IF_curr_exp is None: self.IF_curr_exp = [] else: self.IF_curr_exp = IF_curr_exp + self.IF_curr_exp_nsprefix_ = None if IF_cond_alpha is None: self.IF_cond_alpha = [] else: self.IF_cond_alpha = IF_cond_alpha + self.IF_cond_alpha_nsprefix_ = None if IF_cond_exp is None: self.IF_cond_exp = [] else: self.IF_cond_exp = IF_cond_exp + self.IF_cond_exp_nsprefix_ = None if EIF_cond_exp_isfa_ista is None: self.EIF_cond_exp_isfa_ista = [] else: self.EIF_cond_exp_isfa_ista = EIF_cond_exp_isfa_ista + self.EIF_cond_exp_isfa_ista_nsprefix_ = None if EIF_cond_alpha_isfa_ista is None: self.EIF_cond_alpha_isfa_ista = [] else: self.EIF_cond_alpha_isfa_ista = EIF_cond_alpha_isfa_ista + self.EIF_cond_alpha_isfa_ista_nsprefix_ = None if HH_cond_exp is None: self.HH_cond_exp = [] else: self.HH_cond_exp = HH_cond_exp + self.HH_cond_exp_nsprefix_ = None if exp_cond_synapses is None: self.exp_cond_synapses = [] else: self.exp_cond_synapses = exp_cond_synapses + 
self.exp_cond_synapses_nsprefix_ = None if alpha_cond_synapses is None: self.alpha_cond_synapses = [] else: self.alpha_cond_synapses = alpha_cond_synapses + self.alpha_cond_synapses_nsprefix_ = None if exp_curr_synapses is None: self.exp_curr_synapses = [] else: self.exp_curr_synapses = exp_curr_synapses + self.exp_curr_synapses_nsprefix_ = None if alpha_curr_synapses is None: self.alpha_curr_synapses = [] else: self.alpha_curr_synapses = alpha_curr_synapses + self.alpha_curr_synapses_nsprefix_ = None if SpikeSourcePoisson is None: self.SpikeSourcePoisson = [] else: self.SpikeSourcePoisson = SpikeSourcePoisson + self.SpikeSourcePoisson_nsprefix_ = None if networks is None: self.networks = [] else: self.networks = networks + self.networks_nsprefix_ = None if ComponentType is None: self.ComponentType = [] else: self.ComponentType = ComponentType + self.ComponentType_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, NeuroMLDocument) + subclass = getSubclassFromModule_(CurrentSubclassModule_, NeuroMLDocument) if subclass is not None: return subclass(*args_, **kwargs_) if NeuroMLDocument.subclass: return NeuroMLDocument.subclass(*args_, **kwargs_) else: return NeuroMLDocument(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - self.includes or - self.extracellular_properties or - self.intracellular_properties or - self.morphology or - self.ion_channel or - self.ion_channel_hhs or - self.ion_channel_v_shifts or - self.ion_channel_kses or - self.decaying_pool_concentration_models or - self.fixed_factor_concentration_models or - self.alpha_current_synapses or - self.alpha_synapses or - self.exp_one_synapses or - self.exp_two_synapses or - self.exp_three_synapses or - self.blocking_plastic_synapses or - self.double_synapses or - self.gap_junctions or - self.silent_synapses or - self.linear_graded_synapses or - self.graded_synapses or - 
self.biophysical_properties or - self.cells or - self.cell2_ca_poolses or - self.base_cells or - self.iaf_tau_cells or - self.iaf_tau_ref_cells or - self.iaf_cells or - self.iaf_ref_cells or - self.izhikevich_cells or - self.izhikevich2007_cells or - self.ad_ex_ia_f_cells or - self.fitz_hugh_nagumo_cells or - self.fitz_hugh_nagumo1969_cells or - self.pinsky_rinzel_ca3_cells or - self.pulse_generators or - self.pulse_generator_dls or - self.sine_generators or - self.sine_generator_dls or - self.ramp_generators or - self.ramp_generator_dls or - self.compound_inputs or - self.compound_input_dls or - self.voltage_clamps or - self.voltage_clamp_triples or - self.spike_arrays or - self.timed_synaptic_inputs or - self.spike_generators or - self.spike_generator_randoms or - self.spike_generator_poissons or - self.spike_generator_ref_poissons or - self.poisson_firing_synapses or - self.transient_poisson_firing_synapses or - self.IF_curr_alpha or - self.IF_curr_exp or - self.IF_cond_alpha or - self.IF_cond_exp or - self.EIF_cond_exp_isfa_ista or - self.EIF_cond_alpha_isfa_ista or - self.HH_cond_exp or - self.exp_cond_synapses or - self.alpha_cond_synapses or - self.exp_curr_synapses or - self.alpha_curr_synapses or - self.SpikeSourcePoisson or - self.networks or - self.ComponentType or - super(NeuroMLDocument, self).hasContent_() + + def _hasContent(self): + if ( + self.includes + or self.extracellular_properties + or self.intracellular_properties + or self.morphology + or self.ion_channel + or self.ion_channel_hhs + or self.ion_channel_v_shifts + or self.ion_channel_kses + or self.decaying_pool_concentration_models + or self.fixed_factor_concentration_models + or self.alpha_current_synapses + or self.alpha_synapses + or self.exp_one_synapses + or self.exp_two_synapses + or self.exp_three_synapses + or self.blocking_plastic_synapses + or self.double_synapses + or self.gap_junctions + or self.silent_synapses + or self.linear_graded_synapses + or self.graded_synapses + or 
self.biophysical_properties + or self.cells + or self.cell2_ca_poolses + or self.base_cells + or self.iaf_tau_cells + or self.iaf_tau_ref_cells + or self.iaf_cells + or self.iaf_ref_cells + or self.izhikevich_cells + or self.izhikevich2007_cells + or self.ad_ex_ia_f_cells + or self.fitz_hugh_nagumo_cells + or self.fitz_hugh_nagumo1969_cells + or self.pinsky_rinzel_ca3_cells + or self.pulse_generators + or self.pulse_generator_dls + or self.sine_generators + or self.sine_generator_dls + or self.ramp_generators + or self.ramp_generator_dls + or self.compound_inputs + or self.compound_input_dls + or self.voltage_clamps + or self.voltage_clamp_triples + or self.spike_arrays + or self.timed_synaptic_inputs + or self.spike_generators + or self.spike_generator_randoms + or self.spike_generator_poissons + or self.spike_generator_ref_poissons + or self.poisson_firing_synapses + or self.transient_poisson_firing_synapses + or self.IF_curr_alpha + or self.IF_curr_exp + or self.IF_cond_alpha + or self.IF_cond_exp + or self.EIF_cond_exp_isfa_ista + or self.EIF_cond_alpha_isfa_ista + or self.HH_cond_exp + or self.exp_cond_synapses + or self.alpha_cond_synapses + or self.exp_curr_synapses + or self.alpha_curr_synapses + or self.SpikeSourcePoisson + or self.networks + or self.ComponentType + or super(NeuroMLDocument, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NeuroMLDocument', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('NeuroMLDocument') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="NeuroMLDocument", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("NeuroMLDocument") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + 
eol_ = "" + if self.original_tagname_ is not None and name_ == "NeuroMLDocument": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='NeuroMLDocument') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='NeuroMLDocument', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="NeuroMLDocument" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="NeuroMLDocument", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='NeuroMLDocument'): - super(NeuroMLDocument, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='NeuroMLDocument') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='NeuroMLDocument', fromsubclass_=False, pretty_print=True): - super(NeuroMLDocument, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="NeuroMLDocument", + ): + 
super(NeuroMLDocument, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="NeuroMLDocument" + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="NeuroMLDocument", + fromsubclass_=False, + pretty_print=True, + ): + super(NeuroMLDocument, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for include_ in self.includes: - include_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='include', pretty_print=pretty_print) + namespaceprefix_ = ( + self.includes_nsprefix_ + ":" + if (UseCapturedNS_ and self.includes_nsprefix_) + else "" + ) + include_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="include", + pretty_print=pretty_print, + ) for extracellularProperties_ in self.extracellular_properties: - extracellularProperties_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='extracellularProperties', pretty_print=pretty_print) + namespaceprefix_ = ( + self.extracellular_properties_nsprefix_ + ":" + if (UseCapturedNS_ and self.extracellular_properties_nsprefix_) + else "" + ) + extracellularProperties_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="extracellularProperties", + pretty_print=pretty_print, + ) for intracellularProperties_ in self.intracellular_properties: - intracellularProperties_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='intracellularProperties', pretty_print=pretty_print) + namespaceprefix_ = ( + self.intracellular_properties_nsprefix_ + ":" + if (UseCapturedNS_ and self.intracellular_properties_nsprefix_) + else "" + ) + intracellularProperties_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="intracellularProperties", + 
pretty_print=pretty_print, + ) for morphology_ in self.morphology: - morphology_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='morphology', pretty_print=pretty_print) + namespaceprefix_ = ( + self.morphology_nsprefix_ + ":" + if (UseCapturedNS_ and self.morphology_nsprefix_) + else "" + ) + morphology_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="morphology", + pretty_print=pretty_print, + ) for ionChannel_ in self.ion_channel: - ionChannel_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ionChannel', pretty_print=pretty_print) + namespaceprefix_ = ( + self.ion_channel_nsprefix_ + ":" + if (UseCapturedNS_ and self.ion_channel_nsprefix_) + else "" + ) + ionChannel_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="ionChannel", + pretty_print=pretty_print, + ) for ionChannelHH_ in self.ion_channel_hhs: - ionChannelHH_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ionChannelHH', pretty_print=pretty_print) + namespaceprefix_ = ( + self.ion_channel_hhs_nsprefix_ + ":" + if (UseCapturedNS_ and self.ion_channel_hhs_nsprefix_) + else "" + ) + ionChannelHH_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="ionChannelHH", + pretty_print=pretty_print, + ) for ionChannelVShift_ in self.ion_channel_v_shifts: - ionChannelVShift_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ionChannelVShift', pretty_print=pretty_print) + namespaceprefix_ = ( + self.ion_channel_v_shifts_nsprefix_ + ":" + if (UseCapturedNS_ and self.ion_channel_v_shifts_nsprefix_) + else "" + ) + ionChannelVShift_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="ionChannelVShift", + pretty_print=pretty_print, + ) for ionChannelKS_ in self.ion_channel_kses: - ionChannelKS_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ionChannelKS', pretty_print=pretty_print) + namespaceprefix_ = ( + 
self.ion_channel_kses_nsprefix_ + ":" + if (UseCapturedNS_ and self.ion_channel_kses_nsprefix_) + else "" + ) + ionChannelKS_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="ionChannelKS", + pretty_print=pretty_print, + ) for decayingPoolConcentrationModel_ in self.decaying_pool_concentration_models: - decayingPoolConcentrationModel_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='decayingPoolConcentrationModel', pretty_print=pretty_print) + namespaceprefix_ = ( + self.decaying_pool_concentration_models_nsprefix_ + ":" + if ( + UseCapturedNS_ and self.decaying_pool_concentration_models_nsprefix_ + ) + else "" + ) + decayingPoolConcentrationModel_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="decayingPoolConcentrationModel", + pretty_print=pretty_print, + ) for fixedFactorConcentrationModel_ in self.fixed_factor_concentration_models: - fixedFactorConcentrationModel_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='fixedFactorConcentrationModel', pretty_print=pretty_print) + namespaceprefix_ = ( + self.fixed_factor_concentration_models_nsprefix_ + ":" + if (UseCapturedNS_ and self.fixed_factor_concentration_models_nsprefix_) + else "" + ) + fixedFactorConcentrationModel_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="fixedFactorConcentrationModel", + pretty_print=pretty_print, + ) for alphaCurrentSynapse_ in self.alpha_current_synapses: - alphaCurrentSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='alphaCurrentSynapse', pretty_print=pretty_print) + namespaceprefix_ = ( + self.alpha_current_synapses_nsprefix_ + ":" + if (UseCapturedNS_ and self.alpha_current_synapses_nsprefix_) + else "" + ) + alphaCurrentSynapse_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="alphaCurrentSynapse", + pretty_print=pretty_print, + ) for alphaSynapse_ in self.alpha_synapses: - alphaSynapse_.export(outfile, 
level, namespaceprefix_, namespacedef_='', name_='alphaSynapse', pretty_print=pretty_print) + namespaceprefix_ = ( + self.alpha_synapses_nsprefix_ + ":" + if (UseCapturedNS_ and self.alpha_synapses_nsprefix_) + else "" + ) + alphaSynapse_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="alphaSynapse", + pretty_print=pretty_print, + ) for expOneSynapse_ in self.exp_one_synapses: - expOneSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='expOneSynapse', pretty_print=pretty_print) + namespaceprefix_ = ( + self.exp_one_synapses_nsprefix_ + ":" + if (UseCapturedNS_ and self.exp_one_synapses_nsprefix_) + else "" + ) + expOneSynapse_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="expOneSynapse", + pretty_print=pretty_print, + ) for expTwoSynapse_ in self.exp_two_synapses: - expTwoSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='expTwoSynapse', pretty_print=pretty_print) + namespaceprefix_ = ( + self.exp_two_synapses_nsprefix_ + ":" + if (UseCapturedNS_ and self.exp_two_synapses_nsprefix_) + else "" + ) + expTwoSynapse_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="expTwoSynapse", + pretty_print=pretty_print, + ) for expThreeSynapse_ in self.exp_three_synapses: - expThreeSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='expThreeSynapse', pretty_print=pretty_print) + namespaceprefix_ = ( + self.exp_three_synapses_nsprefix_ + ":" + if (UseCapturedNS_ and self.exp_three_synapses_nsprefix_) + else "" + ) + expThreeSynapse_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="expThreeSynapse", + pretty_print=pretty_print, + ) for blockingPlasticSynapse_ in self.blocking_plastic_synapses: - blockingPlasticSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='blockingPlasticSynapse', pretty_print=pretty_print) + namespaceprefix_ = ( + self.blocking_plastic_synapses_nsprefix_ 
+ ":" + if (UseCapturedNS_ and self.blocking_plastic_synapses_nsprefix_) + else "" + ) + blockingPlasticSynapse_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="blockingPlasticSynapse", + pretty_print=pretty_print, + ) for doubleSynapse_ in self.double_synapses: - doubleSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='doubleSynapse', pretty_print=pretty_print) + namespaceprefix_ = ( + self.double_synapses_nsprefix_ + ":" + if (UseCapturedNS_ and self.double_synapses_nsprefix_) + else "" + ) + doubleSynapse_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="doubleSynapse", + pretty_print=pretty_print, + ) for gapJunction_ in self.gap_junctions: - gapJunction_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gapJunction', pretty_print=pretty_print) + namespaceprefix_ = ( + self.gap_junctions_nsprefix_ + ":" + if (UseCapturedNS_ and self.gap_junctions_nsprefix_) + else "" + ) + gapJunction_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="gapJunction", + pretty_print=pretty_print, + ) for silentSynapse_ in self.silent_synapses: - silentSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='silentSynapse', pretty_print=pretty_print) + namespaceprefix_ = ( + self.silent_synapses_nsprefix_ + ":" + if (UseCapturedNS_ and self.silent_synapses_nsprefix_) + else "" + ) + silentSynapse_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="silentSynapse", + pretty_print=pretty_print, + ) for linearGradedSynapse_ in self.linear_graded_synapses: - linearGradedSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='linearGradedSynapse', pretty_print=pretty_print) + namespaceprefix_ = ( + self.linear_graded_synapses_nsprefix_ + ":" + if (UseCapturedNS_ and self.linear_graded_synapses_nsprefix_) + else "" + ) + linearGradedSynapse_.export( + outfile, + level, + namespaceprefix_, + 
namespacedef_="", + name_="linearGradedSynapse", + pretty_print=pretty_print, + ) for gradedSynapse_ in self.graded_synapses: - gradedSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gradedSynapse', pretty_print=pretty_print) + namespaceprefix_ = ( + self.graded_synapses_nsprefix_ + ":" + if (UseCapturedNS_ and self.graded_synapses_nsprefix_) + else "" + ) + gradedSynapse_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="gradedSynapse", + pretty_print=pretty_print, + ) for biophysicalProperties_ in self.biophysical_properties: - biophysicalProperties_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='biophysicalProperties', pretty_print=pretty_print) + namespaceprefix_ = ( + self.biophysical_properties_nsprefix_ + ":" + if (UseCapturedNS_ and self.biophysical_properties_nsprefix_) + else "" + ) + biophysicalProperties_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="biophysicalProperties", + pretty_print=pretty_print, + ) for cell_ in self.cells: - cell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='cell', pretty_print=pretty_print) + namespaceprefix_ = ( + self.cells_nsprefix_ + ":" + if (UseCapturedNS_ and self.cells_nsprefix_) + else "" + ) + cell_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="cell", + pretty_print=pretty_print, + ) for cell2CaPools_ in self.cell2_ca_poolses: - cell2CaPools_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='cell2CaPools', pretty_print=pretty_print) + namespaceprefix_ = ( + self.cell2_ca_poolses_nsprefix_ + ":" + if (UseCapturedNS_ and self.cell2_ca_poolses_nsprefix_) + else "" + ) + cell2CaPools_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="cell2CaPools", + pretty_print=pretty_print, + ) for baseCell_ in self.base_cells: - baseCell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='baseCell', pretty_print=pretty_print) 
+ namespaceprefix_ = ( + self.base_cells_nsprefix_ + ":" + if (UseCapturedNS_ and self.base_cells_nsprefix_) + else "" + ) + baseCell_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="baseCell", + pretty_print=pretty_print, + ) for iafTauCell_ in self.iaf_tau_cells: - iafTauCell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='iafTauCell', pretty_print=pretty_print) + namespaceprefix_ = ( + self.iaf_tau_cells_nsprefix_ + ":" + if (UseCapturedNS_ and self.iaf_tau_cells_nsprefix_) + else "" + ) + iafTauCell_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="iafTauCell", + pretty_print=pretty_print, + ) for iafTauRefCell_ in self.iaf_tau_ref_cells: - iafTauRefCell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='iafTauRefCell', pretty_print=pretty_print) + namespaceprefix_ = ( + self.iaf_tau_ref_cells_nsprefix_ + ":" + if (UseCapturedNS_ and self.iaf_tau_ref_cells_nsprefix_) + else "" + ) + iafTauRefCell_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="iafTauRefCell", + pretty_print=pretty_print, + ) for iafCell_ in self.iaf_cells: - iafCell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='iafCell', pretty_print=pretty_print) + namespaceprefix_ = ( + self.iaf_cells_nsprefix_ + ":" + if (UseCapturedNS_ and self.iaf_cells_nsprefix_) + else "" + ) + iafCell_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="iafCell", + pretty_print=pretty_print, + ) for iafRefCell_ in self.iaf_ref_cells: - iafRefCell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='iafRefCell', pretty_print=pretty_print) + namespaceprefix_ = ( + self.iaf_ref_cells_nsprefix_ + ":" + if (UseCapturedNS_ and self.iaf_ref_cells_nsprefix_) + else "" + ) + iafRefCell_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="iafRefCell", + pretty_print=pretty_print, + ) for izhikevichCell_ in 
self.izhikevich_cells: - izhikevichCell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='izhikevichCell', pretty_print=pretty_print) + namespaceprefix_ = ( + self.izhikevich_cells_nsprefix_ + ":" + if (UseCapturedNS_ and self.izhikevich_cells_nsprefix_) + else "" + ) + izhikevichCell_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="izhikevichCell", + pretty_print=pretty_print, + ) for izhikevich2007Cell_ in self.izhikevich2007_cells: - izhikevich2007Cell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='izhikevich2007Cell', pretty_print=pretty_print) + namespaceprefix_ = ( + self.izhikevich2007_cells_nsprefix_ + ":" + if (UseCapturedNS_ and self.izhikevich2007_cells_nsprefix_) + else "" + ) + izhikevich2007Cell_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="izhikevich2007Cell", + pretty_print=pretty_print, + ) for adExIaFCell_ in self.ad_ex_ia_f_cells: - adExIaFCell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='adExIaFCell', pretty_print=pretty_print) + namespaceprefix_ = ( + self.ad_ex_ia_f_cells_nsprefix_ + ":" + if (UseCapturedNS_ and self.ad_ex_ia_f_cells_nsprefix_) + else "" + ) + adExIaFCell_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="adExIaFCell", + pretty_print=pretty_print, + ) for fitzHughNagumoCell_ in self.fitz_hugh_nagumo_cells: - fitzHughNagumoCell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='fitzHughNagumoCell', pretty_print=pretty_print) + namespaceprefix_ = ( + self.fitz_hugh_nagumo_cells_nsprefix_ + ":" + if (UseCapturedNS_ and self.fitz_hugh_nagumo_cells_nsprefix_) + else "" + ) + fitzHughNagumoCell_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="fitzHughNagumoCell", + pretty_print=pretty_print, + ) for fitzHughNagumo1969Cell_ in self.fitz_hugh_nagumo1969_cells: - fitzHughNagumo1969Cell_.export(outfile, level, namespaceprefix_, namespacedef_='', 
name_='fitzHughNagumo1969Cell', pretty_print=pretty_print) + namespaceprefix_ = ( + self.fitz_hugh_nagumo1969_cells_nsprefix_ + ":" + if (UseCapturedNS_ and self.fitz_hugh_nagumo1969_cells_nsprefix_) + else "" + ) + fitzHughNagumo1969Cell_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="fitzHughNagumo1969Cell", + pretty_print=pretty_print, + ) for pinskyRinzelCA3Cell_ in self.pinsky_rinzel_ca3_cells: - pinskyRinzelCA3Cell_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='pinskyRinzelCA3Cell', pretty_print=pretty_print) + namespaceprefix_ = ( + self.pinsky_rinzel_ca3_cells_nsprefix_ + ":" + if (UseCapturedNS_ and self.pinsky_rinzel_ca3_cells_nsprefix_) + else "" + ) + pinskyRinzelCA3Cell_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="pinskyRinzelCA3Cell", + pretty_print=pretty_print, + ) for pulseGenerator_ in self.pulse_generators: - pulseGenerator_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='pulseGenerator', pretty_print=pretty_print) + namespaceprefix_ = ( + self.pulse_generators_nsprefix_ + ":" + if (UseCapturedNS_ and self.pulse_generators_nsprefix_) + else "" + ) + pulseGenerator_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="pulseGenerator", + pretty_print=pretty_print, + ) for pulseGeneratorDL_ in self.pulse_generator_dls: - pulseGeneratorDL_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='pulseGeneratorDL', pretty_print=pretty_print) + namespaceprefix_ = ( + self.pulse_generator_dls_nsprefix_ + ":" + if (UseCapturedNS_ and self.pulse_generator_dls_nsprefix_) + else "" + ) + pulseGeneratorDL_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="pulseGeneratorDL", + pretty_print=pretty_print, + ) for sineGenerator_ in self.sine_generators: - sineGenerator_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='sineGenerator', pretty_print=pretty_print) + namespaceprefix_ = ( + 
self.sine_generators_nsprefix_ + ":" + if (UseCapturedNS_ and self.sine_generators_nsprefix_) + else "" + ) + sineGenerator_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="sineGenerator", + pretty_print=pretty_print, + ) for sineGeneratorDL_ in self.sine_generator_dls: - sineGeneratorDL_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='sineGeneratorDL', pretty_print=pretty_print) + namespaceprefix_ = ( + self.sine_generator_dls_nsprefix_ + ":" + if (UseCapturedNS_ and self.sine_generator_dls_nsprefix_) + else "" + ) + sineGeneratorDL_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="sineGeneratorDL", + pretty_print=pretty_print, + ) for rampGenerator_ in self.ramp_generators: - rampGenerator_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='rampGenerator', pretty_print=pretty_print) + namespaceprefix_ = ( + self.ramp_generators_nsprefix_ + ":" + if (UseCapturedNS_ and self.ramp_generators_nsprefix_) + else "" + ) + rampGenerator_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="rampGenerator", + pretty_print=pretty_print, + ) for rampGeneratorDL_ in self.ramp_generator_dls: - rampGeneratorDL_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='rampGeneratorDL', pretty_print=pretty_print) + namespaceprefix_ = ( + self.ramp_generator_dls_nsprefix_ + ":" + if (UseCapturedNS_ and self.ramp_generator_dls_nsprefix_) + else "" + ) + rampGeneratorDL_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="rampGeneratorDL", + pretty_print=pretty_print, + ) for compoundInput_ in self.compound_inputs: - compoundInput_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='compoundInput', pretty_print=pretty_print) + namespaceprefix_ = ( + self.compound_inputs_nsprefix_ + ":" + if (UseCapturedNS_ and self.compound_inputs_nsprefix_) + else "" + ) + compoundInput_.export( + outfile, + level, + namespaceprefix_, + 
namespacedef_="", + name_="compoundInput", + pretty_print=pretty_print, + ) for compoundInputDL_ in self.compound_input_dls: - compoundInputDL_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='compoundInputDL', pretty_print=pretty_print) + namespaceprefix_ = ( + self.compound_input_dls_nsprefix_ + ":" + if (UseCapturedNS_ and self.compound_input_dls_nsprefix_) + else "" + ) + compoundInputDL_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="compoundInputDL", + pretty_print=pretty_print, + ) for voltageClamp_ in self.voltage_clamps: - voltageClamp_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='voltageClamp', pretty_print=pretty_print) + namespaceprefix_ = ( + self.voltage_clamps_nsprefix_ + ":" + if (UseCapturedNS_ and self.voltage_clamps_nsprefix_) + else "" + ) + voltageClamp_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="voltageClamp", + pretty_print=pretty_print, + ) for voltageClampTriple_ in self.voltage_clamp_triples: - voltageClampTriple_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='voltageClampTriple', pretty_print=pretty_print) + namespaceprefix_ = ( + self.voltage_clamp_triples_nsprefix_ + ":" + if (UseCapturedNS_ and self.voltage_clamp_triples_nsprefix_) + else "" + ) + voltageClampTriple_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="voltageClampTriple", + pretty_print=pretty_print, + ) for spikeArray_ in self.spike_arrays: - spikeArray_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='spikeArray', pretty_print=pretty_print) + namespaceprefix_ = ( + self.spike_arrays_nsprefix_ + ":" + if (UseCapturedNS_ and self.spike_arrays_nsprefix_) + else "" + ) + spikeArray_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="spikeArray", + pretty_print=pretty_print, + ) for timedSynapticInput_ in self.timed_synaptic_inputs: - timedSynapticInput_.export(outfile, level, 
namespaceprefix_, namespacedef_='', name_='timedSynapticInput', pretty_print=pretty_print) + namespaceprefix_ = ( + self.timed_synaptic_inputs_nsprefix_ + ":" + if (UseCapturedNS_ and self.timed_synaptic_inputs_nsprefix_) + else "" + ) + timedSynapticInput_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="timedSynapticInput", + pretty_print=pretty_print, + ) for spikeGenerator_ in self.spike_generators: - spikeGenerator_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='spikeGenerator', pretty_print=pretty_print) + namespaceprefix_ = ( + self.spike_generators_nsprefix_ + ":" + if (UseCapturedNS_ and self.spike_generators_nsprefix_) + else "" + ) + spikeGenerator_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="spikeGenerator", + pretty_print=pretty_print, + ) for spikeGeneratorRandom_ in self.spike_generator_randoms: - spikeGeneratorRandom_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='spikeGeneratorRandom', pretty_print=pretty_print) + namespaceprefix_ = ( + self.spike_generator_randoms_nsprefix_ + ":" + if (UseCapturedNS_ and self.spike_generator_randoms_nsprefix_) + else "" + ) + spikeGeneratorRandom_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="spikeGeneratorRandom", + pretty_print=pretty_print, + ) for spikeGeneratorPoisson_ in self.spike_generator_poissons: - spikeGeneratorPoisson_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='spikeGeneratorPoisson', pretty_print=pretty_print) + namespaceprefix_ = ( + self.spike_generator_poissons_nsprefix_ + ":" + if (UseCapturedNS_ and self.spike_generator_poissons_nsprefix_) + else "" + ) + spikeGeneratorPoisson_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="spikeGeneratorPoisson", + pretty_print=pretty_print, + ) for spikeGeneratorRefPoisson_ in self.spike_generator_ref_poissons: - spikeGeneratorRefPoisson_.export(outfile, level, namespaceprefix_, 
namespacedef_='', name_='spikeGeneratorRefPoisson', pretty_print=pretty_print) + namespaceprefix_ = ( + self.spike_generator_ref_poissons_nsprefix_ + ":" + if (UseCapturedNS_ and self.spike_generator_ref_poissons_nsprefix_) + else "" + ) + spikeGeneratorRefPoisson_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="spikeGeneratorRefPoisson", + pretty_print=pretty_print, + ) for poissonFiringSynapse_ in self.poisson_firing_synapses: - poissonFiringSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='poissonFiringSynapse', pretty_print=pretty_print) + namespaceprefix_ = ( + self.poisson_firing_synapses_nsprefix_ + ":" + if (UseCapturedNS_ and self.poisson_firing_synapses_nsprefix_) + else "" + ) + poissonFiringSynapse_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="poissonFiringSynapse", + pretty_print=pretty_print, + ) for transientPoissonFiringSynapse_ in self.transient_poisson_firing_synapses: - transientPoissonFiringSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='transientPoissonFiringSynapse', pretty_print=pretty_print) + namespaceprefix_ = ( + self.transient_poisson_firing_synapses_nsprefix_ + ":" + if (UseCapturedNS_ and self.transient_poisson_firing_synapses_nsprefix_) + else "" + ) + transientPoissonFiringSynapse_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="transientPoissonFiringSynapse", + pretty_print=pretty_print, + ) for IF_curr_alpha_ in self.IF_curr_alpha: - IF_curr_alpha_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='IF_curr_alpha', pretty_print=pretty_print) + namespaceprefix_ = ( + self.IF_curr_alpha_nsprefix_ + ":" + if (UseCapturedNS_ and self.IF_curr_alpha_nsprefix_) + else "" + ) + IF_curr_alpha_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="IF_curr_alpha", + pretty_print=pretty_print, + ) for IF_curr_exp_ in self.IF_curr_exp: - IF_curr_exp_.export(outfile, 
level, namespaceprefix_, namespacedef_='', name_='IF_curr_exp', pretty_print=pretty_print) + namespaceprefix_ = ( + self.IF_curr_exp_nsprefix_ + ":" + if (UseCapturedNS_ and self.IF_curr_exp_nsprefix_) + else "" + ) + IF_curr_exp_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="IF_curr_exp", + pretty_print=pretty_print, + ) for IF_cond_alpha_ in self.IF_cond_alpha: - IF_cond_alpha_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='IF_cond_alpha', pretty_print=pretty_print) + namespaceprefix_ = ( + self.IF_cond_alpha_nsprefix_ + ":" + if (UseCapturedNS_ and self.IF_cond_alpha_nsprefix_) + else "" + ) + IF_cond_alpha_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="IF_cond_alpha", + pretty_print=pretty_print, + ) for IF_cond_exp_ in self.IF_cond_exp: - IF_cond_exp_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='IF_cond_exp', pretty_print=pretty_print) + namespaceprefix_ = ( + self.IF_cond_exp_nsprefix_ + ":" + if (UseCapturedNS_ and self.IF_cond_exp_nsprefix_) + else "" + ) + IF_cond_exp_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="IF_cond_exp", + pretty_print=pretty_print, + ) for EIF_cond_exp_isfa_ista_ in self.EIF_cond_exp_isfa_ista: - EIF_cond_exp_isfa_ista_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='EIF_cond_exp_isfa_ista', pretty_print=pretty_print) + namespaceprefix_ = ( + self.EIF_cond_exp_isfa_ista_nsprefix_ + ":" + if (UseCapturedNS_ and self.EIF_cond_exp_isfa_ista_nsprefix_) + else "" + ) + EIF_cond_exp_isfa_ista_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="EIF_cond_exp_isfa_ista", + pretty_print=pretty_print, + ) for EIF_cond_alpha_isfa_ista_ in self.EIF_cond_alpha_isfa_ista: - EIF_cond_alpha_isfa_ista_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='EIF_cond_alpha_isfa_ista', pretty_print=pretty_print) + namespaceprefix_ = ( + 
self.EIF_cond_alpha_isfa_ista_nsprefix_ + ":" + if (UseCapturedNS_ and self.EIF_cond_alpha_isfa_ista_nsprefix_) + else "" + ) + EIF_cond_alpha_isfa_ista_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="EIF_cond_alpha_isfa_ista", + pretty_print=pretty_print, + ) for HH_cond_exp_ in self.HH_cond_exp: - HH_cond_exp_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='HH_cond_exp', pretty_print=pretty_print) + namespaceprefix_ = ( + self.HH_cond_exp_nsprefix_ + ":" + if (UseCapturedNS_ and self.HH_cond_exp_nsprefix_) + else "" + ) + HH_cond_exp_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="HH_cond_exp", + pretty_print=pretty_print, + ) for expCondSynapse_ in self.exp_cond_synapses: - expCondSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='expCondSynapse', pretty_print=pretty_print) + namespaceprefix_ = ( + self.exp_cond_synapses_nsprefix_ + ":" + if (UseCapturedNS_ and self.exp_cond_synapses_nsprefix_) + else "" + ) + expCondSynapse_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="expCondSynapse", + pretty_print=pretty_print, + ) for alphaCondSynapse_ in self.alpha_cond_synapses: - alphaCondSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='alphaCondSynapse', pretty_print=pretty_print) + namespaceprefix_ = ( + self.alpha_cond_synapses_nsprefix_ + ":" + if (UseCapturedNS_ and self.alpha_cond_synapses_nsprefix_) + else "" + ) + alphaCondSynapse_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="alphaCondSynapse", + pretty_print=pretty_print, + ) for expCurrSynapse_ in self.exp_curr_synapses: - expCurrSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='expCurrSynapse', pretty_print=pretty_print) + namespaceprefix_ = ( + self.exp_curr_synapses_nsprefix_ + ":" + if (UseCapturedNS_ and self.exp_curr_synapses_nsprefix_) + else "" + ) + expCurrSynapse_.export( + outfile, + 
level, + namespaceprefix_, + namespacedef_="", + name_="expCurrSynapse", + pretty_print=pretty_print, + ) for alphaCurrSynapse_ in self.alpha_curr_synapses: - alphaCurrSynapse_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='alphaCurrSynapse', pretty_print=pretty_print) + namespaceprefix_ = ( + self.alpha_curr_synapses_nsprefix_ + ":" + if (UseCapturedNS_ and self.alpha_curr_synapses_nsprefix_) + else "" + ) + alphaCurrSynapse_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="alphaCurrSynapse", + pretty_print=pretty_print, + ) for SpikeSourcePoisson_ in self.SpikeSourcePoisson: - SpikeSourcePoisson_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='SpikeSourcePoisson', pretty_print=pretty_print) + namespaceprefix_ = ( + self.SpikeSourcePoisson_nsprefix_ + ":" + if (UseCapturedNS_ and self.SpikeSourcePoisson_nsprefix_) + else "" + ) + SpikeSourcePoisson_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="SpikeSourcePoisson", + pretty_print=pretty_print, + ) for network_ in self.networks: - network_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='network', pretty_print=pretty_print) + namespaceprefix_ = ( + self.networks_nsprefix_ + ":" + if (UseCapturedNS_ and self.networks_nsprefix_) + else "" + ) + network_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="network", + pretty_print=pretty_print, + ) for ComponentType_ in self.ComponentType: - ComponentType_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ComponentType', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.ComponentType_nsprefix_ + ":" + if (UseCapturedNS_ and self.ComponentType_nsprefix_) + else "" + ) + ComponentType_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="ComponentType", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = 
gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(NeuroMLDocument, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'include': + + def _buildAttributes(self, node, attrs, already_processed): + super(NeuroMLDocument, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "include": obj_ = IncludeType.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.includes.append(obj_) - obj_.original_tagname_ = 'include' - elif nodeName_ == 'extracellularProperties': + obj_.original_tagname_ = "include" + elif nodeName_ == "extracellularProperties": obj_ = ExtracellularProperties.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.extracellular_properties.append(obj_) - obj_.original_tagname_ = 'extracellularProperties' - elif nodeName_ == 'intracellularProperties': + obj_.original_tagname_ = "extracellularProperties" + elif nodeName_ == "intracellularProperties": class_obj_ = self.get_class_obj_(child_, IntracellularProperties) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.intracellular_properties.append(obj_) - obj_.original_tagname_ = 'intracellularProperties' - elif nodeName_ == 'morphology': + 
obj_.original_tagname_ = "intracellularProperties" + elif nodeName_ == "morphology": obj_ = Morphology.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.morphology.append(obj_) - obj_.original_tagname_ = 'morphology' - elif nodeName_ == 'ionChannel': + obj_.original_tagname_ = "morphology" + elif nodeName_ == "ionChannel": class_obj_ = self.get_class_obj_(child_, IonChannel) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.ion_channel.append(obj_) - obj_.original_tagname_ = 'ionChannel' - elif nodeName_ == 'ionChannelHH': + obj_.original_tagname_ = "ionChannel" + elif nodeName_ == "ionChannelHH": obj_ = IonChannelHH.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.ion_channel_hhs.append(obj_) - obj_.original_tagname_ = 'ionChannelHH' - elif nodeName_ == 'ionChannelVShift': + obj_.original_tagname_ = "ionChannelHH" + elif nodeName_ == "ionChannelVShift": obj_ = IonChannelVShift.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.ion_channel_v_shifts.append(obj_) - obj_.original_tagname_ = 'ionChannelVShift' - elif nodeName_ == 'ionChannelKS': + obj_.original_tagname_ = "ionChannelVShift" + elif nodeName_ == "ionChannelKS": obj_ = IonChannelKS.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.ion_channel_kses.append(obj_) - obj_.original_tagname_ = 'ionChannelKS' - elif nodeName_ == 'decayingPoolConcentrationModel': + obj_.original_tagname_ = "ionChannelKS" + elif nodeName_ == "decayingPoolConcentrationModel": class_obj_ = self.get_class_obj_(child_, DecayingPoolConcentrationModel) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.decaying_pool_concentration_models.append(obj_) - 
obj_.original_tagname_ = 'decayingPoolConcentrationModel' - elif nodeName_ == 'fixedFactorConcentrationModel': + obj_.original_tagname_ = "decayingPoolConcentrationModel" + elif nodeName_ == "fixedFactorConcentrationModel": obj_ = FixedFactorConcentrationModel.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.fixed_factor_concentration_models.append(obj_) - obj_.original_tagname_ = 'fixedFactorConcentrationModel' - elif nodeName_ == 'alphaCurrentSynapse': + obj_.original_tagname_ = "fixedFactorConcentrationModel" + elif nodeName_ == "alphaCurrentSynapse": obj_ = AlphaCurrentSynapse.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.alpha_current_synapses.append(obj_) - obj_.original_tagname_ = 'alphaCurrentSynapse' - elif nodeName_ == 'alphaSynapse': + obj_.original_tagname_ = "alphaCurrentSynapse" + elif nodeName_ == "alphaSynapse": obj_ = AlphaSynapse.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.alpha_synapses.append(obj_) - obj_.original_tagname_ = 'alphaSynapse' - elif nodeName_ == 'expOneSynapse': + obj_.original_tagname_ = "alphaSynapse" + elif nodeName_ == "expOneSynapse": obj_ = ExpOneSynapse.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.exp_one_synapses.append(obj_) - obj_.original_tagname_ = 'expOneSynapse' - elif nodeName_ == 'expTwoSynapse': + obj_.original_tagname_ = "expOneSynapse" + elif nodeName_ == "expTwoSynapse": class_obj_ = self.get_class_obj_(child_, ExpTwoSynapse) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.exp_two_synapses.append(obj_) - obj_.original_tagname_ = 'expTwoSynapse' - elif nodeName_ == 'expThreeSynapse': + obj_.original_tagname_ = "expTwoSynapse" + elif nodeName_ == "expThreeSynapse": obj_ = 
ExpThreeSynapse.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.exp_three_synapses.append(obj_) - obj_.original_tagname_ = 'expThreeSynapse' - elif nodeName_ == 'blockingPlasticSynapse': + obj_.original_tagname_ = "expThreeSynapse" + elif nodeName_ == "blockingPlasticSynapse": obj_ = BlockingPlasticSynapse.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.blocking_plastic_synapses.append(obj_) - obj_.original_tagname_ = 'blockingPlasticSynapse' - elif nodeName_ == 'doubleSynapse': + obj_.original_tagname_ = "blockingPlasticSynapse" + elif nodeName_ == "doubleSynapse": obj_ = DoubleSynapse.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.double_synapses.append(obj_) - obj_.original_tagname_ = 'doubleSynapse' - elif nodeName_ == 'gapJunction': + obj_.original_tagname_ = "doubleSynapse" + elif nodeName_ == "gapJunction": obj_ = GapJunction.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.gap_junctions.append(obj_) - obj_.original_tagname_ = 'gapJunction' - elif nodeName_ == 'silentSynapse': + obj_.original_tagname_ = "gapJunction" + elif nodeName_ == "silentSynapse": obj_ = SilentSynapse.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.silent_synapses.append(obj_) - obj_.original_tagname_ = 'silentSynapse' - elif nodeName_ == 'linearGradedSynapse': + obj_.original_tagname_ = "silentSynapse" + elif nodeName_ == "linearGradedSynapse": obj_ = LinearGradedSynapse.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.linear_graded_synapses.append(obj_) - obj_.original_tagname_ = 'linearGradedSynapse' - elif nodeName_ == 'gradedSynapse': + obj_.original_tagname_ = "linearGradedSynapse" + elif nodeName_ == "gradedSynapse": obj_ = 
GradedSynapse.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.graded_synapses.append(obj_) - obj_.original_tagname_ = 'gradedSynapse' - elif nodeName_ == 'biophysicalProperties': + obj_.original_tagname_ = "gradedSynapse" + elif nodeName_ == "biophysicalProperties": obj_ = BiophysicalProperties.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.biophysical_properties.append(obj_) - obj_.original_tagname_ = 'biophysicalProperties' - elif nodeName_ == 'cell': + obj_.original_tagname_ = "biophysicalProperties" + elif nodeName_ == "cell": class_obj_ = self.get_class_obj_(child_, Cell) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.cells.append(obj_) - obj_.original_tagname_ = 'cell' - elif nodeName_ == 'cell2CaPools': + obj_.original_tagname_ = "cell" + elif nodeName_ == "cell2CaPools": obj_ = Cell2CaPools.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.cell2_ca_poolses.append(obj_) - obj_.original_tagname_ = 'cell2CaPools' - elif nodeName_ == 'baseCell': + obj_.original_tagname_ = "cell2CaPools" + elif nodeName_ == "baseCell": class_obj_ = self.get_class_obj_(child_, BaseCell) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.base_cells.append(obj_) - obj_.original_tagname_ = 'baseCell' - elif nodeName_ == 'iafTauCell': + obj_.original_tagname_ = "baseCell" + elif nodeName_ == "iafTauCell": class_obj_ = self.get_class_obj_(child_, IafTauCell) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.iaf_tau_cells.append(obj_) - obj_.original_tagname_ = 'iafTauCell' - elif nodeName_ == 'iafTauRefCell': + obj_.original_tagname_ = "iafTauCell" + elif nodeName_ == "iafTauRefCell": obj_ 
= IafTauRefCell.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.iaf_tau_ref_cells.append(obj_) - obj_.original_tagname_ = 'iafTauRefCell' - elif nodeName_ == 'iafCell': + obj_.original_tagname_ = "iafTauRefCell" + elif nodeName_ == "iafCell": class_obj_ = self.get_class_obj_(child_, IafCell) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.iaf_cells.append(obj_) - obj_.original_tagname_ = 'iafCell' - elif nodeName_ == 'iafRefCell': + obj_.original_tagname_ = "iafCell" + elif nodeName_ == "iafRefCell": obj_ = IafRefCell.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.iaf_ref_cells.append(obj_) - obj_.original_tagname_ = 'iafRefCell' - elif nodeName_ == 'izhikevichCell': + obj_.original_tagname_ = "iafRefCell" + elif nodeName_ == "izhikevichCell": obj_ = IzhikevichCell.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.izhikevich_cells.append(obj_) - obj_.original_tagname_ = 'izhikevichCell' - elif nodeName_ == 'izhikevich2007Cell': + obj_.original_tagname_ = "izhikevichCell" + elif nodeName_ == "izhikevich2007Cell": obj_ = Izhikevich2007Cell.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.izhikevich2007_cells.append(obj_) - obj_.original_tagname_ = 'izhikevich2007Cell' - elif nodeName_ == 'adExIaFCell': + obj_.original_tagname_ = "izhikevich2007Cell" + elif nodeName_ == "adExIaFCell": obj_ = AdExIaFCell.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.ad_ex_ia_f_cells.append(obj_) - obj_.original_tagname_ = 'adExIaFCell' - elif nodeName_ == 'fitzHughNagumoCell': + obj_.original_tagname_ = "adExIaFCell" + elif nodeName_ == "fitzHughNagumoCell": obj_ = FitzHughNagumoCell.factory(parent_object_=self) - 
obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.fitz_hugh_nagumo_cells.append(obj_) - obj_.original_tagname_ = 'fitzHughNagumoCell' - elif nodeName_ == 'fitzHughNagumo1969Cell': + obj_.original_tagname_ = "fitzHughNagumoCell" + elif nodeName_ == "fitzHughNagumo1969Cell": obj_ = FitzHughNagumo1969Cell.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.fitz_hugh_nagumo1969_cells.append(obj_) - obj_.original_tagname_ = 'fitzHughNagumo1969Cell' - elif nodeName_ == 'pinskyRinzelCA3Cell': + obj_.original_tagname_ = "fitzHughNagumo1969Cell" + elif nodeName_ == "pinskyRinzelCA3Cell": obj_ = PinskyRinzelCA3Cell.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.pinsky_rinzel_ca3_cells.append(obj_) - obj_.original_tagname_ = 'pinskyRinzelCA3Cell' - elif nodeName_ == 'pulseGenerator': + obj_.original_tagname_ = "pinskyRinzelCA3Cell" + elif nodeName_ == "pulseGenerator": obj_ = PulseGenerator.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.pulse_generators.append(obj_) - obj_.original_tagname_ = 'pulseGenerator' - elif nodeName_ == 'pulseGeneratorDL': + obj_.original_tagname_ = "pulseGenerator" + elif nodeName_ == "pulseGeneratorDL": obj_ = PulseGeneratorDL.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.pulse_generator_dls.append(obj_) - obj_.original_tagname_ = 'pulseGeneratorDL' - elif nodeName_ == 'sineGenerator': + obj_.original_tagname_ = "pulseGeneratorDL" + elif nodeName_ == "sineGenerator": obj_ = SineGenerator.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.sine_generators.append(obj_) - obj_.original_tagname_ = 'sineGenerator' - elif nodeName_ == 'sineGeneratorDL': + obj_.original_tagname_ = "sineGenerator" + elif nodeName_ == "sineGeneratorDL": obj_ = 
SineGeneratorDL.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.sine_generator_dls.append(obj_) - obj_.original_tagname_ = 'sineGeneratorDL' - elif nodeName_ == 'rampGenerator': + obj_.original_tagname_ = "sineGeneratorDL" + elif nodeName_ == "rampGenerator": obj_ = RampGenerator.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.ramp_generators.append(obj_) - obj_.original_tagname_ = 'rampGenerator' - elif nodeName_ == 'rampGeneratorDL': + obj_.original_tagname_ = "rampGenerator" + elif nodeName_ == "rampGeneratorDL": obj_ = RampGeneratorDL.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.ramp_generator_dls.append(obj_) - obj_.original_tagname_ = 'rampGeneratorDL' - elif nodeName_ == 'compoundInput': + obj_.original_tagname_ = "rampGeneratorDL" + elif nodeName_ == "compoundInput": obj_ = CompoundInput.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.compound_inputs.append(obj_) - obj_.original_tagname_ = 'compoundInput' - elif nodeName_ == 'compoundInputDL': + obj_.original_tagname_ = "compoundInput" + elif nodeName_ == "compoundInputDL": obj_ = CompoundInputDL.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.compound_input_dls.append(obj_) - obj_.original_tagname_ = 'compoundInputDL' - elif nodeName_ == 'voltageClamp': + obj_.original_tagname_ = "compoundInputDL" + elif nodeName_ == "voltageClamp": obj_ = VoltageClamp.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.voltage_clamps.append(obj_) - obj_.original_tagname_ = 'voltageClamp' - elif nodeName_ == 'voltageClampTriple': + obj_.original_tagname_ = "voltageClamp" + elif nodeName_ == "voltageClampTriple": obj_ = VoltageClampTriple.factory(parent_object_=self) - 
obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.voltage_clamp_triples.append(obj_) - obj_.original_tagname_ = 'voltageClampTriple' - elif nodeName_ == 'spikeArray': + obj_.original_tagname_ = "voltageClampTriple" + elif nodeName_ == "spikeArray": obj_ = SpikeArray.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.spike_arrays.append(obj_) - obj_.original_tagname_ = 'spikeArray' - elif nodeName_ == 'timedSynapticInput': + obj_.original_tagname_ = "spikeArray" + elif nodeName_ == "timedSynapticInput": obj_ = TimedSynapticInput.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.timed_synaptic_inputs.append(obj_) - obj_.original_tagname_ = 'timedSynapticInput' - elif nodeName_ == 'spikeGenerator': + obj_.original_tagname_ = "timedSynapticInput" + elif nodeName_ == "spikeGenerator": obj_ = SpikeGenerator.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.spike_generators.append(obj_) - obj_.original_tagname_ = 'spikeGenerator' - elif nodeName_ == 'spikeGeneratorRandom': + obj_.original_tagname_ = "spikeGenerator" + elif nodeName_ == "spikeGeneratorRandom": obj_ = SpikeGeneratorRandom.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.spike_generator_randoms.append(obj_) - obj_.original_tagname_ = 'spikeGeneratorRandom' - elif nodeName_ == 'spikeGeneratorPoisson': + obj_.original_tagname_ = "spikeGeneratorRandom" + elif nodeName_ == "spikeGeneratorPoisson": class_obj_ = self.get_class_obj_(child_, SpikeGeneratorPoisson) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.spike_generator_poissons.append(obj_) - obj_.original_tagname_ = 'spikeGeneratorPoisson' - elif nodeName_ == 'spikeGeneratorRefPoisson': + obj_.original_tagname_ = 
"spikeGeneratorPoisson" + elif nodeName_ == "spikeGeneratorRefPoisson": obj_ = SpikeGeneratorRefPoisson.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.spike_generator_ref_poissons.append(obj_) - obj_.original_tagname_ = 'spikeGeneratorRefPoisson' - elif nodeName_ == 'poissonFiringSynapse': + obj_.original_tagname_ = "spikeGeneratorRefPoisson" + elif nodeName_ == "poissonFiringSynapse": obj_ = PoissonFiringSynapse.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.poisson_firing_synapses.append(obj_) - obj_.original_tagname_ = 'poissonFiringSynapse' - elif nodeName_ == 'transientPoissonFiringSynapse': + obj_.original_tagname_ = "poissonFiringSynapse" + elif nodeName_ == "transientPoissonFiringSynapse": obj_ = TransientPoissonFiringSynapse.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.transient_poisson_firing_synapses.append(obj_) - obj_.original_tagname_ = 'transientPoissonFiringSynapse' - elif nodeName_ == 'IF_curr_alpha': + obj_.original_tagname_ = "transientPoissonFiringSynapse" + elif nodeName_ == "IF_curr_alpha": obj_ = IF_curr_alpha.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.IF_curr_alpha.append(obj_) - obj_.original_tagname_ = 'IF_curr_alpha' - elif nodeName_ == 'IF_curr_exp': + obj_.original_tagname_ = "IF_curr_alpha" + elif nodeName_ == "IF_curr_exp": obj_ = IF_curr_exp.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.IF_curr_exp.append(obj_) - obj_.original_tagname_ = 'IF_curr_exp' - elif nodeName_ == 'IF_cond_alpha': + obj_.original_tagname_ = "IF_curr_exp" + elif nodeName_ == "IF_cond_alpha": obj_ = IF_cond_alpha.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.IF_cond_alpha.append(obj_) - 
obj_.original_tagname_ = 'IF_cond_alpha' - elif nodeName_ == 'IF_cond_exp': + obj_.original_tagname_ = "IF_cond_alpha" + elif nodeName_ == "IF_cond_exp": obj_ = IF_cond_exp.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.IF_cond_exp.append(obj_) - obj_.original_tagname_ = 'IF_cond_exp' - elif nodeName_ == 'EIF_cond_exp_isfa_ista': + obj_.original_tagname_ = "IF_cond_exp" + elif nodeName_ == "EIF_cond_exp_isfa_ista": class_obj_ = self.get_class_obj_(child_, EIF_cond_exp_isfa_ista) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.EIF_cond_exp_isfa_ista.append(obj_) - obj_.original_tagname_ = 'EIF_cond_exp_isfa_ista' - elif nodeName_ == 'EIF_cond_alpha_isfa_ista': + obj_.original_tagname_ = "EIF_cond_exp_isfa_ista" + elif nodeName_ == "EIF_cond_alpha_isfa_ista": obj_ = EIF_cond_alpha_isfa_ista.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.EIF_cond_alpha_isfa_ista.append(obj_) - obj_.original_tagname_ = 'EIF_cond_alpha_isfa_ista' - elif nodeName_ == 'HH_cond_exp': + obj_.original_tagname_ = "EIF_cond_alpha_isfa_ista" + elif nodeName_ == "HH_cond_exp": obj_ = HH_cond_exp.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.HH_cond_exp.append(obj_) - obj_.original_tagname_ = 'HH_cond_exp' - elif nodeName_ == 'expCondSynapse': + obj_.original_tagname_ = "HH_cond_exp" + elif nodeName_ == "expCondSynapse": obj_ = ExpCondSynapse.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.exp_cond_synapses.append(obj_) - obj_.original_tagname_ = 'expCondSynapse' - elif nodeName_ == 'alphaCondSynapse': + obj_.original_tagname_ = "expCondSynapse" + elif nodeName_ == "alphaCondSynapse": obj_ = AlphaCondSynapse.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, 
gds_collector_=gds_collector_) self.alpha_cond_synapses.append(obj_) - obj_.original_tagname_ = 'alphaCondSynapse' - elif nodeName_ == 'expCurrSynapse': + obj_.original_tagname_ = "alphaCondSynapse" + elif nodeName_ == "expCurrSynapse": obj_ = ExpCurrSynapse.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.exp_curr_synapses.append(obj_) - obj_.original_tagname_ = 'expCurrSynapse' - elif nodeName_ == 'alphaCurrSynapse': + obj_.original_tagname_ = "expCurrSynapse" + elif nodeName_ == "alphaCurrSynapse": obj_ = AlphaCurrSynapse.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.alpha_curr_synapses.append(obj_) - obj_.original_tagname_ = 'alphaCurrSynapse' - elif nodeName_ == 'SpikeSourcePoisson': + obj_.original_tagname_ = "alphaCurrSynapse" + elif nodeName_ == "SpikeSourcePoisson": obj_ = SpikeSourcePoisson.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.SpikeSourcePoisson.append(obj_) - obj_.original_tagname_ = 'SpikeSourcePoisson' - elif nodeName_ == 'network': + obj_.original_tagname_ = "SpikeSourcePoisson" + elif nodeName_ == "network": obj_ = Network.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.networks.append(obj_) - obj_.original_tagname_ = 'network' - elif nodeName_ == 'ComponentType': + obj_.original_tagname_ = "network" + elif nodeName_ == "ComponentType": obj_ = ComponentType.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.ComponentType.append(obj_) - obj_.original_tagname_ = 'ComponentType' - super(NeuroMLDocument, self).buildChildren(child_, node, nodeName_, True) - + obj_.original_tagname_ = "ComponentType" + super(NeuroMLDocument, self)._buildChildren(child_, node, nodeName_, True) def summary(self, show_includes=True, show_non_network=True): + """Get a 
pretty-printed summary of the complete NeuroMLDocument. + + This includes information on the various Components included in the + NeuroMLDocument: networks, cells, projections, synapses, and so on. + """ import inspect info = "*******************************************************\n" - info+="* NeuroMLDocument: "+self.id+"\n*\n" + info += "* NeuroMLDocument: " + self.id + "\n*\n" post = "" membs = inspect.getmembers(self) for memb in membs: - if isinstance(memb[1], list) and len(memb[1])>0 and not memb[0].endswith('_') and not memb[0] == 'networks': - if (memb[0] == 'includes' and show_includes) or (not memb[0] == 'includes' and show_non_network): + if ( + isinstance(memb[1], list) + and len(memb[1]) > 0 + and not memb[0].endswith("_") + and not memb[0] == "networks" + ): + if (memb[0] == "includes" and show_includes) or ( + not memb[0] == "includes" and show_non_network + ): post = "*\n" - info+="* "+str(memb[1][0].__class__.__name__)+": " + info += "* " + str(memb[1][0].__class__.__name__) + ": " listed = [] for entry in memb[1]: - if hasattr(entry,'id'): + if hasattr(entry, "id"): listed.append(str(entry.id)) - elif hasattr(entry,'name'): + elif hasattr(entry, "name"): listed.append(str(entry.name)) - elif hasattr(entry,'href'): + elif hasattr(entry, "href"): listed.append(str(entry.href)) - elif hasattr(entry,'tag'): - listed.append(str(entry.tag)+" = "+str(entry.value)) - info+= str(sorted(listed))+"\n" - info+= post + elif hasattr(entry, "tag"): + listed.append(str(entry.tag) + " = " + str(entry.value)) + info += str(sorted(listed)) + "\n" + info += post for network in self.networks: - info+="* Network: "+network.id + info += "* Network: " + network.id if network.temperature: - info+=" (temperature: "+network.temperature+")" - info+="\n*\n" - tot_pop =0 + info += " (temperature: " + network.temperature + ")" + info += "\n*\n" + tot_pop = 0 tot_cells = 0 pop_info = "" for pop in sorted(network.populations, key=lambda x: x.id): - pop_info+="* "+str(pop)+"\n" - 
tot_pop+=1 - tot_cells+=pop.get_size() - if len(pop.instances)>0: + pop_info += "* " + str(pop) + "\n" + tot_pop += 1 + tot_cells += pop.get_size() + if len(pop.instances) > 0: loc = pop.instances[0].location - pop_info+="* Locations: ["+str(loc)+", ...]\n" - if len(pop.properties)>0: - pop_info+="* Properties: " + pop_info += "* Locations: [" + str(loc) + ", ...]\n" + if len(pop.properties) > 0: + pop_info += "* Properties: " for p in pop.properties: - pop_info+=(str(p.tag)+'='+str(p.value)+'; ') - pop_info+="\n" - - info+="* "+str(tot_cells)+" cells in "+str(tot_pop)+" populations \n"+pop_info+"*\n" - + pop_info += str(p.tag) + "=" + str(p.value) + "; " + pop_info += "\n" + + info += ( + "* " + + str(tot_cells) + + " cells in " + + str(tot_pop) + + " populations \n" + + pop_info + + "*\n" + ) - tot_proj =0 + tot_proj = 0 tot_conns = 0 proj_info = "" for proj in sorted(network.projections, key=lambda x: x.id): - proj_info+="* "+str(proj)+"\n" - tot_proj+=1 - tot_conns+=len(proj.connections) - tot_conns+=len(proj.connection_wds) - if len(proj.connections)>0: - proj_info+="* "+str(len(proj.connections))+" connections: [("+str(proj.connections[0])+"), ...]\n" - if len(proj.connection_wds)>0: - proj_info+="* "+str(len(proj.connection_wds))+" connections (wd): [("+str(proj.connection_wds[0])+"), ...]\n" + proj_info += "* " + str(proj) + "\n" + tot_proj += 1 + tot_conns += len(proj.connections) + tot_conns += len(proj.connection_wds) + if len(proj.connections) > 0: + proj_info += ( + "* " + + str(len(proj.connections)) + + " connections: [(" + + str(proj.connections[0]) + + "), ...]\n" + ) + if len(proj.connection_wds) > 0: + proj_info += ( + "* " + + str(len(proj.connection_wds)) + + " connections (wd): [(" + + str(proj.connection_wds[0]) + + "), ...]\n" + ) for proj in sorted(network.electrical_projections, key=lambda x: x.id): - proj_info+="* Electrical projection: "+proj.id+" from "+proj.presynaptic_population+" to "+proj.postsynaptic_population+"\n" - tot_proj+=1 - 
tot_conns+=len(proj.electrical_connections) - tot_conns+=len(proj.electrical_connection_instances) - tot_conns+=len(proj.electrical_connection_instance_ws) - if len(proj.electrical_connections)>0: - proj_info+="* "+str(len(proj.electrical_connections))+" connections: [("+str(proj.electrical_connections[0])+"), ...]\n" - if len(proj.electrical_connection_instances)>0: - proj_info+="* "+str(len(proj.electrical_connection_instances))+" connections: [("+str(proj.electrical_connection_instances[0])+"), ...]\n" - if len(proj.electrical_connection_instance_ws)>0: - proj_info+="* "+str(len(proj.electrical_connection_instance_ws))+" connections: [("+str(proj.electrical_connection_instance_ws[0])+"), ...]\n" + proj_info += ( + "* Electrical projection: " + + proj.id + + " from " + + proj.presynaptic_population + + " to " + + proj.postsynaptic_population + + "\n" + ) + tot_proj += 1 + tot_conns += len(proj.electrical_connections) + tot_conns += len(proj.electrical_connection_instances) + tot_conns += len(proj.electrical_connection_instance_ws) + if len(proj.electrical_connections) > 0: + proj_info += ( + "* " + + str(len(proj.electrical_connections)) + + " connections: [(" + + str(proj.electrical_connections[0]) + + "), ...]\n" + ) + if len(proj.electrical_connection_instances) > 0: + proj_info += ( + "* " + + str(len(proj.electrical_connection_instances)) + + " connections: [(" + + str(proj.electrical_connection_instances[0]) + + "), ...]\n" + ) + if len(proj.electrical_connection_instance_ws) > 0: + proj_info += ( + "* " + + str(len(proj.electrical_connection_instance_ws)) + + " connections: [(" + + str(proj.electrical_connection_instance_ws[0]) + + "), ...]\n" + ) for proj in sorted(network.continuous_projections, key=lambda x: x.id): - proj_info+="* Continuous projection: "+proj.id+" from "+proj.presynaptic_population+" to "+proj.postsynaptic_population+"\n" - tot_proj+=1 - tot_conns+=len(proj.continuous_connections) - tot_conns+=len(proj.continuous_connection_instances) 
- tot_conns+=len(proj.continuous_connection_instance_ws) - if len(proj.continuous_connections)>0: - proj_info+="* "+str(len(proj.continuous_connections))+" connections: [("+str(proj.continuous_connections[0])+"), ...]\n" - if len(proj.continuous_connection_instances)>0: - proj_info+="* "+str(len(proj.continuous_connection_instances))+" connections: [("+str(proj.continuous_connection_instances[0])+"), ...]\n" - if len(proj.continuous_connection_instance_ws)>0: - proj_info+="* "+str(len(proj.continuous_connection_instance_ws))+" connections (w): [("+str(proj.continuous_connection_instance_ws[0])+"), ...]\n" - - info+="* "+str(tot_conns)+" connections in "+str(tot_proj)+" projections \n"+proj_info+"*\n" - - if len(network.synaptic_connections)>0: - info+="* "+str(len(network.synaptic_connections))+" explicit synaptic connections (outside of projections)\n" + proj_info += ( + "* Continuous projection: " + + proj.id + + " from " + + proj.presynaptic_population + + " to " + + proj.postsynaptic_population + + "\n" + ) + tot_proj += 1 + tot_conns += len(proj.continuous_connections) + tot_conns += len(proj.continuous_connection_instances) + tot_conns += len(proj.continuous_connection_instance_ws) + if len(proj.continuous_connections) > 0: + proj_info += ( + "* " + + str(len(proj.continuous_connections)) + + " connections: [(" + + str(proj.continuous_connections[0]) + + "), ...]\n" + ) + if len(proj.continuous_connection_instances) > 0: + proj_info += ( + "* " + + str(len(proj.continuous_connection_instances)) + + " connections: [(" + + str(proj.continuous_connection_instances[0]) + + "), ...]\n" + ) + if len(proj.continuous_connection_instance_ws) > 0: + proj_info += ( + "* " + + str(len(proj.continuous_connection_instance_ws)) + + " connections (w): [(" + + str(proj.continuous_connection_instance_ws[0]) + + "), ...]\n" + ) + + info += ( + "* " + + str(tot_conns) + + " connections in " + + str(tot_proj) + + " projections \n" + + proj_info + + "*\n" + ) + + if 
len(network.synaptic_connections) > 0: + info += ( + "* " + + str(len(network.synaptic_connections)) + + " explicit synaptic connections (outside of projections)\n" + ) for sc in network.synaptic_connections: - info+="* "+str(sc)+"\n" - info+="*\n" + info += "* " + str(sc) + "\n" + info += "*\n" tot_input_lists = 0 tot_inputs = 0 input_info = "" for il in sorted(network.input_lists, key=lambda x: x.id): - input_info+="* "+str(il)+"\n" + input_info += "* " + str(il) + "\n" tot_input_lists += 1 - if len(il.input)>0: - input_info+="* "+str(len(il.input))+" inputs: [("+str(il.input[0])+"), ...]\n" - tot_inputs+=len(il.input) - if len(il.input_ws)>0: - input_info+="* "+str(len(il.input_ws))+" inputs: [("+str(il.input_ws[0])+"), ...]\n" - tot_inputs+=len(il.input_ws) - - info+="* "+str(tot_inputs)+" inputs in "+str(tot_input_lists)+" input lists \n"+input_info+"*\n" + if len(il.input) > 0: + input_info += ( + "* " + + str(len(il.input)) + + " inputs: [(" + + str(il.input[0]) + + "), ...]\n" + ) + tot_inputs += len(il.input) + if len(il.input_ws) > 0: + input_info += ( + "* " + + str(len(il.input_ws)) + + " inputs: [(" + + str(il.input_ws[0]) + + "), ...]\n" + ) + tot_inputs += len(il.input_ws) + + info += ( + "* " + + str(tot_inputs) + + " inputs in " + + str(tot_input_lists) + + " input lists \n" + + input_info + + "*\n" + ) - if len(network.explicit_inputs)>0: - info+="* "+str(len(network.explicit_inputs))+" explicit inputs (outside of input lists)\n" + if len(network.explicit_inputs) > 0: + info += ( + "* " + + str(len(network.explicit_inputs)) + + " explicit inputs (outside of input lists)\n" + ) for el in network.explicit_inputs: - info+="* "+str(el)+"\n" - info+="*\n" - + info += "* " + str(el) + "\n" + info += "*\n" - info+="*******************************************************" + info += "*******************************************************" return info warn_count = 0 - def get_by_id(self,id): - if len(id)==0: + def get_by_id(self, id): + """Get a component 
by specifying its ID. + + :param id: id of Component to get + :type id: str + :returns: Component with given ID or None if no Component with provided ID was found + """ + if len(id) == 0: import inspect + callframe = inspect.getouterframes(inspect.currentframe(), 2) - print('Method: '+ callframe[1][3] + ' is asking for an element with no id...') + print( + "Method: " + callframe[1][3] + " is asking for an element with no id..." + ) return None all_ids = [] for ms in self.member_data_items_: mlist = self.__getattribute__(ms.name) for m in mlist: - if hasattr(m,"id"): + if hasattr(m, "id"): if m.id == id: return m else: all_ids.append(m.id) from neuroml.loaders import print_ - if self.warn_count<10: - print_("Id "+id+" not found in element. All ids: "+str(sorted(all_ids))) - self.warn_count+=1 - elif self.warn_count==10: + + if self.warn_count < 10: + print_( + "Id " + + id + + " not found in element. All ids: " + + str(sorted(all_ids)) + ) + self.warn_count += 1 + elif self.warn_count == 10: print_(" - Suppressing further warnings about id not found...") return None - def append(self,element): - from neuroml.utils import append_to_element - append_to_element(self,element) + def append(self, element): + """Append an element + + :param element: element to append + :type element: Object + """ + self.add(element) # end class NeuroMLDocument class BasePynnSynapse(BaseSynapse): + """BasePynnSynapse -- Base type for all PyNN synapses. 
Note, the current **I** produced is dimensionless, but it requires a membrane potential **v** with dimension voltage + \n + :param tau_syn: + :type tau_syn: none + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('tau_syn', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_( + "tau_syn", "xs:float", 0, 0, {"use": "required", "name": "tau_syn"} + ), ] subclass = None superclass = BaseSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, tau_syn=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + tau_syn=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BasePynnSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("BasePynnSynapse"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + extensiontype_, + **kwargs_ + ) self.tau_syn = _cast(float, tau_syn) + self.tau_syn_nsprefix_ = None self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, BasePynnSynapse) + subclass = getSubclassFromModule_(CurrentSubclassModule_, BasePynnSynapse) if subclass is not None: return subclass(*args_, **kwargs_) if BasePynnSynapse.subclass: return BasePynnSynapse.subclass(*args_, **kwargs_) else: return BasePynnSynapse(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(BasePynnSynapse, self).hasContent_() - ): + + def _hasContent(self): + if 
super(BasePynnSynapse, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BasePynnSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BasePynnSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BasePynnSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BasePynnSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "BasePynnSynapse": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BasePynnSynapse') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BasePynnSynapse', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="BasePynnSynapse" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BasePynnSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', 
name_='BasePynnSynapse'): - super(BasePynnSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BasePynnSynapse') - if self.tau_syn is not None and 'tau_syn' not in already_processed: - already_processed.add('tau_syn') - outfile.write(' tau_syn="%s"' % self.gds_format_float(self.tau_syn, input_name='tau_syn')) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BasePynnSynapse", + ): + super(BasePynnSynapse, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="BasePynnSynapse" + ) + if self.tau_syn is not None and "tau_syn" not in already_processed: + already_processed.add("tau_syn") + outfile.write( + ' tau_syn="%s"' + % self.gds_format_float(self.tau_syn, input_name="tau_syn") + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BasePynnSynapse', fromsubclass_=False, pretty_print=True): - super(BasePynnSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, 
+ namespaceprefix_="", + namespacedef_="", + name_="BasePynnSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(BasePynnSynapse, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('tau_syn', node) - if value is not None and 'tau_syn' not in already_processed: - already_processed.add('tau_syn') - try: - self.tau_syn = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (tau_syn): %s' % exp) - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("tau_syn", node) + if value is not None and "tau_syn" not in already_processed: + already_processed.add("tau_syn") + value = self.gds_parse_float(value, node, "tau_syn") + self.tau_syn = value + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(BasePynnSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BasePynnSynapse, self).buildChildren(child_, node, nodeName_, True) + super(BasePynnSynapse, self)._buildAttributes(node, attrs, already_processed) 
+ + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(BasePynnSynapse, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class BasePynnSynapse class basePyNNCell(BaseCell): + """basePyNNCell -- Base type of any PyNN standard cell model. Note: membrane potential **v** has dimensions voltage, but all other parameters are dimensionless. This is to facilitate translation to and from PyNN scripts in Python, where these parameters have implicit units, see http://neuralensemble.org/trac/PyNN/wiki/StandardModels + \n + :param cm: + :type cm: none + :param i_offset: + :type i_offset: none + :param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type tau_syn_E: none + :param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type tau_syn_I: none + :param v_init: + :type v_init: none + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('cm', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('i_offset', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('tau_syn_E', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('tau_syn_I', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('v_init', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_("cm", "xs:float", 0, 0, {"use": "required", "name": "cm"}), + MemberSpec_( + "i_offset", "xs:float", 0, 0, {"use": "required", "name": "i_offset"} + ), + MemberSpec_( + "tau_syn_E", "xs:float", 0, 0, {"use": "required", "name": "tau_syn_E"} + ), + MemberSpec_( + "tau_syn_I", "xs:float", 0, 0, {"use": "required", "name": "tau_syn_I"} + ), + MemberSpec_("v_init", "xs:float", 0, 0, {"use": "required", "name": "v_init"}), ] subclass = None superclass = BaseCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, 
properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + cm=None, + i_offset=None, + tau_syn_E=None, + tau_syn_I=None, + v_init=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(basePyNNCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("basePyNNCell"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + extensiontype_, + **kwargs_ + ) self.cm = _cast(float, cm) + self.cm_nsprefix_ = None self.i_offset = _cast(float, i_offset) + self.i_offset_nsprefix_ = None self.tau_syn_E = _cast(float, tau_syn_E) + self.tau_syn_E_nsprefix_ = None self.tau_syn_I = _cast(float, tau_syn_I) + self.tau_syn_I_nsprefix_ = None self.v_init = _cast(float, v_init) + self.v_init_nsprefix_ = None self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, basePyNNCell) + subclass = getSubclassFromModule_(CurrentSubclassModule_, basePyNNCell) if subclass is not None: return subclass(*args_, **kwargs_) if basePyNNCell.subclass: return basePyNNCell.subclass(*args_, **kwargs_) else: return basePyNNCell(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(basePyNNCell, self).hasContent_() - ): + + def _hasContent(self): + if super(basePyNNCell, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', 
name_='basePyNNCell', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('basePyNNCell') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="basePyNNCell", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("basePyNNCell") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "basePyNNCell": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='basePyNNCell') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='basePyNNCell', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="basePyNNCell" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="basePyNNCell", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='basePyNNCell'): - super(basePyNNCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='basePyNNCell') - if self.cm is not None and 'cm' not in 
already_processed: - already_processed.add('cm') - outfile.write(' cm="%s"' % self.gds_format_float(self.cm, input_name='cm')) - if self.i_offset is not None and 'i_offset' not in already_processed: - already_processed.add('i_offset') - outfile.write(' i_offset="%s"' % self.gds_format_float(self.i_offset, input_name='i_offset')) - if self.tau_syn_E is not None and 'tau_syn_E' not in already_processed: - already_processed.add('tau_syn_E') - outfile.write(' tau_syn_E="%s"' % self.gds_format_float(self.tau_syn_E, input_name='tau_syn_E')) - if self.tau_syn_I is not None and 'tau_syn_I' not in already_processed: - already_processed.add('tau_syn_I') - outfile.write(' tau_syn_I="%s"' % self.gds_format_float(self.tau_syn_I, input_name='tau_syn_I')) - if self.v_init is not None and 'v_init' not in already_processed: - already_processed.add('v_init') - outfile.write(' v_init="%s"' % self.gds_format_float(self.v_init, input_name='v_init')) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="basePyNNCell", + ): + super(basePyNNCell, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="basePyNNCell" + ) + if self.cm is not None and "cm" not in already_processed: + already_processed.add("cm") + outfile.write(' cm="%s"' % self.gds_format_float(self.cm, input_name="cm")) + if self.i_offset is not None and "i_offset" not in already_processed: + already_processed.add("i_offset") + outfile.write( + ' i_offset="%s"' + % self.gds_format_float(self.i_offset, input_name="i_offset") + ) + if self.tau_syn_E is not None and "tau_syn_E" not in already_processed: + already_processed.add("tau_syn_E") + outfile.write( + ' tau_syn_E="%s"' + % self.gds_format_float(self.tau_syn_E, 
input_name="tau_syn_E") + ) + if self.tau_syn_I is not None and "tau_syn_I" not in already_processed: + already_processed.add("tau_syn_I") + outfile.write( + ' tau_syn_I="%s"' + % self.gds_format_float(self.tau_syn_I, input_name="tau_syn_I") + ) + if self.v_init is not None and "v_init" not in already_processed: + already_processed.add("v_init") + outfile.write( + ' v_init="%s"' % self.gds_format_float(self.v_init, input_name="v_init") + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='basePyNNCell', fromsubclass_=False, pretty_print=True): - super(basePyNNCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="basePyNNCell", + fromsubclass_=False, + pretty_print=True, + ): + super(basePyNNCell, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = 
Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('cm', node) - if value is not None and 'cm' not in already_processed: - already_processed.add('cm') - try: - self.cm = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (cm): %s' % exp) - value = find_attr_value_('i_offset', node) - if value is not None and 'i_offset' not in already_processed: - already_processed.add('i_offset') - try: - self.i_offset = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (i_offset): %s' % exp) - value = find_attr_value_('tau_syn_E', node) - if value is not None and 'tau_syn_E' not in already_processed: - already_processed.add('tau_syn_E') - try: - self.tau_syn_E = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (tau_syn_E): %s' % exp) - value = find_attr_value_('tau_syn_I', node) - if value is not None and 'tau_syn_I' not in already_processed: - already_processed.add('tau_syn_I') - try: - self.tau_syn_I = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (tau_syn_I): %s' % exp) - value = find_attr_value_('v_init', node) - if value is not None and 'v_init' not in already_processed: - already_processed.add('v_init') - try: - self.v_init = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (v_init): %s' % exp) - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("cm", node) + if value is not None and "cm" not in already_processed: + already_processed.add("cm") + value = self.gds_parse_float(value, node, "cm") 
+ self.cm = value + value = find_attr_value_("i_offset", node) + if value is not None and "i_offset" not in already_processed: + already_processed.add("i_offset") + value = self.gds_parse_float(value, node, "i_offset") + self.i_offset = value + value = find_attr_value_("tau_syn_E", node) + if value is not None and "tau_syn_E" not in already_processed: + already_processed.add("tau_syn_E") + value = self.gds_parse_float(value, node, "tau_syn_E") + self.tau_syn_E = value + value = find_attr_value_("tau_syn_I", node) + if value is not None and "tau_syn_I" not in already_processed: + already_processed.add("tau_syn_I") + value = self.gds_parse_float(value, node, "tau_syn_I") + self.tau_syn_I = value + value = find_attr_value_("v_init", node) + if value is not None and "v_init" not in already_processed: + already_processed.add("v_init") + value = self.gds_parse_float(value, node, "v_init") + self.v_init = value + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(basePyNNCell, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(basePyNNCell, self).buildChildren(child_, node, nodeName_, True) + super(basePyNNCell, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(basePyNNCell, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class basePyNNCell class ContinuousProjection(BaseProjection): - """Projection between two populations consisting of analog connections - (e.g. 
graded synapses)""" + """ContinuousProjection -- A projection between **presynapticPopulation** and **postsynapticPopulation** through components **preComponent** at the start and **postComponent** at the end of a **continuousConnection** or **continuousConnectionInstance** . Can be used for analog synapses.""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('continuous_connections', 'ContinuousConnection', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ContinuousConnection', u'name': u'continuousConnection', u'minOccurs': u'0'}, None), - MemberSpec_('continuous_connection_instances', 'ContinuousConnectionInstance', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ContinuousConnectionInstance', u'name': u'continuousConnectionInstance', u'minOccurs': u'0'}, None), - MemberSpec_('continuous_connection_instance_ws', 'ContinuousConnectionInstanceW', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ContinuousConnectionInstanceW', u'name': u'continuousConnectionInstanceW', u'minOccurs': u'0'}, None), + MemberSpec_( + "continuous_connections", + "ContinuousConnection", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "continuousConnection", + "type": "ContinuousConnection", + }, + None, + ), + MemberSpec_( + "continuous_connection_instances", + "ContinuousConnectionInstance", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "continuousConnectionInstance", + "type": "ContinuousConnectionInstance", + }, + None, + ), + MemberSpec_( + "continuous_connection_instance_ws", + "ContinuousConnectionInstanceW", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "continuousConnectionInstanceW", + "type": "ContinuousConnectionInstanceW", + }, + None, + ), ] subclass = None superclass = BaseProjection - def __init__(self, neuro_lex_id=None, id=None, presynaptic_population=None, postsynaptic_population=None, continuous_connections=None, continuous_connection_instances=None, 
continuous_connection_instance_ws=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + presynaptic_population=None, + postsynaptic_population=None, + continuous_connections=None, + continuous_connection_instances=None, + continuous_connection_instance_ws=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ContinuousProjection, self).__init__(neuro_lex_id, id, presynaptic_population, postsynaptic_population, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ContinuousProjection"), self).__init__( + neuro_lex_id, id, presynaptic_population, postsynaptic_population, **kwargs_ + ) if continuous_connections is None: self.continuous_connections = [] else: self.continuous_connections = continuous_connections + self.continuous_connections_nsprefix_ = None if continuous_connection_instances is None: self.continuous_connection_instances = [] else: self.continuous_connection_instances = continuous_connection_instances + self.continuous_connection_instances_nsprefix_ = None if continuous_connection_instance_ws is None: self.continuous_connection_instance_ws = [] else: self.continuous_connection_instance_ws = continuous_connection_instance_ws + self.continuous_connection_instance_ws_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ContinuousProjection) + CurrentSubclassModule_, ContinuousProjection + ) if subclass is not None: return subclass(*args_, **kwargs_) if ContinuousProjection.subclass: return ContinuousProjection.subclass(*args_, **kwargs_) else: return ContinuousProjection(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): + + def _hasContent(self): if ( - self.continuous_connections or - 
self.continuous_connection_instances or - self.continuous_connection_instance_ws or - super(ContinuousProjection, self).hasContent_() + self.continuous_connections + or self.continuous_connection_instances + or self.continuous_connection_instance_ws + or super(ContinuousProjection, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContinuousProjection', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ContinuousProjection') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="ContinuousProjection", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ContinuousProjection") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ContinuousProjection": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousProjection') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ContinuousProjection', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ContinuousProjection", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + 
self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ContinuousProjection", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ContinuousProjection'): - super(ContinuousProjection, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousProjection') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContinuousProjection', fromsubclass_=False, pretty_print=True): - super(ContinuousProjection, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ContinuousProjection", + ): + super(ContinuousProjection, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ContinuousProjection", + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="ContinuousProjection", + fromsubclass_=False, + pretty_print=True, + ): + super(ContinuousProjection, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for continuousConnection_ in self.continuous_connections: - continuousConnection_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='continuousConnection', pretty_print=pretty_print) + namespaceprefix_ = ( + self.continuous_connections_nsprefix_ + ":" + if (UseCapturedNS_ and self.continuous_connections_nsprefix_) 
+ else "" + ) + continuousConnection_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="continuousConnection", + pretty_print=pretty_print, + ) for continuousConnectionInstance_ in self.continuous_connection_instances: - continuousConnectionInstance_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='continuousConnectionInstance', pretty_print=pretty_print) + namespaceprefix_ = ( + self.continuous_connection_instances_nsprefix_ + ":" + if (UseCapturedNS_ and self.continuous_connection_instances_nsprefix_) + else "" + ) + continuousConnectionInstance_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="continuousConnectionInstance", + pretty_print=pretty_print, + ) for continuousConnectionInstanceW_ in self.continuous_connection_instance_ws: - continuousConnectionInstanceW_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='continuousConnectionInstanceW', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.continuous_connection_instance_ws_nsprefix_ + ":" + if (UseCapturedNS_ and self.continuous_connection_instance_ws_nsprefix_) + else "" + ) + continuousConnectionInstanceW_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="continuousConnectionInstanceW", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(ContinuousProjection, 
self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'continuousConnection': + + def _buildAttributes(self, node, attrs, already_processed): + super(ContinuousProjection, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "continuousConnection": class_obj_ = self.get_class_obj_(child_, ContinuousConnection) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.continuous_connections.append(obj_) - obj_.original_tagname_ = 'continuousConnection' - elif nodeName_ == 'continuousConnectionInstance': + obj_.original_tagname_ = "continuousConnection" + elif nodeName_ == "continuousConnectionInstance": class_obj_ = self.get_class_obj_(child_, ContinuousConnectionInstance) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.continuous_connection_instances.append(obj_) - obj_.original_tagname_ = 'continuousConnectionInstance' - elif nodeName_ == 'continuousConnectionInstanceW': + obj_.original_tagname_ = "continuousConnectionInstance" + elif nodeName_ == "continuousConnectionInstanceW": obj_ = ContinuousConnectionInstanceW.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.continuous_connection_instance_ws.append(obj_) - obj_.original_tagname_ = 'continuousConnectionInstanceW' - super(ContinuousProjection, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "continuousConnectionInstanceW" + super(ContinuousProjection, self)._buildChildren(child_, node, nodeName_, True) def exportHdf5(self, h5file, h5Group): - #print("Exporting ContinuousProjection: "+str(self.id)+" as HDF5") - + """Export to HDF5 file.""" + # print("Exporting 
ContinuousProjection: "+str(self.id)+" as HDF5") import numpy - projGroup = h5file.create_group(h5Group, 'projection_'+self.id) + projGroup = h5file.create_group(h5Group, "projection_" + self.id) projGroup._f_setattr("id", self.id) projGroup._f_setattr("type", "continuousProjection") projGroup._f_setattr("presynapticPopulation", self.presynaptic_population) projGroup._f_setattr("postsynapticPopulation", self.postsynaptic_population) - pre_comp = self.continuous_connections[0].pre_component if len(self.continuous_connections)>0 else self.continuous_connection_instances[0].pre_component if len(self.continuous_connection_instances)>0 else self.continuous_connection_instance_ws[0].pre_component - projGroup._f_setattr("preComponent", pre_comp ) - post_comp = self.continuous_connections[0].post_component if len(self.continuous_connections)>0 else self.continuous_connection_instances[0].post_component if len(self.continuous_connection_instances)>0 else self.continuous_connection_instance_ws[0].post_component - projGroup._f_setattr("postComponent", post_comp ) + pre_comp = ( + self.continuous_connections[0].pre_component + if len(self.continuous_connections) > 0 + else self.continuous_connection_instances[0].pre_component + if len(self.continuous_connection_instances) > 0 + else self.continuous_connection_instance_ws[0].pre_component + ) + projGroup._f_setattr("preComponent", pre_comp) + post_comp = ( + self.continuous_connections[0].post_component + if len(self.continuous_connections) > 0 + else self.continuous_connection_instances[0].post_component + if len(self.continuous_connection_instances) > 0 + else self.continuous_connection_instance_ws[0].post_component + ) + projGroup._f_setattr("postComponent", post_comp) cols = 7 extra_cols = {} - num_tot = len(self.continuous_connections)+len(self.continuous_connection_instances)+len(self.continuous_connection_instance_ws) + num_tot = ( + len(self.continuous_connections) + + len(self.continuous_connection_instances) + + 
len(self.continuous_connection_instance_ws) + ) - if len(self.continuous_connection_instance_ws)>0: - extra_cols["column_"+str(cols)] = 'weight' - cols+=1 + if len(self.continuous_connection_instance_ws) > 0: + extra_cols["column_" + str(cols)] = "weight" + cols += 1 - #print("Exporting "+str(num_tot)+" continuous connections") + # print("Exporting "+str(num_tot)+" continuous connections") a = numpy.zeros([num_tot, cols], numpy.float32) - count=0 + count = 0 # TODO: optimise for single compartment cells, i.e. where no pre_segment/post_fraction_along etc. for connection in self.continuous_connections: - a[count,0] = connection.id - a[count,1] = connection.get_pre_cell_id() - a[count,2] = connection.get_post_cell_id() - a[count,3] = connection.pre_segment - a[count,4] = connection.post_segment - a[count,5] = connection.pre_fraction_along - a[count,6] = connection.post_fraction_along - count=count+1 + a[count, 0] = connection.id + a[count, 1] = connection.get_pre_cell_id() + a[count, 2] = connection.get_post_cell_id() + a[count, 3] = connection.pre_segment + a[count, 4] = connection.post_segment + a[count, 5] = connection.pre_fraction_along + a[count, 6] = connection.post_fraction_along + count = count + 1 for connection in self.continuous_connection_instances: - a[count,0] = connection.id - a[count,1] = connection.get_pre_cell_id() - a[count,2] = connection.get_post_cell_id() - a[count,3] = connection.pre_segment - a[count,4] = connection.post_segment - a[count,5] = connection.pre_fraction_along - a[count,6] = connection.post_fraction_along - count=count+1 - + a[count, 0] = connection.id + a[count, 1] = connection.get_pre_cell_id() + a[count, 2] = connection.get_post_cell_id() + a[count, 3] = connection.pre_segment + a[count, 4] = connection.post_segment + a[count, 5] = connection.pre_fraction_along + a[count, 6] = connection.post_fraction_along + count = count + 1 for connection in self.continuous_connection_instance_ws: - a[count,0] = connection.id - a[count,1] = 
connection.get_pre_cell_id() - a[count,2] = connection.get_post_cell_id() - a[count,3] = connection.pre_segment - a[count,4] = connection.post_segment - a[count,5] = connection.pre_fraction_along - a[count,6] = connection.post_fraction_along - a[count,7] = connection.weight - count=count+1 - - - array = h5file.create_carray(projGroup, self.id, obj=a, title="Connections of cells in "+ self.id) + a[count, 0] = connection.id + a[count, 1] = connection.get_pre_cell_id() + a[count, 2] = connection.get_post_cell_id() + a[count, 3] = connection.pre_segment + a[count, 4] = connection.post_segment + a[count, 5] = connection.pre_fraction_along + a[count, 6] = connection.post_fraction_along + a[count, 7] = connection.weight + count = count + 1 + + array = h5file.create_carray( + projGroup, self.id, obj=a, title="Connections of cells in " + self.id + ) array._f_setattr("column_0", "id") array._f_setattr("column_1", "pre_cell_id") @@ -16662,183 +39968,364 @@ def exportHdf5(self, h5file, h5Group): for k in extra_cols: array._f_setattr(k, extra_cols[k]) - - # end class ContinuousProjection class ElectricalProjection(BaseProjection): - """Projection between two populations consisting of electrical - connections (gap junctions)""" + """ElectricalProjection -- A projection between **presynapticPopulation** to another **postsynapticPopulation** through gap junctions.""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('electrical_connections', 'ElectricalConnection', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ElectricalConnection', u'name': u'electricalConnection', u'minOccurs': u'0'}, None), - MemberSpec_('electrical_connection_instances', 'ElectricalConnectionInstance', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ElectricalConnectionInstance', u'name': u'electricalConnectionInstance', u'minOccurs': u'0'}, None), - MemberSpec_('electrical_connection_instance_ws', 'ElectricalConnectionInstanceW', 1, 1, {u'maxOccurs': u'unbounded', u'type': 
u'ElectricalConnectionInstanceW', u'name': u'electricalConnectionInstanceW', u'minOccurs': u'0'}, None), + MemberSpec_( + "electrical_connections", + "ElectricalConnection", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "electricalConnection", + "type": "ElectricalConnection", + }, + None, + ), + MemberSpec_( + "electrical_connection_instances", + "ElectricalConnectionInstance", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "electricalConnectionInstance", + "type": "ElectricalConnectionInstance", + }, + None, + ), + MemberSpec_( + "electrical_connection_instance_ws", + "ElectricalConnectionInstanceW", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "electricalConnectionInstanceW", + "type": "ElectricalConnectionInstanceW", + }, + None, + ), ] subclass = None superclass = BaseProjection - def __init__(self, neuro_lex_id=None, id=None, presynaptic_population=None, postsynaptic_population=None, electrical_connections=None, electrical_connection_instances=None, electrical_connection_instance_ws=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + presynaptic_population=None, + postsynaptic_population=None, + electrical_connections=None, + electrical_connection_instances=None, + electrical_connection_instance_ws=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ElectricalProjection, self).__init__(neuro_lex_id, id, presynaptic_population, postsynaptic_population, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ElectricalProjection"), self).__init__( + neuro_lex_id, id, presynaptic_population, postsynaptic_population, **kwargs_ + ) if electrical_connections is None: self.electrical_connections = [] else: self.electrical_connections = 
electrical_connections + self.electrical_connections_nsprefix_ = None if electrical_connection_instances is None: self.electrical_connection_instances = [] else: self.electrical_connection_instances = electrical_connection_instances + self.electrical_connection_instances_nsprefix_ = None if electrical_connection_instance_ws is None: self.electrical_connection_instance_ws = [] else: self.electrical_connection_instance_ws = electrical_connection_instance_ws + self.electrical_connection_instance_ws_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ElectricalProjection) + CurrentSubclassModule_, ElectricalProjection + ) if subclass is not None: return subclass(*args_, **kwargs_) if ElectricalProjection.subclass: return ElectricalProjection.subclass(*args_, **kwargs_) else: return ElectricalProjection(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): + + def _hasContent(self): if ( - self.electrical_connections or - self.electrical_connection_instances or - self.electrical_connection_instance_ws or - super(ElectricalProjection, self).hasContent_() + self.electrical_connections + or self.electrical_connection_instances + or self.electrical_connection_instance_ws + or super(ElectricalProjection, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ElectricalProjection', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ElectricalProjection') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="ElectricalProjection", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ElectricalProjection") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if 
self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ElectricalProjection": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalProjection') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ElectricalProjection', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ElectricalProjection", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ElectricalProjection", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ElectricalProjection'): - super(ElectricalProjection, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalProjection') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ElectricalProjection', fromsubclass_=False, pretty_print=True): - super(ElectricalProjection, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + 
outfile, + level, + already_processed, + namespaceprefix_="", + name_="ElectricalProjection", + ): + super(ElectricalProjection, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ElectricalProjection", + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="ElectricalProjection", + fromsubclass_=False, + pretty_print=True, + ): + super(ElectricalProjection, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for electricalConnection_ in self.electrical_connections: - electricalConnection_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='electricalConnection', pretty_print=pretty_print) + namespaceprefix_ = ( + self.electrical_connections_nsprefix_ + ":" + if (UseCapturedNS_ and self.electrical_connections_nsprefix_) + else "" + ) + electricalConnection_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="electricalConnection", + pretty_print=pretty_print, + ) for electricalConnectionInstance_ in self.electrical_connection_instances: - electricalConnectionInstance_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='electricalConnectionInstance', pretty_print=pretty_print) + namespaceprefix_ = ( + self.electrical_connection_instances_nsprefix_ + ":" + if (UseCapturedNS_ and self.electrical_connection_instances_nsprefix_) + else "" + ) + electricalConnectionInstance_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="electricalConnectionInstance", + pretty_print=pretty_print, + ) for electricalConnectionInstanceW_ in self.electrical_connection_instance_ws: - electricalConnectionInstanceW_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='electricalConnectionInstanceW', 
pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.electrical_connection_instance_ws_nsprefix_ + ":" + if (UseCapturedNS_ and self.electrical_connection_instance_ws_nsprefix_) + else "" + ) + electricalConnectionInstanceW_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="electricalConnectionInstanceW", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(ElectricalProjection, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'electricalConnection': + + def _buildAttributes(self, node, attrs, already_processed): + super(ElectricalProjection, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "electricalConnection": class_obj_ = self.get_class_obj_(child_, ElectricalConnection) obj_ = class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.electrical_connections.append(obj_) - obj_.original_tagname_ = 'electricalConnection' - elif nodeName_ == 'electricalConnectionInstance': + obj_.original_tagname_ = "electricalConnection" + elif nodeName_ == "electricalConnectionInstance": class_obj_ = self.get_class_obj_(child_, ElectricalConnectionInstance) obj_ = 
class_obj_.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.electrical_connection_instances.append(obj_) - obj_.original_tagname_ = 'electricalConnectionInstance' - elif nodeName_ == 'electricalConnectionInstanceW': + obj_.original_tagname_ = "electricalConnectionInstance" + elif nodeName_ == "electricalConnectionInstanceW": obj_ = ElectricalConnectionInstanceW.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.electrical_connection_instance_ws.append(obj_) - obj_.original_tagname_ = 'electricalConnectionInstanceW' - super(ElectricalProjection, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "electricalConnectionInstanceW" + super(ElectricalProjection, self)._buildChildren(child_, node, nodeName_, True) def exportHdf5(self, h5file, h5Group): - #print("Exporting ElectricalProjection: "+str(self.id)+" as HDF5") - + """Export to HDF5 file.""" + # print("Exporting ElectricalProjection: "+str(self.id)+" as HDF5") import numpy - projGroup = h5file.create_group(h5Group, 'projection_'+self.id) + projGroup = h5file.create_group(h5Group, "projection_" + self.id) projGroup._f_setattr("id", self.id) projGroup._f_setattr("type", "electricalProjection") projGroup._f_setattr("presynapticPopulation", self.presynaptic_population) projGroup._f_setattr("postsynapticPopulation", self.postsynaptic_population) - syn = self.electrical_connections[0].synapse if len(self.electrical_connections)>0 else self.electrical_connection_instances[0].synapse if len(self.electrical_connection_instances)>0 else self.electrical_connection_instance_ws[0].synapse - projGroup._f_setattr("synapse", syn ) + syn = ( + self.electrical_connections[0].synapse + if len(self.electrical_connections) > 0 + else self.electrical_connection_instances[0].synapse + if len(self.electrical_connection_instances) > 0 + else self.electrical_connection_instance_ws[0].synapse + ) + 
projGroup._f_setattr("synapse", syn) cols = 7 extra_cols = {} - num_tot = len(self.electrical_connections)+len(self.electrical_connection_instances)+len(self.electrical_connection_instance_ws) - if len(self.electrical_connection_instance_ws)>0: - extra_cols["column_"+str(cols)] = "weight" - cols+=1 + num_tot = ( + len(self.electrical_connections) + + len(self.electrical_connection_instances) + + len(self.electrical_connection_instance_ws) + ) + if len(self.electrical_connection_instance_ws) > 0: + extra_cols["column_" + str(cols)] = "weight" + cols += 1 - #print("Exporting "+str(num_tot)+" electrical connections") + # print("Exporting "+str(num_tot)+" electrical connections") a = numpy.zeros([num_tot, cols], numpy.float32) - count=0 + count = 0 # TODO: optimise for single compartment cells, i.e. where no pre_segment/post_fraction_along etc. for connection in self.electrical_connections: - a[count,0] = connection.id - a[count,1] = connection.get_pre_cell_id() - a[count,2] = connection.get_post_cell_id() - a[count,3] = connection.pre_segment - a[count,4] = connection.post_segment - a[count,5] = connection.pre_fraction_along - a[count,6] = connection.post_fraction_along - count=count+1 + a[count, 0] = connection.id + a[count, 1] = connection.get_pre_cell_id() + a[count, 2] = connection.get_post_cell_id() + a[count, 3] = connection.pre_segment + a[count, 4] = connection.post_segment + a[count, 5] = connection.pre_fraction_along + a[count, 6] = connection.post_fraction_along + count = count + 1 for connection in self.electrical_connection_instances: - a[count,0] = connection.id - a[count,1] = connection.get_pre_cell_id() - a[count,2] = connection.get_post_cell_id() - a[count,3] = connection.pre_segment - a[count,4] = connection.post_segment - a[count,5] = connection.pre_fraction_along - a[count,6] = connection.post_fraction_along - count=count+1 + a[count, 0] = connection.id + a[count, 1] = connection.get_pre_cell_id() + a[count, 2] = connection.get_post_cell_id() + 
a[count, 3] = connection.pre_segment + a[count, 4] = connection.post_segment + a[count, 5] = connection.pre_fraction_along + a[count, 6] = connection.post_fraction_along + count = count + 1 for connection in self.electrical_connection_instance_ws: - a[count,0] = connection.id - a[count,1] = connection.get_pre_cell_id() - a[count,2] = connection.get_post_cell_id() - a[count,3] = connection.pre_segment - a[count,4] = connection.post_segment - a[count,5] = connection.pre_fraction_along - a[count,6] = connection.post_fraction_along - a[count,7] = connection.get_weight() - count=count+1 - - array = h5file.create_carray(projGroup, self.id, obj=a, title="Connections of cells in "+ self.id) + a[count, 0] = connection.id + a[count, 1] = connection.get_pre_cell_id() + a[count, 2] = connection.get_post_cell_id() + a[count, 3] = connection.pre_segment + a[count, 4] = connection.post_segment + a[count, 5] = connection.pre_fraction_along + a[count, 6] = connection.post_fraction_along + a[count, 7] = connection.get_weight() + count = count + 1 + + array = h5file.create_carray( + projGroup, self.id, obj=a, title="Connections of cells in " + self.id + ) array._f_setattr("column_0", "id") array._f_setattr("column_1", "pre_cell_id") @@ -16849,466 +40336,1072 @@ def exportHdf5(self, h5file, h5Group): array._f_setattr("column_6", "post_fraction_along") for col in extra_cols.keys(): - array._f_setattr(col,extra_cols[col]) - + array._f_setattr(col, extra_cols[col]) # end class ElectricalProjection class BaseConnectionNewFormat(BaseConnection): - """Base of all synaptic connections with preCell, postSegment, etc. See - BaseConnectionOldFormat""" + """BaseConnectionNewFormat -- Base of all synaptic connections with preCell, postSegment, etc. 
+ See BaseConnectionOldFormat + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('pre_cell', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('pre_segment', 'NonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('pre_fraction_along', 'ZeroToOne', 0, 1, {'use': 'optional'}), - MemberSpec_('post_cell', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('post_segment', 'NonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('post_fraction_along', 'ZeroToOne', 0, 1, {'use': 'optional'}), + MemberSpec_( + "pre_cell", "xs:string", 0, 0, {"use": "required", "name": "pre_cell"} + ), + MemberSpec_( + "pre_segment", + "NonNegativeInteger", + 0, + 1, + {"use": "optional", "name": "pre_segment"}, + ), + MemberSpec_( + "pre_fraction_along", + "ZeroToOne", + 0, + 1, + {"use": "optional", "name": "pre_fraction_along"}, + ), + MemberSpec_( + "post_cell", "xs:string", 0, 0, {"use": "required", "name": "post_cell"} + ), + MemberSpec_( + "post_segment", + "NonNegativeInteger", + 0, + 1, + {"use": "optional", "name": "post_segment"}, + ), + MemberSpec_( + "post_fraction_along", + "ZeroToOne", + 0, + 1, + {"use": "optional", "name": "post_fraction_along"}, + ), ] subclass = None superclass = BaseConnection - def __init__(self, neuro_lex_id=None, id=None, pre_cell=None, pre_segment='0', pre_fraction_along='0.5', post_cell=None, post_segment='0', post_fraction_along='0.5', extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + pre_cell=None, + pre_segment="0", + pre_fraction_along="0.5", + post_cell=None, + post_segment="0", + post_fraction_along="0.5", + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BaseConnectionNewFormat, self).__init__(neuro_lex_id, id, extensiontype_, **kwargs_) + self.parent_object_ = 
kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("BaseConnectionNewFormat"), self).__init__( + neuro_lex_id, id, extensiontype_, **kwargs_ + ) self.pre_cell = _cast(None, pre_cell) + self.pre_cell_nsprefix_ = None self.pre_segment = _cast(int, pre_segment) + self.pre_segment_nsprefix_ = None self.pre_fraction_along = _cast(float, pre_fraction_along) + self.pre_fraction_along_nsprefix_ = None self.post_cell = _cast(None, post_cell) + self.post_cell_nsprefix_ = None self.post_segment = _cast(int, post_segment) + self.post_segment_nsprefix_ = None self.post_fraction_along = _cast(float, post_fraction_along) + self.post_fraction_along_nsprefix_ = None self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, BaseConnectionNewFormat) + CurrentSubclassModule_, BaseConnectionNewFormat + ) if subclass is not None: return subclass(*args_, **kwargs_) if BaseConnectionNewFormat.subclass: return BaseConnectionNewFormat.subclass(*args_, **kwargs_) else: return BaseConnectionNewFormat(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' + % { + "value": value, + "lineno": lineno, + } + ) + return False pass + def validate_ZeroToOne(self, value): # Validate type ZeroToOne, a restriction on xs:float. 
- if value is not None and Validate_simpletypes_: - if value < 0: - warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on ZeroToOne' % {"value" : value} ) - if value > 1: - warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on ZeroToOne' % {"value" : value} ) - def hasContent_(self): if ( - super(BaseConnectionNewFormat, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, float): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (float)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if value < 0: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on ZeroToOne' + % {"value": value, "lineno": lineno} + ) + result = False + if value > 1: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s does not match xsd maxInclusive restriction on ZeroToOne' + % {"value": value, "lineno": lineno} + ) + result = False + + def _hasContent(self): + if super(BaseConnectionNewFormat, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseConnectionNewFormat', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BaseConnectionNewFormat') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseConnectionNewFormat", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BaseConnectionNewFormat") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == 
"BaseConnectionNewFormat": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConnectionNewFormat') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseConnectionNewFormat', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseConnectionNewFormat'): - super(BaseConnectionNewFormat, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConnectionNewFormat') - if self.pre_cell is not None and 'pre_cell' not in already_processed: - already_processed.add('pre_cell') - outfile.write(' preCell=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.pre_cell), input_name='preCell')), )) - if self.pre_segment != 0 and 'pre_segment' not in already_processed: - already_processed.add('pre_segment') - outfile.write(' preSegment=%s' % (quote_attrib(self.pre_segment), )) - if self.pre_fraction_along != 0.5 and 'pre_fraction_along' not in already_processed: - already_processed.add('pre_fraction_along') - outfile.write(' preFractionAlong=%s' % (quote_attrib(self.pre_fraction_along), )) - if self.post_cell is not None and 'post_cell' not in already_processed: - already_processed.add('post_cell') - outfile.write(' postCell=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.post_cell), input_name='postCell')), )) - if self.post_segment != 0 and 'post_segment' not in already_processed: - already_processed.add('post_segment') - outfile.write(' postSegment=%s' 
% (quote_attrib(self.post_segment), )) - if self.post_fraction_along != 0.5 and 'post_fraction_along' not in already_processed: - already_processed.add('post_fraction_along') - outfile.write(' postFractionAlong=%s' % (quote_attrib(self.post_fraction_along), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseConnectionNewFormat", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BaseConnectionNewFormat", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BaseConnectionNewFormat", + ): + super(BaseConnectionNewFormat, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseConnectionNewFormat", + ) + if self.pre_cell is not None and "pre_cell" not in already_processed: + already_processed.add("pre_cell") + outfile.write( + " preCell=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.pre_cell), input_name="preCell" + ) + ), + ) + ) + if self.pre_segment != 0 and "pre_segment" not in already_processed: + already_processed.add("pre_segment") + outfile.write( + ' preSegment="%s"' + % self.gds_format_integer(self.pre_segment, input_name="preSegment") + ) + if ( + self.pre_fraction_along != 0.5 + and "pre_fraction_along" not in already_processed + ): + already_processed.add("pre_fraction_along") + outfile.write( + ' preFractionAlong="%s"' + % self.gds_format_float( + self.pre_fraction_along, 
input_name="preFractionAlong" + ) + ) + if self.post_cell is not None and "post_cell" not in already_processed: + already_processed.add("post_cell") + outfile.write( + " postCell=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.post_cell), input_name="postCell" + ) + ), + ) + ) + if self.post_segment != 0 and "post_segment" not in already_processed: + already_processed.add("post_segment") + outfile.write( + ' postSegment="%s"' + % self.gds_format_integer(self.post_segment, input_name="postSegment") + ) + if ( + self.post_fraction_along != 0.5 + and "post_fraction_along" not in already_processed + ): + already_processed.add("post_fraction_along") + outfile.write( + ' postFractionAlong="%s"' + % self.gds_format_float( + self.post_fraction_along, input_name="postFractionAlong" + ) + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseConnectionNewFormat', fromsubclass_=False, pretty_print=True): - super(BaseConnectionNewFormat, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseConnectionNewFormat", + fromsubclass_=False, + pretty_print=True, + ): + super(BaseConnectionNewFormat, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) pass - def 
build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('preCell', node) - if value is not None and 'preCell' not in already_processed: - already_processed.add('preCell') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("preCell", node) + if value is not None and "preCell" not in already_processed: + already_processed.add("preCell") self.pre_cell = value - value = find_attr_value_('preSegment', node) - if value is not None and 'preSegment' not in already_processed: - already_processed.add('preSegment') - try: - self.pre_segment = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + value = find_attr_value_("preSegment", node) + if value is not None and "preSegment" not in already_processed: + already_processed.add("preSegment") + self.pre_segment = self.gds_parse_integer(value, node, "preSegment") if self.pre_segment < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - self.validate_NonNegativeInteger(self.pre_segment) # validate type NonNegativeInteger - value = find_attr_value_('preFractionAlong', node) - if value is not None and 'preFractionAlong' not in already_processed: - already_processed.add('preFractionAlong') - try: - self.pre_fraction_along = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (preFractionAlong): %s' % exp) - 
self.validate_ZeroToOne(self.pre_fraction_along) # validate type ZeroToOne - value = find_attr_value_('postCell', node) - if value is not None and 'postCell' not in already_processed: - already_processed.add('postCell') + raise_parse_error(node, "Invalid NonNegativeInteger") + self.validate_NonNegativeInteger( + self.pre_segment + ) # validate type NonNegativeInteger + value = find_attr_value_("preFractionAlong", node) + if value is not None and "preFractionAlong" not in already_processed: + already_processed.add("preFractionAlong") + value = self.gds_parse_float(value, node, "preFractionAlong") + self.pre_fraction_along = value + self.validate_ZeroToOne(self.pre_fraction_along) # validate type ZeroToOne + value = find_attr_value_("postCell", node) + if value is not None and "postCell" not in already_processed: + already_processed.add("postCell") self.post_cell = value - value = find_attr_value_('postSegment', node) - if value is not None and 'postSegment' not in already_processed: - already_processed.add('postSegment') - try: - self.post_segment = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + value = find_attr_value_("postSegment", node) + if value is not None and "postSegment" not in already_processed: + already_processed.add("postSegment") + self.post_segment = self.gds_parse_integer(value, node, "postSegment") if self.post_segment < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - self.validate_NonNegativeInteger(self.post_segment) # validate type NonNegativeInteger - value = find_attr_value_('postFractionAlong', node) - if value is not None and 'postFractionAlong' not in already_processed: - already_processed.add('postFractionAlong') - try: - self.post_fraction_along = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (postFractionAlong): %s' % exp) - self.validate_ZeroToOne(self.post_fraction_along) # validate type ZeroToOne - value = 
find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + raise_parse_error(node, "Invalid NonNegativeInteger") + self.validate_NonNegativeInteger( + self.post_segment + ) # validate type NonNegativeInteger + value = find_attr_value_("postFractionAlong", node) + if value is not None and "postFractionAlong" not in already_processed: + already_processed.add("postFractionAlong") + value = self.gds_parse_float(value, node, "postFractionAlong") + self.post_fraction_along = value + self.validate_ZeroToOne(self.post_fraction_along) # validate type ZeroToOne + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(BaseConnectionNewFormat, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BaseConnectionNewFormat, self).buildChildren(child_, node, nodeName_, True) + super(BaseConnectionNewFormat, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(BaseConnectionNewFormat, self)._buildChildren( + child_, node, nodeName_, True + ) pass + + # end class BaseConnectionNewFormat class BaseConnectionOldFormat(BaseConnection): - """Base of all synaptic connections with preCellId, postSegmentId, etc. - Note: this is not the best name for these attributes, since Id - is superfluous, hence BaseConnectionNewFormat""" + """BaseConnectionOldFormat -- Base of all synaptic connections with preCellId, postSegmentId, etc. 
+ Note: this is not the best name for these attributes, since Id is superfluous, hence BaseConnectionNewFormat + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('pre_cell_id', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('pre_segment_id', 'NonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('pre_fraction_along', 'ZeroToOne', 0, 1, {'use': 'optional'}), - MemberSpec_('post_cell_id', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('post_segment_id', 'NonNegativeInteger', 0, 1, {'use': 'optional'}), - MemberSpec_('post_fraction_along', 'ZeroToOne', 0, 1, {'use': 'optional'}), + MemberSpec_( + "pre_cell_id", "xs:string", 0, 0, {"use": "required", "name": "pre_cell_id"} + ), + MemberSpec_( + "pre_segment_id", + "NonNegativeInteger", + 0, + 1, + {"use": "optional", "name": "pre_segment_id"}, + ), + MemberSpec_( + "pre_fraction_along", + "ZeroToOne", + 0, + 1, + {"use": "optional", "name": "pre_fraction_along"}, + ), + MemberSpec_( + "post_cell_id", + "xs:string", + 0, + 0, + {"use": "required", "name": "post_cell_id"}, + ), + MemberSpec_( + "post_segment_id", + "NonNegativeInteger", + 0, + 1, + {"use": "optional", "name": "post_segment_id"}, + ), + MemberSpec_( + "post_fraction_along", + "ZeroToOne", + 0, + 1, + {"use": "optional", "name": "post_fraction_along"}, + ), ] subclass = None superclass = BaseConnection - def __init__(self, neuro_lex_id=None, id=None, pre_cell_id=None, pre_segment_id='0', pre_fraction_along='0.5', post_cell_id=None, post_segment_id='0', post_fraction_along='0.5', extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + pre_cell_id=None, + pre_segment_id="0", + pre_fraction_along="0.5", + post_cell_id=None, + post_segment_id="0", + post_fraction_along="0.5", + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = 
kwargs_.get('parent_object_') - super(BaseConnectionOldFormat, self).__init__(neuro_lex_id, id, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("BaseConnectionOldFormat"), self).__init__( + neuro_lex_id, id, extensiontype_, **kwargs_ + ) self.pre_cell_id = _cast(None, pre_cell_id) + self.pre_cell_id_nsprefix_ = None self.pre_segment_id = _cast(int, pre_segment_id) + self.pre_segment_id_nsprefix_ = None self.pre_fraction_along = _cast(float, pre_fraction_along) + self.pre_fraction_along_nsprefix_ = None self.post_cell_id = _cast(None, post_cell_id) + self.post_cell_id_nsprefix_ = None self.post_segment_id = _cast(int, post_segment_id) + self.post_segment_id_nsprefix_ = None self.post_fraction_along = _cast(float, post_fraction_along) + self.post_fraction_along_nsprefix_ = None self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, BaseConnectionOldFormat) + CurrentSubclassModule_, BaseConnectionOldFormat + ) if subclass is not None: return subclass(*args_, **kwargs_) if BaseConnectionOldFormat.subclass: return BaseConnectionOldFormat.subclass(*args_, **kwargs_) else: return BaseConnectionOldFormat(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NonNegativeInteger(self, value): # Validate type NonNegativeInteger, a restriction on xs:nonNegativeInteger. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, int): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' + % { + "value": value, + "lineno": lineno, + } + ) + return False pass + def validate_ZeroToOne(self, value): # Validate type ZeroToOne, a restriction on xs:float. 
- if value is not None and Validate_simpletypes_: - if value < 0: - warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on ZeroToOne' % {"value" : value} ) - if value > 1: - warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on ZeroToOne' % {"value" : value} ) - def hasContent_(self): if ( - super(BaseConnectionOldFormat, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, float): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (float)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if value < 0: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on ZeroToOne' + % {"value": value, "lineno": lineno} + ) + result = False + if value > 1: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s does not match xsd maxInclusive restriction on ZeroToOne' + % {"value": value, "lineno": lineno} + ) + result = False + + def _hasContent(self): + if super(BaseConnectionOldFormat, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseConnectionOldFormat', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BaseConnectionOldFormat') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseConnectionOldFormat", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BaseConnectionOldFormat") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == 
"BaseConnectionOldFormat": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConnectionOldFormat') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseConnectionOldFormat', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseConnectionOldFormat'): - super(BaseConnectionOldFormat, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConnectionOldFormat') - if self.pre_cell_id is not None and 'pre_cell_id' not in already_processed: - already_processed.add('pre_cell_id') - outfile.write(' preCellId=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.pre_cell_id), input_name='preCellId')), )) - if self.pre_segment_id != 0 and 'pre_segment_id' not in already_processed: - already_processed.add('pre_segment_id') - outfile.write(' preSegmentId=%s' % (quote_attrib(self.pre_segment_id), )) - if self.pre_fraction_along != 0.5 and 'pre_fraction_along' not in already_processed: - already_processed.add('pre_fraction_along') - outfile.write(' preFractionAlong=%s' % (quote_attrib(self.pre_fraction_along), )) - if self.post_cell_id is not None and 'post_cell_id' not in already_processed: - already_processed.add('post_cell_id') - outfile.write(' postCellId=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.post_cell_id), input_name='postCellId')), )) - if self.post_segment_id != 0 and 'post_segment_id' not in already_processed: - 
already_processed.add('post_segment_id') - outfile.write(' postSegmentId=%s' % (quote_attrib(self.post_segment_id), )) - if self.post_fraction_along != 0.5 and 'post_fraction_along' not in already_processed: - already_processed.add('post_fraction_along') - outfile.write(' postFractionAlong=%s' % (quote_attrib(self.post_fraction_along), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseConnectionOldFormat", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BaseConnectionOldFormat", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BaseConnectionOldFormat", + ): + super(BaseConnectionOldFormat, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseConnectionOldFormat", + ) + if self.pre_cell_id is not None and "pre_cell_id" not in already_processed: + already_processed.add("pre_cell_id") + outfile.write( + " preCellId=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.pre_cell_id), input_name="preCellId" + ) + ), + ) + ) + if self.pre_segment_id != 0 and "pre_segment_id" not in already_processed: + already_processed.add("pre_segment_id") + outfile.write( + ' preSegmentId="%s"' + % self.gds_format_integer( + self.pre_segment_id, input_name="preSegmentId" + ) + ) + if ( + self.pre_fraction_along != 0.5 + and "pre_fraction_along" not in already_processed + ): + already_processed.add("pre_fraction_along") 
+ outfile.write( + ' preFractionAlong="%s"' + % self.gds_format_float( + self.pre_fraction_along, input_name="preFractionAlong" + ) + ) + if self.post_cell_id is not None and "post_cell_id" not in already_processed: + already_processed.add("post_cell_id") + outfile.write( + " postCellId=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.post_cell_id), input_name="postCellId" + ) + ), + ) + ) + if self.post_segment_id != 0 and "post_segment_id" not in already_processed: + already_processed.add("post_segment_id") + outfile.write( + ' postSegmentId="%s"' + % self.gds_format_integer( + self.post_segment_id, input_name="postSegmentId" + ) + ) + if ( + self.post_fraction_along != 0.5 + and "post_fraction_along" not in already_processed + ): + already_processed.add("post_fraction_along") + outfile.write( + ' postFractionAlong="%s"' + % self.gds_format_float( + self.post_fraction_along, input_name="postFractionAlong" + ) + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseConnectionOldFormat', fromsubclass_=False, pretty_print=True): - super(BaseConnectionOldFormat, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseConnectionOldFormat", + fromsubclass_=False, + pretty_print=True, + ): + super(BaseConnectionOldFormat, 
self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('preCellId', node) - if value is not None and 'preCellId' not in already_processed: - already_processed.add('preCellId') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("preCellId", node) + if value is not None and "preCellId" not in already_processed: + already_processed.add("preCellId") self.pre_cell_id = value - value = find_attr_value_('preSegmentId', node) - if value is not None and 'preSegmentId' not in already_processed: - already_processed.add('preSegmentId') - try: - self.pre_segment_id = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + value = find_attr_value_("preSegmentId", node) + if value is not None and "preSegmentId" not in already_processed: + already_processed.add("preSegmentId") + self.pre_segment_id = self.gds_parse_integer(value, node, "preSegmentId") if self.pre_segment_id < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - self.validate_NonNegativeInteger(self.pre_segment_id) # validate type NonNegativeInteger - value = find_attr_value_('preFractionAlong', node) - if value is not None and 'preFractionAlong' not in already_processed: - 
already_processed.add('preFractionAlong') - try: - self.pre_fraction_along = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (preFractionAlong): %s' % exp) - self.validate_ZeroToOne(self.pre_fraction_along) # validate type ZeroToOne - value = find_attr_value_('postCellId', node) - if value is not None and 'postCellId' not in already_processed: - already_processed.add('postCellId') + raise_parse_error(node, "Invalid NonNegativeInteger") + self.validate_NonNegativeInteger( + self.pre_segment_id + ) # validate type NonNegativeInteger + value = find_attr_value_("preFractionAlong", node) + if value is not None and "preFractionAlong" not in already_processed: + already_processed.add("preFractionAlong") + value = self.gds_parse_float(value, node, "preFractionAlong") + self.pre_fraction_along = value + self.validate_ZeroToOne(self.pre_fraction_along) # validate type ZeroToOne + value = find_attr_value_("postCellId", node) + if value is not None and "postCellId" not in already_processed: + already_processed.add("postCellId") self.post_cell_id = value - value = find_attr_value_('postSegmentId', node) - if value is not None and 'postSegmentId' not in already_processed: - already_processed.add('postSegmentId') - try: - self.post_segment_id = int(value) - except ValueError as exp: - raise_parse_error(node, 'Bad integer attribute: %s' % exp) + value = find_attr_value_("postSegmentId", node) + if value is not None and "postSegmentId" not in already_processed: + already_processed.add("postSegmentId") + self.post_segment_id = self.gds_parse_integer(value, node, "postSegmentId") if self.post_segment_id < 0: - raise_parse_error(node, 'Invalid NonNegativeInteger') - self.validate_NonNegativeInteger(self.post_segment_id) # validate type NonNegativeInteger - value = find_attr_value_('postFractionAlong', node) - if value is not None and 'postFractionAlong' not in already_processed: - already_processed.add('postFractionAlong') - try: - 
self.post_fraction_along = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (postFractionAlong): %s' % exp) - self.validate_ZeroToOne(self.post_fraction_along) # validate type ZeroToOne - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + raise_parse_error(node, "Invalid NonNegativeInteger") + self.validate_NonNegativeInteger( + self.post_segment_id + ) # validate type NonNegativeInteger + value = find_attr_value_("postFractionAlong", node) + if value is not None and "postFractionAlong" not in already_processed: + already_processed.add("postFractionAlong") + value = self.gds_parse_float(value, node, "postFractionAlong") + self.post_fraction_along = value + self.validate_ZeroToOne(self.post_fraction_along) # validate type ZeroToOne + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(BaseConnectionOldFormat, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BaseConnectionOldFormat, self).buildChildren(child_, node, nodeName_, True) + super(BaseConnectionOldFormat, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(BaseConnectionOldFormat, self)._buildChildren( + child_, node, nodeName_, True + ) pass + + # end class BaseConnectionOldFormat class Projection(BaseProjection): - """Projection (set of synaptic connections) between two populations. 
- Chemical/event based synaptic transmission""" + """Projection -- Projection from one population, **presynapticPopulation** to another, **postsynapticPopulation,** through **synapse.** Contains lists of **connection** or **connectionWD** elements.""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('synapse', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('connections', 'Connection', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'Connection', u'name': u'connection', u'minOccurs': u'0'}, None), - MemberSpec_('connection_wds', 'ConnectionWD', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'ConnectionWD', u'name': u'connectionWD', u'minOccurs': u'0'}, None), + MemberSpec_("synapse", "NmlId", 0, 0, {"use": "required", "name": "synapse"}), + MemberSpec_( + "connections", + "Connection", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "connection", + "type": "Connection", + }, + None, + ), + MemberSpec_( + "connection_wds", + "ConnectionWD", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "connectionWD", + "type": "ConnectionWD", + }, + None, + ), ] subclass = None superclass = BaseProjection - def __init__(self, neuro_lex_id=None, id=None, presynaptic_population=None, postsynaptic_population=None, synapse=None, connections=None, connection_wds=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + presynaptic_population=None, + postsynaptic_population=None, + synapse=None, + connections=None, + connection_wds=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Projection, self).__init__(neuro_lex_id, id, presynaptic_population, postsynaptic_population, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("Projection"), self).__init__( + neuro_lex_id, id, 
presynaptic_population, postsynaptic_population, **kwargs_ + ) self.synapse = _cast(None, synapse) + self.synapse_nsprefix_ = None if connections is None: self.connections = [] else: self.connections = connections + self.connections_nsprefix_ = None if connection_wds is None: self.connection_wds = [] else: self.connection_wds = connection_wds + self.connection_wds_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Projection) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Projection) if subclass is not None: return subclass(*args_, **kwargs_) if Projection.subclass: return Projection.subclass(*args_, **kwargs_) else: return Projection(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + + def _hasContent(self): if ( - self.connections or - self.connection_wds or - super(Projection, self).hasContent_() + 
self.connections + or self.connection_wds + or super(Projection, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Projection', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Projection') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="Projection", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Projection") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Projection": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Projection') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Projection', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Projection" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Projection", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, 
already_processed, namespaceprefix_='', name_='Projection'): - super(Projection, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Projection') - if self.synapse is not None and 'synapse' not in already_processed: - already_processed.add('synapse') - outfile.write(' synapse=%s' % (quote_attrib(self.synapse), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Projection', fromsubclass_=False, pretty_print=True): - super(Projection, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Projection" + ): + super(Projection, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Projection" + ) + if self.synapse is not None and "synapse" not in already_processed: + already_processed.add("synapse") + outfile.write( + " synapse=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.synapse), input_name="synapse" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="Projection", + fromsubclass_=False, + pretty_print=True, + ): + super(Projection, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for connection_ in self.connections: - connection_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='connection', pretty_print=pretty_print) + namespaceprefix_ = ( + self.connections_nsprefix_ + ":" + if (UseCapturedNS_ and self.connections_nsprefix_) + else "" + ) + connection_.export( + outfile, + level, + namespaceprefix_, + 
namespacedef_="", + name_="connection", + pretty_print=pretty_print, + ) for connectionWD_ in self.connection_wds: - connectionWD_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='connectionWD', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.connection_wds_nsprefix_ + ":" + if (UseCapturedNS_ and self.connection_wds_nsprefix_) + else "" + ) + connectionWD_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="connectionWD", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('synapse', node) - if value is not None and 'synapse' not in already_processed: - already_processed.add('synapse') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("synapse", node) + if value is not None and "synapse" not in already_processed: + already_processed.add("synapse") self.synapse = value - self.validate_NmlId(self.synapse) # validate type NmlId - super(Projection, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'connection': + self.validate_NmlId(self.synapse) # validate type NmlId + super(Projection, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == 
"connection": obj_ = Connection.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.connections.append(obj_) - obj_.original_tagname_ = 'connection' - elif nodeName_ == 'connectionWD': + obj_.original_tagname_ = "connection" + elif nodeName_ == "connectionWD": obj_ = ConnectionWD.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.connection_wds.append(obj_) - obj_.original_tagname_ = 'connectionWD' - super(Projection, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "connectionWD" + super(Projection, self)._buildChildren(child_, node, nodeName_, True) def exportHdf5(self, h5file, h5Group): - #print("Exporting Projection: "+str(self.id)+" as HDF5") - + """Export to HDF5 file.""" + # print("Exporting Projection: "+str(self.id)+" as HDF5") import numpy - projGroup = h5file.create_group(h5Group, 'projection_'+self.id) + projGroup = h5file.create_group(h5Group, "projection_" + self.id) projGroup._f_setattr("id", self.id) projGroup._f_setattr("type", "projection") projGroup._f_setattr("presynapticPopulation", self.presynaptic_population) projGroup._f_setattr("postsynapticPopulation", self.postsynaptic_population) projGroup._f_setattr("synapse", self.synapse) - #print("Exporting "+str(len(self.connections))+" connections, "+str(len(self.connection_wds))+" connections with weight") + # print("Exporting "+str(len(self.connections))+" connections, "+str(len(self.connection_wds))+" connections with weight") connection_wds = len(self.connection_wds) > 0 @@ -17318,511 +41411,1286 @@ def exportHdf5(self, h5file, h5Group): from neuroml.utils import has_segment_fraction_info - include_segment_fraction = has_segment_fraction_info(self.connections) or has_segment_fraction_info(self.connection_wds) + include_segment_fraction = has_segment_fraction_info( + self.connections + ) or has_segment_fraction_info(self.connection_wds) if 
include_segment_fraction: - extra_cols["column_"+str(cols)] = "pre_segment_id" - extra_cols["column_"+str(cols+1)] = "post_segment_id" - extra_cols["column_"+str(cols+2)] = "pre_fraction_along" - extra_cols["column_"+str(cols+3)] = "post_fraction_along" - cols +=4 - + extra_cols["column_" + str(cols)] = "pre_segment_id" + extra_cols["column_" + str(cols + 1)] = "post_segment_id" + extra_cols["column_" + str(cols + 2)] = "pre_fraction_along" + extra_cols["column_" + str(cols + 3)] = "post_fraction_along" + cols += 4 if connection_wds: - extra_cols["column_"+str(cols)] = "weight" - extra_cols["column_"+str(cols+1)] = "delay" - cols+=2 + extra_cols["column_" + str(cols)] = "weight" + extra_cols["column_" + str(cols + 1)] = "delay" + cols += 2 - a = numpy.zeros([len(self.connections)+len(self.connection_wds), cols], numpy.float32) + a = numpy.zeros( + [len(self.connections) + len(self.connection_wds), cols], numpy.float32 + ) - count=0 + count = 0 for connection in self.connections: - ####a[count,0] = connection.id - a[count,0] = connection.get_pre_cell_id() - a[count,1] = connection.get_post_cell_id() - if include_segment_fraction: - a[count,2] = connection.pre_segment_id - a[count,3] = connection.post_segment_id - a[count,4] = connection.pre_fraction_along - a[count,5] = connection.post_fraction_along - count=count+1 + ####a[count,0] = connection.id + a[count, 0] = connection.get_pre_cell_id() + a[count, 1] = connection.get_post_cell_id() + if include_segment_fraction: + a[count, 2] = connection.pre_segment_id + a[count, 3] = connection.post_segment_id + a[count, 4] = connection.pre_fraction_along + a[count, 5] = connection.post_fraction_along + count = count + 1 for connection in self.connection_wds: - ###a[count,0] = connection.id - a[count,0] = connection.get_pre_cell_id() - a[count,1] = connection.get_post_cell_id() - - if include_segment_fraction: - a[count,2] = connection.pre_segment_id - a[count,3] = connection.post_segment_id - a[count,4] = 
connection.pre_fraction_along - a[count,5] = connection.post_fraction_along - - a[count,cols-2] = connection.weight - if 'ms' in connection.delay: - delay = float(connection.delay[:-2].strip()) - elif 's' in connection.delay: - delay = float(connection.delay[:-1].strip())*1000. - elif 'us' in connection.delay: - delay = float(connection.delay[:-2].strip())/1e3 - - a[count,cols-1] = delay - count=count+1 - - if len(a)>0: - array = h5file.create_carray(projGroup, self.id, obj=a, title="Connections of cells in "+ self.id) + ###a[count,0] = connection.id + a[count, 0] = connection.get_pre_cell_id() + a[count, 1] = connection.get_post_cell_id() + + if include_segment_fraction: + a[count, 2] = connection.pre_segment_id + a[count, 3] = connection.post_segment_id + a[count, 4] = connection.pre_fraction_along + a[count, 5] = connection.post_fraction_along + + a[count, cols - 2] = connection.weight + if "ms" in connection.delay: + delay = float(connection.delay[:-2].strip()) + elif "s" in connection.delay: + delay = float(connection.delay[:-1].strip()) * 1000.0 + elif "us" in connection.delay: + delay = float(connection.delay[:-2].strip()) / 1e3 + + a[count, cols - 1] = delay + count = count + 1 + + if len(a) > 0: + array = h5file.create_carray( + projGroup, self.id, obj=a, title="Connections of cells in " + self.id + ) ###array._f_setattr("column_0", "id") array._f_setattr("column_0", "pre_cell_id") array._f_setattr("column_1", "post_cell_id") for col in extra_cols.keys(): - array._f_setattr(col,extra_cols[col]) - + array._f_setattr(col, extra_cols[col]) def __str__(self): - return "Projection: "+self.id+" from "+self.presynaptic_population+" to "+self.postsynaptic_population+", synapse: "+self.synapse - - - + return ( + "Projection: " + + self.id + + " from " + + self.presynaptic_population + + " to " + + self.postsynaptic_population + + ", synapse: " + + self.synapse + ) # end class Projection class SpikeGeneratorRefPoisson(SpikeGeneratorPoisson): + 
"""SpikeGeneratorRefPoisson -- Generator of spikes whose ISI distribution is the maximum entropy distribution over [ **minimumISI,** +infinity ) with mean: 1 / **averageRate** + \n + :param minimumISI: The minimum interspike interval + :type minimumISI: time + :param averageRate: The average rate at which spikes are emitted + :type averageRate: per_time + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('minimum_isi', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_( + "minimum_isi", + "Nml2Quantity_time", + 0, + 0, + {"use": "required", "name": "minimum_isi"}, + ), ] subclass = None superclass = SpikeGeneratorPoisson - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, average_rate=None, minimum_isi=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + average_rate=None, + minimum_isi=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(SpikeGeneratorRefPoisson, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, average_rate, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("SpikeGeneratorRefPoisson"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + average_rate, + **kwargs_ + ) self.minimum_isi = _cast(None, minimum_isi) + self.minimum_isi_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, SpikeGeneratorRefPoisson) + CurrentSubclassModule_, SpikeGeneratorRefPoisson + ) if subclass is not None: return subclass(*args_, **kwargs_) if SpikeGeneratorRefPoisson.subclass: return 
SpikeGeneratorRefPoisson.subclass(*args_, **kwargs_) else: return SpikeGeneratorRefPoisson(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] - def hasContent_(self): if ( - super(SpikeGeneratorRefPoisson, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_time_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$"] + ] + + def _hasContent(self): + if super(SpikeGeneratorRefPoisson, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeGeneratorRefPoisson', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SpikeGeneratorRefPoisson') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpikeGeneratorRefPoisson", + pretty_print=True, + ): + imported_ns_def_ = 
GenerateDSNamespaceDefs_.get("SpikeGeneratorRefPoisson") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "SpikeGeneratorRefPoisson": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGeneratorRefPoisson') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SpikeGeneratorRefPoisson', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="SpikeGeneratorRefPoisson", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SpikeGeneratorRefPoisson", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SpikeGeneratorRefPoisson'): - super(SpikeGeneratorRefPoisson, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SpikeGeneratorRefPoisson') - if self.minimum_isi is not None and 'minimum_isi' not in already_processed: - already_processed.add('minimum_isi') - outfile.write(' minimumISI=%s' % (quote_attrib(self.minimum_isi), )) - def 
exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SpikeGeneratorRefPoisson', fromsubclass_=False, pretty_print=True): - super(SpikeGeneratorRefPoisson, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="SpikeGeneratorRefPoisson", + ): + super(SpikeGeneratorRefPoisson, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="SpikeGeneratorRefPoisson", + ) + if self.minimum_isi is not None and "minimum_isi" not in already_processed: + already_processed.add("minimum_isi") + outfile.write( + " minimumISI=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.minimum_isi), input_name="minimumISI" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SpikeGeneratorRefPoisson", + fromsubclass_=False, + pretty_print=True, + ): + super(SpikeGeneratorRefPoisson, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('minimumISI', 
node) - if value is not None and 'minimumISI' not in already_processed: - already_processed.add('minimumISI') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("minimumISI", node) + if value is not None and "minimumISI" not in already_processed: + already_processed.add("minimumISI") self.minimum_isi = value - self.validate_Nml2Quantity_time(self.minimum_isi) # validate type Nml2Quantity_time - super(SpikeGeneratorRefPoisson, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(SpikeGeneratorRefPoisson, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_time( + self.minimum_isi + ) # validate type Nml2Quantity_time + super(SpikeGeneratorRefPoisson, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(SpikeGeneratorRefPoisson, self)._buildChildren( + child_, node, nodeName_, True + ) pass + + # end class SpikeGeneratorRefPoisson class ConcentrationModel_D(DecayingPoolConcentrationModel): + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('type', 'xs:string', 0, 0, {'use': u'required'}), + MemberSpec_("type", "xs:string", 0, 0, {"use": "required", "name": "type"}), ] subclass = None superclass = DecayingPoolConcentrationModel - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, ion=None, resting_conc=None, decay_constant=None, shell_thickness=None, type='decayingPoolConcentrationModel', **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + ion=None, + resting_conc=None, + decay_constant=None, + shell_thickness=None, + type="decayingPoolConcentrationModel", + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + 
self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ConcentrationModel_D, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, ion, resting_conc, decay_constant, shell_thickness, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ConcentrationModel_D"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + ion, + resting_conc, + decay_constant, + shell_thickness, + **kwargs_ + ) self.type = _cast(None, type) + self.type_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ConcentrationModel_D) + CurrentSubclassModule_, ConcentrationModel_D + ) if subclass is not None: return subclass(*args_, **kwargs_) if ConcentrationModel_D.subclass: return ConcentrationModel_D.subclass(*args_, **kwargs_) else: return ConcentrationModel_D(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(ConcentrationModel_D, self).hasContent_() - ): + + def _hasContent(self): + if super(ConcentrationModel_D, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ConcentrationModel_D', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ConcentrationModel_D') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ConcentrationModel_D", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ConcentrationModel_D") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ConcentrationModel_D": name_ = self.original_tagname_ + if UseCapturedNS_ 
and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ConcentrationModel_D') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ConcentrationModel_D', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ConcentrationModel_D", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ConcentrationModel_D", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ConcentrationModel_D'): - super(ConcentrationModel_D, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ConcentrationModel_D') - if self.type != "decayingPoolConcentrationModel" and 'type' not in already_processed: - already_processed.add('type') - outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type), input_name='type')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ConcentrationModel_D', fromsubclass_=False, pretty_print=True): - super(ConcentrationModel_D, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - 
self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ConcentrationModel_D", + ): + super(ConcentrationModel_D, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ConcentrationModel_D", + ) + if self.type is not None and "type" not in already_processed: + already_processed.add("type") + outfile.write( + " type=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.type), input_name="type" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ConcentrationModel_D", + fromsubclass_=False, + pretty_print=True, + ): + super(ConcentrationModel_D, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('type', node) - if value is not None and 'type' not in already_processed: - already_processed.add('type') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") self.type = value - super(ConcentrationModel_D, self).buildAttributes(node, attrs, already_processed) - def 
buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ConcentrationModel_D, self).buildChildren(child_, node, nodeName_, True) + super(ConcentrationModel_D, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(ConcentrationModel_D, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class ConcentrationModel_D class ChannelDensityNernstCa2(ChannelDensityNernst): - member_data_items_ = [ - ] + """ChannelDensityNernstCa2 -- This component is similar to the original component type **channelDensityNernst** but it is changed in order to have a reversal potential that depends on a second independent Ca++ pool ( ca2 ). See https://github.com/OpenSourceBrain/ghk-nernst. + \n + :param condDensity: + :type condDensity: conductanceDensity + + """ + + __hash__ = GeneratedsSuper.__hash__ + member_data_items_ = [] subclass = None superclass = ChannelDensityNernst - def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, cond_density=None, segment_groups='all', segments=None, ion=None, variable_parameters=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + ion_channel=None, + cond_density=None, + segment_groups="all", + segments=None, + ion=None, + variable_parameters=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ChannelDensityNernstCa2, self).__init__(neuro_lex_id, id, ion_channel, cond_density, segment_groups, segments, ion, variable_parameters, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ChannelDensityNernstCa2"), self).__init__( + neuro_lex_id, + id, + ion_channel, + cond_density, + segment_groups, + segments, + ion, + variable_parameters, + **kwargs_ + ) + 
def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ChannelDensityNernstCa2) + CurrentSubclassModule_, ChannelDensityNernstCa2 + ) if subclass is not None: return subclass(*args_, **kwargs_) if ChannelDensityNernstCa2.subclass: return ChannelDensityNernstCa2.subclass(*args_, **kwargs_) else: return ChannelDensityNernstCa2(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(ChannelDensityNernstCa2, self).hasContent_() - ): + + def _hasContent(self): + if super(ChannelDensityNernstCa2, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityNernstCa2', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ChannelDensityNernstCa2') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ChannelDensityNernstCa2", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ChannelDensityNernstCa2") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ChannelDensityNernstCa2": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNernstCa2') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityNernstCa2', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + 
name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityNernstCa2", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ChannelDensityNernstCa2", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensityNernstCa2'): - super(ChannelDensityNernstCa2, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityNernstCa2') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityNernstCa2', fromsubclass_=False, pretty_print=True): - super(ChannelDensityNernstCa2, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ChannelDensityNernstCa2", + ): + super(ChannelDensityNernstCa2, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityNernstCa2", + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ChannelDensityNernstCa2", + fromsubclass_=False, + pretty_print=True, + ): + super(ChannelDensityNernstCa2, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, 
gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(ChannelDensityNernstCa2, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ChannelDensityNernstCa2, self).buildChildren(child_, node, nodeName_, True) + + def _buildAttributes(self, node, attrs, already_processed): + super(ChannelDensityNernstCa2, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(ChannelDensityNernstCa2, self)._buildChildren( + child_, node, nodeName_, True + ) pass + + # end class ChannelDensityNernstCa2 class ChannelDensityVShift(ChannelDensity): + """ChannelDensityVShift -- Same as **channelDensity** , but with a **vShift** parameter to change voltage activation of gates. The exact usage of **vShift** in expressions for rates is determined by the individual gates. 
+ \n + :param vShift: + :type vShift: voltage + :param erev: The reversal potential of the current produced + :type erev: voltage + :param condDensity: + :type condDensity: conductanceDensity + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('v_shift', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), + MemberSpec_( + "v_shift", + "Nml2Quantity_voltage", + 0, + 0, + {"use": "required", "name": "v_shift"}, + ), ] subclass = None superclass = ChannelDensity - def __init__(self, neuro_lex_id=None, id=None, ion_channel=None, cond_density=None, erev=None, segment_groups='all', segments=None, ion=None, variable_parameters=None, v_shift=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + ion_channel=None, + cond_density=None, + erev=None, + segment_groups="all", + segments=None, + ion=None, + variable_parameters=None, + v_shift=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ChannelDensityVShift, self).__init__(neuro_lex_id, id, ion_channel, cond_density, erev, segment_groups, segments, ion, variable_parameters, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ChannelDensityVShift"), self).__init__( + neuro_lex_id, + id, + ion_channel, + cond_density, + erev, + segment_groups, + segments, + ion, + variable_parameters, + **kwargs_ + ) self.v_shift = _cast(None, v_shift) + self.v_shift_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ChannelDensityVShift) + CurrentSubclassModule_, ChannelDensityVShift + ) if subclass is not None: return subclass(*args_, **kwargs_) if ChannelDensityVShift.subclass: return ChannelDensityVShift.subclass(*args_, **kwargs_) else: return 
ChannelDensityVShift(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] - def hasContent_(self): - if ( - super(ChannelDensityVShift, self).hasContent_() - ): + self.validate_Nml2Quantity_voltage_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$"] + ] + + def _hasContent(self): + if super(ChannelDensityVShift, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityVShift', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ChannelDensityVShift') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ChannelDensityVShift", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ChannelDensityVShift") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = 
'\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ChannelDensityVShift": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityVShift') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChannelDensityVShift', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityVShift", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ChannelDensityVShift", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChannelDensityVShift'): - super(ChannelDensityVShift, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChannelDensityVShift') - if self.v_shift is not None and 'v_shift' not in already_processed: - already_processed.add('v_shift') - outfile.write(' vShift=%s' % (quote_attrib(self.v_shift), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ChannelDensityVShift', fromsubclass_=False, pretty_print=True): - super(ChannelDensityVShift, 
self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ChannelDensityVShift", + ): + super(ChannelDensityVShift, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ChannelDensityVShift", + ) + if self.v_shift is not None and "v_shift" not in already_processed: + already_processed.add("v_shift") + outfile.write( + " vShift=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.v_shift), input_name="vShift" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ChannelDensityVShift", + fromsubclass_=False, + pretty_print=True, + ): + super(ChannelDensityVShift, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('vShift', node) - if value is not None and 'vShift' not in already_processed: - already_processed.add('vShift') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("vShift", node) + if value is not None and 
"vShift" not in already_processed: + already_processed.add("vShift") self.v_shift = value - self.validate_Nml2Quantity_voltage(self.v_shift) # validate type Nml2Quantity_voltage - super(ChannelDensityVShift, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ChannelDensityVShift, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_voltage( + self.v_shift + ) # validate type Nml2Quantity_voltage + super(ChannelDensityVShift, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(ChannelDensityVShift, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class ChannelDensityVShift class Cell(BaseCell): - """Should only be used if morphology element is outside the cell. This - points to the id of the morphology Should only be used if - biophysicalProperties element is outside the cell. This points - to the id of the biophysicalProperties""" + """Cell -- Cell with **segment** s specified in a **morphology** element along with details on its **biophysicalProperties** . 
NOTE: this can only be correctly simulated using jLEMS when there is a single segment in the cell, and **v** of this cell represents the membrane potential in that isopotential segment.""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('morphology_attr', 'xs:string', 0, 1, {'use': 'optional'}), - MemberSpec_('biophysical_properties_attr', 'xs:string', 0, 1, {'use': 'optional'}), - MemberSpec_('morphology', 'Morphology', 0, 1, {u'type': u'Morphology', u'name': u'morphology', u'minOccurs': u'0'}, None), - MemberSpec_('biophysical_properties', 'BiophysicalProperties', 0, 1, {u'type': u'BiophysicalProperties', u'name': u'biophysicalProperties', u'minOccurs': u'0'}, None), + MemberSpec_( + "morphology_attr", + "NmlId", + 0, + 1, + {"use": "optional", "name": "morphology_attr"}, + ), + MemberSpec_( + "biophysical_properties_attr", + "NmlId", + 0, + 1, + {"use": "optional", "name": "biophysical_properties_attr"}, + ), + MemberSpec_( + "morphology", + "Morphology", + 0, + 1, + {"minOccurs": "0", "name": "morphology", "type": "Morphology"}, + None, + ), + MemberSpec_( + "biophysical_properties", + "BiophysicalProperties", + 0, + 1, + { + "minOccurs": "0", + "name": "biophysicalProperties", + "type": "BiophysicalProperties", + }, + None, + ), ] subclass = None superclass = BaseCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, morphology_attr=None, biophysical_properties_attr=None, morphology=None, biophysical_properties=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + morphology_attr=None, + biophysical_properties_attr=None, + morphology=None, + biophysical_properties=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = 
kwargs_.get('parent_object_') - super(Cell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("Cell"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + extensiontype_, + **kwargs_ + ) self.morphology_attr = _cast(None, morphology_attr) + self.morphology_attr_nsprefix_ = None self.biophysical_properties_attr = _cast(None, biophysical_properties_attr) + self.biophysical_properties_attr_nsprefix_ = None self.morphology = morphology + self.morphology_nsprefix_ = None self.biophysical_properties = biophysical_properties + self.biophysical_properties_nsprefix_ = None self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Cell) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Cell) if subclass is not None: return subclass(*args_, **kwargs_) if Cell.subclass: return Cell.subclass(*args_, **kwargs_) else: return Cell(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): + + def validate_NmlId(self, value): + # Validate type NmlId, a restriction on xs:string. 
+ if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + + def _hasContent(self): if ( - self.morphology is not None or - self.biophysical_properties is not None or - super(Cell, self).hasContent_() + self.morphology is not None + or self.biophysical_properties is not None + or super(Cell, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Cell', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Cell') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="Cell", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Cell") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Cell": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Cell') - if 
self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Cell', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Cell" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Cell", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Cell'): - super(Cell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Cell') - if self.morphology_attr is not None and 'morphology_attr' not in already_processed: - already_processed.add('morphology_attr') - outfile.write(' morphology=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.morphology_attr), input_name='morphology_attr')), )) - if self.biophysical_properties_attr is not None and 'biophysical_properties_attr' not in already_processed: - already_processed.add('biophysical_properties_attr') - outfile.write(' biophysicalProperties=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.biophysical_properties_attr), input_name='biophysical_properties_attr')), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Cell" + ): + super(Cell, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Cell" 
+ ) + if ( + self.morphology_attr is not None + and "morphology_attr" not in already_processed + ): + already_processed.add("morphology_attr") + outfile.write( + " morphology=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.morphology_attr), + input_name="morphology_attr", + ) + ), + ) + ) + if ( + self.biophysical_properties_attr is not None + and "biophysical_properties_attr" not in already_processed + ): + already_processed.add("biophysical_properties_attr") + outfile.write( + " biophysicalProperties=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.biophysical_properties_attr), + input_name="biophysical_properties_attr", + ) + ), + ) + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Cell', fromsubclass_=False, pretty_print=True): - super(Cell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="Cell", + fromsubclass_=False, + pretty_print=True, + ): + super(Cell, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.morphology is not None: - self.morphology.export(outfile, level, 
namespaceprefix_, namespacedef_='', name_='morphology', pretty_print=pretty_print) + namespaceprefix_ = ( + self.morphology_nsprefix_ + ":" + if (UseCapturedNS_ and self.morphology_nsprefix_) + else "" + ) + self.morphology.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="morphology", + pretty_print=pretty_print, + ) if self.biophysical_properties is not None: - self.biophysical_properties.export(outfile, level, namespaceprefix_, namespacedef_='', name_='biophysicalProperties', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.biophysical_properties_nsprefix_ + ":" + if (UseCapturedNS_ and self.biophysical_properties_nsprefix_) + else "" + ) + self.biophysical_properties.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="biophysicalProperties", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('morphology', node) - if value is not None and 'morphology_attr' not in already_processed: - already_processed.add('morphology_attr') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("morphology", node) + if value is not None and "morphology_attr" not in already_processed: + already_processed.add("morphology_attr") self.morphology_attr = value - value = find_attr_value_('biophysicalProperties', node) - if value is not None and 
'biophysical_properties_attr' not in already_processed: - already_processed.add('biophysical_properties_attr') + self.validate_NmlId(self.morphology_attr) # validate type NmlId + value = find_attr_value_("biophysicalProperties", node) + if value is not None and "biophysical_properties_attr" not in already_processed: + already_processed.add("biophysical_properties_attr") self.biophysical_properties_attr = value - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_NmlId(self.biophysical_properties_attr) # validate type NmlId + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(Cell, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'morphology': + super(Cell, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "morphology": obj_ = Morphology.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.morphology = obj_ - obj_.original_tagname_ = 'morphology' - elif nodeName_ == 'biophysicalProperties': + obj_.original_tagname_ = "morphology" + elif nodeName_ == "biophysicalProperties": obj_ = BiophysicalProperties.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.biophysical_properties = obj_ - obj_.original_tagname_ = 'biophysicalProperties' - super(Cell, self).buildChildren(child_, node, nodeName_, True) - + obj_.original_tagname_ = "biophysicalProperties" + super(Cell, self)._buildChildren(child_, node, nodeName_, True) # Get segment object by its id def get_segment(self, segment_id): + # type: (str) -> Segment 
"""Get segment object by its id :param segment_id: ID of segment @@ -17835,12 +42703,38 @@ def get_segment(self, segment_id): if segment.id == segment_id: return segment - raise Exception("Segment with id "+str(segment_id)+" not found in cell "+str(self.id)) + raise Exception( + "Segment with id " + str(segment_id) + " not found in cell " + str(self.id) + ) + + def get_segments_by_substring(self, substring): + # type: (str) -> dict + """Get a dictionary of segment IDs and the segment matching the specified substring + + :param substring: substring to match + :type substring: str + :return: dictionary with segment ID as key, and segment as value + :raises Exception: if no segments are found + + """ + segments = {} + if substring: + for segment in self.morphology.segments: + if substring in segment.id: + segments[segment.id] = segment + if len(segments) == 0: + raise Exception( + "Segments with id matching " + + str(substring) + + " not found in cell " + + str(self.id) + ) + return segments # Get the proximal point of a segment, even the proximal field is None and # so the proximal point is on the parent (at a point set by fraction_along) def get_actual_proximal(self, segment_id): - + # type: (str) -> Point3DWithDiam """Get the proximal point of a segment. 
Get the proximal point of a segment, even the proximal field is None @@ -17857,19 +42751,24 @@ def get_actual_proximal(self, segment_id): parent = self.get_segment(segment.parent.segments) fract = float(segment.parent.fraction_along) - if fract==1: + if fract == 1: return parent.distal - elif fract==0: + elif fract == 0: return self.get_actual_proximal(segment.parent.segments) else: pd = parent.distal pp = self.get_actual_proximal(segment.parent.segments) - p = Point3DWithDiam((1-fract)*pp.x+fract*pd.x, (1-fract)*pp.y+fract*pd.y, (1-fract)*pp.z+fract*pd.z) - p.diameter = (1-fract)*pp.diameter+fract*pd.diameter + p = Point3DWithDiam( + (1 - fract) * pp.x + fract * pd.x, + (1 - fract) * pp.y + fract * pd.y, + (1 - fract) * pp.z + fract * pd.z, + ) + p.diameter = (1 - fract) * pp.diameter + fract * pd.diameter return p def get_segment_length(self, segment_id): + # type: (str) -> float """Get the length of the segment. :param segment_id: ID of segment @@ -17887,6 +42786,7 @@ def get_segment_length(self, segment_id): return length def get_segment_surface_area(self, segment_id): + # type: (str) -> float """Get the surface area of the segment. :param segment_id: ID of the segment @@ -17904,6 +42804,7 @@ def get_segment_surface_area(self, segment_id): return temp_seg.surface_area def get_segment_volume(self, segment_id): + # type: (str) -> float """Get volume of segment :param segment_id: ID of the segment @@ -17920,6 +42821,7 @@ def get_segment_volume(self, segment_id): return temp_seg.volume def get_segment_ids_vs_segments(self): + # type: () -> Dict """Get a dictionary of segment IDs and the segments in the cell. 
:return: dictionary with segment ID as key, and segment as value @@ -17931,9 +42833,8 @@ def get_segment_ids_vs_segments(self): return segments - def get_all_segments_in_group(self, - segment_group, - assume_all_means_all=True): + def get_all_segments_in_group(self, segment_group, assume_all_means_all=True): + # type: (SegmentGroup, bool) -> List[Segment] """Get all the segments in a segment group of the cell. :param segment_group: segment group to get all segments of @@ -17953,10 +42854,14 @@ def get_all_segments_in_group(self, segment_group = sg if isinstance(segment_group, str): - if assume_all_means_all and segment_group=='all': # i.e. wasn't explicitly defined, but assume it means all segments + if ( + assume_all_means_all and segment_group == "all" + ): # i.e. wasn't explicitly defined, but assume it means all segments return [seg.id for seg in self.morphology.segments] - raise Exception('No segment group '+segment_group+ ' found in cell '+self.id) + raise Exception( + "No segment group " + segment_group + " found in cell " + self.id + ) all_segs = [] @@ -17964,7 +42869,6 @@ def get_all_segments_in_group(self, if not member.segments in all_segs: all_segs.append(member.segments) - for include in segment_group.includes: segs_here = self.get_all_segments_in_group(include.segment_groups) for s in segs_here: @@ -17973,13 +42877,15 @@ def get_all_segments_in_group(self, return all_segs - - def get_ordered_segments_in_groups(self, - group_list, - check_parentage=False, - include_cumulative_lengths=False, - include_path_lengths=False, - path_length_metric="Path Length from root"): # Only option supported + def get_ordered_segments_in_groups( + self, + group_list, + check_parentage=False, + include_cumulative_lengths=False, + include_path_lengths=False, + path_length_metric="Path Length from root", + ): # Only option supported + # type: (List, bool, bool, bool, str) -> Dict """ Get ordered list of segments in specified groups @@ -18014,12 +42920,13 @@ def 
get_ordered_segments_in_groups(self, ord_segs = {} from operator import attrgetter + for key in unord_segs.keys(): segs = unord_segs[key] - if len(segs)==1 or len(segs)==0: - ord_segs[key]=segs + if len(segs) == 1 or len(segs) == 0: + ord_segs[key] = segs else: - ord_segs[key]=sorted(segs,key=attrgetter('id'),reverse=False) + ord_segs[key] = sorted(segs, key=attrgetter("id"), reverse=False) if check_parentage: # check parent ordering @@ -18029,10 +42936,14 @@ def get_ordered_segments_in_groups(self, for s in ord_segs[key]: if s.id != ord_segs[key][0].id: if not s.parent or not s.parent.segments in existing_ids: - raise Exception("Problem with finding parent of seg: "+str(s)+" in list: "+str(ord_segs)) + raise Exception( + "Problem with finding parent of seg: " + + str(s) + + " in list: " + + str(ord_segs) + ) existing_ids.append(s.id) - if include_cumulative_lengths or include_path_lengths: import math @@ -18050,12 +42961,15 @@ def get_ordered_segments_in_groups(self, length = self.get_segment_length(seg.id) - if not seg.parent or not seg.parent.segments in path_lengths_to_distal[key]: + if ( + not seg.parent + or not seg.parent.segments in path_lengths_to_distal[key] + ): path_lengths_to_proximal[key][seg.id] = 0 last_seg = seg par_seg_element = seg.parent - while par_seg_element!=None: + while par_seg_element != None: par_seg = segments[par_seg_element.segments] d = par_seg.distal @@ -18065,28 +42979,30 @@ def get_ordered_segments_in_groups(self, par_seg_parent_seg = segments[par_seg.parent.segments] p = par_seg_parent_seg.distal - par_length = math.sqrt( (d.x-p.x)**2 + (d.y-p.y)**2 + (d.z-p.z)**2 ) + par_length = math.sqrt( + (d.x - p.x) ** 2 + (d.y - p.y) ** 2 + (d.z - p.z) ** 2 + ) fract = float(last_seg.parent.fraction_along) - path_lengths_to_proximal[key][seg.id] += par_length*fract + path_lengths_to_proximal[key][seg.id] += par_length * fract last_seg = par_seg par_seg_element = par_seg.parent - else: pd = path_lengths_to_distal[key][seg.parent.segments] 
pp = path_lengths_to_proximal[key][seg.parent.segments] fract = float(seg.parent.fraction_along) - path_lengths_to_proximal[key][seg.id] = pp + (pd - pp)*fract + path_lengths_to_proximal[key][seg.id] = pp + (pd - pp) * fract - path_lengths_to_distal[key][seg.id] = path_lengths_to_proximal[key][seg.id] + length + path_lengths_to_distal[key][seg.id] = ( + path_lengths_to_proximal[key][seg.id] + length + ) tot_len += length cumulative_lengths[key].append(tot_len) - if include_path_lengths and not include_cumulative_lengths: return ord_segs, path_lengths_to_proximal, path_lengths_to_distal @@ -18097,5630 +43013,14852 @@ def get_ordered_segments_in_groups(self, if include_cumulative_lengths and include_path_lengths: - return ord_segs, cumulative_lengths, path_lengths_to_proximal, path_lengths_to_distal + return ( + ord_segs, + cumulative_lengths, + path_lengths_to_proximal, + path_lengths_to_distal, + ) return ord_segs + def get_segment_group(self, sg_id): + # type: (str) -> SegmentGroup + """Return the SegmentGroup object for the specified segment group id. 
+ + :param sg_id: id of segment group to find + :type sg_id: str + :returns: SegmentGroup object of specified ID + :raises Exception: if segment group is not found in cell + """ + if sg_id: + for sg in self.morphology.segment_groups: + if sg.id == sg_id: + return sg + + raise Exception( + "Segment group with id " + str(sg_id) + " not found in cell " + str(self.id) + ) + def get_segment_groups_by_substring(self, substring): + # type: (str) -> dict + """Get a dictionary of segment group IDs and the segment groups matching the specified substring + + :param substring: substring to match + :type substring: str + :return: dictionary with segment group ID as key, and segment group as value + :raises Exception: if no segment groups are not found in cell + """ + sgs = {} + for sg in self.morphology.segment_groups: + if substring in sg.id: + sgs[sg.id] = sg + if len(sgs) == 0: + raise Exception( + "Segment group with id matching " + + str(substring) + + " not found in cell " + + str(self.id) + ) + return sgs def summary(self): """Print cell summary.""" print("*******************************************************") - print("* Cell: "+str(self.id)) - print("* Notes: "+str(self.notes)) - print("* Segments: "+str(len(self.morphology.segments))) - print("* SegmentGroups: "+str(len(self.morphology.segment_groups))) + print("* Cell: " + str(self.id)) + print("* Notes: " + str(self.notes)) + print("* Segments: " + str(len(self.morphology.segments))) + print("* SegmentGroups: " + str(len(self.morphology.segment_groups))) print("*******************************************************") # end class Cell class PinskyRinzelCA3Cell(BaseCell): + """PinskyRinzelCA3Cell -- Reduced CA3 cell model from Pinsky and Rinzel 1994. 
See https://github.com/OpenSourceBrain/PinskyRinzelModel + \n + :param iSoma: + :type iSoma: currentDensity + :param iDend: + :type iDend: currentDensity + :param gLs: + :type gLs: conductanceDensity + :param gLd: + :type gLd: conductanceDensity + :param gNa: + :type gNa: conductanceDensity + :param gKdr: + :type gKdr: conductanceDensity + :param gCa: + :type gCa: conductanceDensity + :param gKahp: + :type gKahp: conductanceDensity + :param gKC: + :type gKC: conductanceDensity + :param gc: + :type gc: conductanceDensity + :param eNa: + :type eNa: voltage + :param eCa: + :type eCa: voltage + :param eK: + :type eK: voltage + :param eL: + :type eL: voltage + :param pp: + :type pp: none + :param cm: + :type cm: specificCapacitance + :param alphac: + :type alphac: none + :param betac: + :type betac: none + :param gNmda: + :type gNmda: conductanceDensity + :param gAmpa: + :type gAmpa: conductanceDensity + :param qd0: + :type qd0: none + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('i_soma', 'Nml2Quantity_currentDensity', 0, 0, {'use': u'required'}), - MemberSpec_('i_dend', 'Nml2Quantity_currentDensity', 0, 0, {'use': u'required'}), - MemberSpec_('gc', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': u'required'}), - MemberSpec_('g_ls', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': u'required'}), - MemberSpec_('g_ld', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': u'required'}), - MemberSpec_('g_na', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': u'required'}), - MemberSpec_('g_kdr', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': u'required'}), - MemberSpec_('g_ca', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': u'required'}), - MemberSpec_('g_kahp', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': u'required'}), - MemberSpec_('g_kc', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': u'required'}), - MemberSpec_('g_nmda', 'Nml2Quantity_conductanceDensity', 0, 0, {'use': u'required'}), - MemberSpec_('g_ampa', 
'Nml2Quantity_conductanceDensity', 0, 0, {'use': u'required'}), - MemberSpec_('e_na', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('e_ca', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('e_k', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('e_l', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('qd0', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('pp', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('alphac', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('betac', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('cm', 'Nml2Quantity_specificCapacitance', 0, 0, {'use': u'required'}), + MemberSpec_( + "i_soma", + "Nml2Quantity_currentDensity", + 0, + 0, + {"use": "required", "name": "i_soma"}, + ), + MemberSpec_( + "i_dend", + "Nml2Quantity_currentDensity", + 0, + 0, + {"use": "required", "name": "i_dend"}, + ), + MemberSpec_( + "gc", + "Nml2Quantity_conductanceDensity", + 0, + 0, + {"use": "required", "name": "gc"}, + ), + MemberSpec_( + "g_ls", + "Nml2Quantity_conductanceDensity", + 0, + 0, + {"use": "required", "name": "g_ls"}, + ), + MemberSpec_( + "g_ld", + "Nml2Quantity_conductanceDensity", + 0, + 0, + {"use": "required", "name": "g_ld"}, + ), + MemberSpec_( + "g_na", + "Nml2Quantity_conductanceDensity", + 0, + 0, + {"use": "required", "name": "g_na"}, + ), + MemberSpec_( + "g_kdr", + "Nml2Quantity_conductanceDensity", + 0, + 0, + {"use": "required", "name": "g_kdr"}, + ), + MemberSpec_( + "g_ca", + "Nml2Quantity_conductanceDensity", + 0, + 0, + {"use": "required", "name": "g_ca"}, + ), + MemberSpec_( + "g_kahp", + "Nml2Quantity_conductanceDensity", + 0, + 0, + {"use": "required", "name": "g_kahp"}, + ), + MemberSpec_( + "g_kc", + "Nml2Quantity_conductanceDensity", + 0, + 0, + {"use": "required", "name": "g_kc"}, + ), + MemberSpec_( + "g_nmda", + "Nml2Quantity_conductanceDensity", + 0, + 0, + {"use": "required", 
"name": "g_nmda"}, + ), + MemberSpec_( + "g_ampa", + "Nml2Quantity_conductanceDensity", + 0, + 0, + {"use": "required", "name": "g_ampa"}, + ), + MemberSpec_( + "e_na", "Nml2Quantity_voltage", 0, 0, {"use": "required", "name": "e_na"} + ), + MemberSpec_( + "e_ca", "Nml2Quantity_voltage", 0, 0, {"use": "required", "name": "e_ca"} + ), + MemberSpec_( + "e_k", "Nml2Quantity_voltage", 0, 0, {"use": "required", "name": "e_k"} + ), + MemberSpec_( + "e_l", "Nml2Quantity_voltage", 0, 0, {"use": "required", "name": "e_l"} + ), + MemberSpec_( + "qd0", "Nml2Quantity_none", 0, 0, {"use": "required", "name": "qd0"} + ), + MemberSpec_("pp", "Nml2Quantity_none", 0, 0, {"use": "required", "name": "pp"}), + MemberSpec_( + "alphac", "Nml2Quantity_none", 0, 0, {"use": "required", "name": "alphac"} + ), + MemberSpec_( + "betac", "Nml2Quantity_none", 0, 0, {"use": "required", "name": "betac"} + ), + MemberSpec_( + "cm", + "Nml2Quantity_specificCapacitance", + 0, + 0, + {"use": "required", "name": "cm"}, + ), ] subclass = None superclass = BaseCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, i_soma=None, i_dend=None, gc=None, g_ls=None, g_ld=None, g_na=None, g_kdr=None, g_ca=None, g_kahp=None, g_kc=None, g_nmda=None, g_ampa=None, e_na=None, e_ca=None, e_k=None, e_l=None, qd0=None, pp=None, alphac=None, betac=None, cm=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + i_soma=None, + i_dend=None, + gc=None, + g_ls=None, + g_ld=None, + g_na=None, + g_kdr=None, + g_ca=None, + g_kahp=None, + g_kc=None, + g_nmda=None, + g_ampa=None, + e_na=None, + e_ca=None, + e_k=None, + e_l=None, + qd0=None, + pp=None, + alphac=None, + betac=None, + cm=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = 
kwargs_.get('parent_object_') - super(PinskyRinzelCA3Cell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("PinskyRinzelCA3Cell"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.i_soma = _cast(None, i_soma) + self.i_soma_nsprefix_ = None self.i_dend = _cast(None, i_dend) + self.i_dend_nsprefix_ = None self.gc = _cast(None, gc) + self.gc_nsprefix_ = None self.g_ls = _cast(None, g_ls) + self.g_ls_nsprefix_ = None self.g_ld = _cast(None, g_ld) + self.g_ld_nsprefix_ = None self.g_na = _cast(None, g_na) + self.g_na_nsprefix_ = None self.g_kdr = _cast(None, g_kdr) + self.g_kdr_nsprefix_ = None self.g_ca = _cast(None, g_ca) + self.g_ca_nsprefix_ = None self.g_kahp = _cast(None, g_kahp) + self.g_kahp_nsprefix_ = None self.g_kc = _cast(None, g_kc) + self.g_kc_nsprefix_ = None self.g_nmda = _cast(None, g_nmda) + self.g_nmda_nsprefix_ = None self.g_ampa = _cast(None, g_ampa) + self.g_ampa_nsprefix_ = None self.e_na = _cast(None, e_na) + self.e_na_nsprefix_ = None self.e_ca = _cast(None, e_ca) + self.e_ca_nsprefix_ = None self.e_k = _cast(None, e_k) + self.e_k_nsprefix_ = None self.e_l = _cast(None, e_l) + self.e_l_nsprefix_ = None self.qd0 = _cast(None, qd0) + self.qd0_nsprefix_ = None self.pp = _cast(None, pp) + self.pp_nsprefix_ = None self.alphac = _cast(None, alphac) + self.alphac_nsprefix_ = None self.betac = _cast(None, betac) + self.betac_nsprefix_ = None self.cm = _cast(None, cm) + self.cm_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, PinskyRinzelCA3Cell) + CurrentSubclassModule_, PinskyRinzelCA3Cell + ) if subclass is not None: return subclass(*args_, **kwargs_) if PinskyRinzelCA3Cell.subclass: return PinskyRinzelCA3Cell.subclass(*args_, **kwargs_) else: return 
PinskyRinzelCA3Cell(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_currentDensity(self, value): # Validate type Nml2Quantity_currentDensity, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_currentDensity_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_currentDensity_patterns_, )) - validate_Nml2Quantity_currentDensity_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A_per_m2|uA_per_cm2|mA_per_cm2)$']] + self.validate_Nml2Quantity_currentDensity_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_currentDensity_patterns_, + ) + ) + + validate_Nml2Quantity_currentDensity_patterns_ = [ + [ + "^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A_per_m2|uA_per_cm2|mA_per_cm2))$" + ] + ] + def validate_Nml2Quantity_conductanceDensity(self, value): # Validate type Nml2Quantity_conductanceDensity, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_conductanceDensity_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductanceDensity_patterns_, )) - validate_Nml2Quantity_conductanceDensity_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S_per_m2|mS_per_cm2|S_per_cm2)$']] + self.validate_Nml2Quantity_conductanceDensity_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_conductanceDensity_patterns_, + ) + ) + + validate_Nml2Quantity_conductanceDensity_patterns_ = [ + [ + "^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S_per_m2|mS_per_cm2|S_per_cm2))$" + ] + ] + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.validate_Nml2Quantity_voltage_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$"] + ] + def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] + self.validate_Nml2Quantity_none_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_none_patterns_, + ) + ) + + validate_Nml2Quantity_none_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?)$"] + ] + def validate_Nml2Quantity_specificCapacitance(self, value): # Validate type Nml2Quantity_specificCapacitance, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_specificCapacitance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_specificCapacitance_patterns_, )) - validate_Nml2Quantity_specificCapacitance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(F_per_m2|uF_per_cm2)$']] - def hasContent_(self): if ( - super(PinskyRinzelCA3Cell, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_specificCapacitance_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_specificCapacitance_patterns_, + ) + ) + + validate_Nml2Quantity_specificCapacitance_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(F_per_m2|uF_per_cm2))$"] + ] + + def _hasContent(self): + if super(PinskyRinzelCA3Cell, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PinskyRinzelCA3Cell', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('PinskyRinzelCA3Cell') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PinskyRinzelCA3Cell", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("PinskyRinzelCA3Cell") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ 
is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "PinskyRinzelCA3Cell": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PinskyRinzelCA3Cell') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PinskyRinzelCA3Cell', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="PinskyRinzelCA3Cell", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="PinskyRinzelCA3Cell", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PinskyRinzelCA3Cell'): - super(PinskyRinzelCA3Cell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PinskyRinzelCA3Cell') - if self.i_soma is not None and 'i_soma' not in already_processed: - already_processed.add('i_soma') - outfile.write(' iSoma=%s' % (quote_attrib(self.i_soma), )) - if self.i_dend is not None and 'i_dend' not in already_processed: - already_processed.add('i_dend') - outfile.write(' iDend=%s' % (quote_attrib(self.i_dend), )) - if self.gc is not None and 'gc' not in already_processed: - already_processed.add('gc') - outfile.write(' gc=%s' % 
(quote_attrib(self.gc), )) - if self.g_ls is not None and 'g_ls' not in already_processed: - already_processed.add('g_ls') - outfile.write(' gLs=%s' % (quote_attrib(self.g_ls), )) - if self.g_ld is not None and 'g_ld' not in already_processed: - already_processed.add('g_ld') - outfile.write(' gLd=%s' % (quote_attrib(self.g_ld), )) - if self.g_na is not None and 'g_na' not in already_processed: - already_processed.add('g_na') - outfile.write(' gNa=%s' % (quote_attrib(self.g_na), )) - if self.g_kdr is not None and 'g_kdr' not in already_processed: - already_processed.add('g_kdr') - outfile.write(' gKdr=%s' % (quote_attrib(self.g_kdr), )) - if self.g_ca is not None and 'g_ca' not in already_processed: - already_processed.add('g_ca') - outfile.write(' gCa=%s' % (quote_attrib(self.g_ca), )) - if self.g_kahp is not None and 'g_kahp' not in already_processed: - already_processed.add('g_kahp') - outfile.write(' gKahp=%s' % (quote_attrib(self.g_kahp), )) - if self.g_kc is not None and 'g_kc' not in already_processed: - already_processed.add('g_kc') - outfile.write(' gKC=%s' % (quote_attrib(self.g_kc), )) - if self.g_nmda is not None and 'g_nmda' not in already_processed: - already_processed.add('g_nmda') - outfile.write(' gNmda=%s' % (quote_attrib(self.g_nmda), )) - if self.g_ampa is not None and 'g_ampa' not in already_processed: - already_processed.add('g_ampa') - outfile.write(' gAmpa=%s' % (quote_attrib(self.g_ampa), )) - if self.e_na is not None and 'e_na' not in already_processed: - already_processed.add('e_na') - outfile.write(' eNa=%s' % (quote_attrib(self.e_na), )) - if self.e_ca is not None and 'e_ca' not in already_processed: - already_processed.add('e_ca') - outfile.write(' eCa=%s' % (quote_attrib(self.e_ca), )) - if self.e_k is not None and 'e_k' not in already_processed: - already_processed.add('e_k') - outfile.write(' eK=%s' % (quote_attrib(self.e_k), )) - if self.e_l is not None and 'e_l' not in already_processed: - already_processed.add('e_l') - 
outfile.write(' eL=%s' % (quote_attrib(self.e_l), )) - if self.qd0 is not None and 'qd0' not in already_processed: - already_processed.add('qd0') - outfile.write(' qd0=%s' % (quote_attrib(self.qd0), )) - if self.pp is not None and 'pp' not in already_processed: - already_processed.add('pp') - outfile.write(' pp=%s' % (quote_attrib(self.pp), )) - if self.alphac is not None and 'alphac' not in already_processed: - already_processed.add('alphac') - outfile.write(' alphac=%s' % (quote_attrib(self.alphac), )) - if self.betac is not None and 'betac' not in already_processed: - already_processed.add('betac') - outfile.write(' betac=%s' % (quote_attrib(self.betac), )) - if self.cm is not None and 'cm' not in already_processed: - already_processed.add('cm') - outfile.write(' cm=%s' % (quote_attrib(self.cm), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PinskyRinzelCA3Cell', fromsubclass_=False, pretty_print=True): - super(PinskyRinzelCA3Cell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="PinskyRinzelCA3Cell", + ): + super(PinskyRinzelCA3Cell, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="PinskyRinzelCA3Cell", + ) + if self.i_soma is not None and "i_soma" not in already_processed: + already_processed.add("i_soma") + outfile.write( + " iSoma=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.i_soma), input_name="iSoma" + ) + ), + ) + ) + if self.i_dend is not None and "i_dend" not in already_processed: + already_processed.add("i_dend") + outfile.write( + " iDend=%s" + % ( + self.gds_encode( + 
self.gds_format_string( + quote_attrib(self.i_dend), input_name="iDend" + ) + ), + ) + ) + if self.gc is not None and "gc" not in already_processed: + already_processed.add("gc") + outfile.write( + " gc=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.gc), input_name="gc") + ), + ) + ) + if self.g_ls is not None and "g_ls" not in already_processed: + already_processed.add("g_ls") + outfile.write( + " gLs=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.g_ls), input_name="gLs" + ) + ), + ) + ) + if self.g_ld is not None and "g_ld" not in already_processed: + already_processed.add("g_ld") + outfile.write( + " gLd=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.g_ld), input_name="gLd" + ) + ), + ) + ) + if self.g_na is not None and "g_na" not in already_processed: + already_processed.add("g_na") + outfile.write( + " gNa=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.g_na), input_name="gNa" + ) + ), + ) + ) + if self.g_kdr is not None and "g_kdr" not in already_processed: + already_processed.add("g_kdr") + outfile.write( + " gKdr=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.g_kdr), input_name="gKdr" + ) + ), + ) + ) + if self.g_ca is not None and "g_ca" not in already_processed: + already_processed.add("g_ca") + outfile.write( + " gCa=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.g_ca), input_name="gCa" + ) + ), + ) + ) + if self.g_kahp is not None and "g_kahp" not in already_processed: + already_processed.add("g_kahp") + outfile.write( + " gKahp=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.g_kahp), input_name="gKahp" + ) + ), + ) + ) + if self.g_kc is not None and "g_kc" not in already_processed: + already_processed.add("g_kc") + outfile.write( + " gKC=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.g_kc), input_name="gKC" + ) + ), + ) + ) + if 
self.g_nmda is not None and "g_nmda" not in already_processed: + already_processed.add("g_nmda") + outfile.write( + " gNmda=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.g_nmda), input_name="gNmda" + ) + ), + ) + ) + if self.g_ampa is not None and "g_ampa" not in already_processed: + already_processed.add("g_ampa") + outfile.write( + " gAmpa=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.g_ampa), input_name="gAmpa" + ) + ), + ) + ) + if self.e_na is not None and "e_na" not in already_processed: + already_processed.add("e_na") + outfile.write( + " eNa=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.e_na), input_name="eNa" + ) + ), + ) + ) + if self.e_ca is not None and "e_ca" not in already_processed: + already_processed.add("e_ca") + outfile.write( + " eCa=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.e_ca), input_name="eCa" + ) + ), + ) + ) + if self.e_k is not None and "e_k" not in already_processed: + already_processed.add("e_k") + outfile.write( + " eK=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.e_k), input_name="eK") + ), + ) + ) + if self.e_l is not None and "e_l" not in already_processed: + already_processed.add("e_l") + outfile.write( + " eL=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.e_l), input_name="eL") + ), + ) + ) + if self.qd0 is not None and "qd0" not in already_processed: + already_processed.add("qd0") + outfile.write( + " qd0=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.qd0), input_name="qd0") + ), + ) + ) + if self.pp is not None and "pp" not in already_processed: + already_processed.add("pp") + outfile.write( + " pp=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.pp), input_name="pp") + ), + ) + ) + if self.alphac is not None and "alphac" not in already_processed: + already_processed.add("alphac") + outfile.write( + " alphac=%s" 
+ % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.alphac), input_name="alphac" + ) + ), + ) + ) + if self.betac is not None and "betac" not in already_processed: + already_processed.add("betac") + outfile.write( + " betac=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.betac), input_name="betac" + ) + ), + ) + ) + if self.cm is not None and "cm" not in already_processed: + already_processed.add("cm") + outfile.write( + " cm=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.cm), input_name="cm") + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="PinskyRinzelCA3Cell", + fromsubclass_=False, + pretty_print=True, + ): + super(PinskyRinzelCA3Cell, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('iSoma', node) - if value is not None and 'iSoma' not in already_processed: - already_processed.add('iSoma') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("iSoma", node) + if value is not None and "iSoma" not in already_processed: + already_processed.add("iSoma") self.i_soma = value - self.validate_Nml2Quantity_currentDensity(self.i_soma) # validate type Nml2Quantity_currentDensity - value = find_attr_value_('iDend', node) - if value is not None and 'iDend' not in 
already_processed: - already_processed.add('iDend') + self.validate_Nml2Quantity_currentDensity( + self.i_soma + ) # validate type Nml2Quantity_currentDensity + value = find_attr_value_("iDend", node) + if value is not None and "iDend" not in already_processed: + already_processed.add("iDend") self.i_dend = value - self.validate_Nml2Quantity_currentDensity(self.i_dend) # validate type Nml2Quantity_currentDensity - value = find_attr_value_('gc', node) - if value is not None and 'gc' not in already_processed: - already_processed.add('gc') + self.validate_Nml2Quantity_currentDensity( + self.i_dend + ) # validate type Nml2Quantity_currentDensity + value = find_attr_value_("gc", node) + if value is not None and "gc" not in already_processed: + already_processed.add("gc") self.gc = value - self.validate_Nml2Quantity_conductanceDensity(self.gc) # validate type Nml2Quantity_conductanceDensity - value = find_attr_value_('gLs', node) - if value is not None and 'gLs' not in already_processed: - already_processed.add('gLs') + self.validate_Nml2Quantity_conductanceDensity( + self.gc + ) # validate type Nml2Quantity_conductanceDensity + value = find_attr_value_("gLs", node) + if value is not None and "gLs" not in already_processed: + already_processed.add("gLs") self.g_ls = value - self.validate_Nml2Quantity_conductanceDensity(self.g_ls) # validate type Nml2Quantity_conductanceDensity - value = find_attr_value_('gLd', node) - if value is not None and 'gLd' not in already_processed: - already_processed.add('gLd') + self.validate_Nml2Quantity_conductanceDensity( + self.g_ls + ) # validate type Nml2Quantity_conductanceDensity + value = find_attr_value_("gLd", node) + if value is not None and "gLd" not in already_processed: + already_processed.add("gLd") self.g_ld = value - self.validate_Nml2Quantity_conductanceDensity(self.g_ld) # validate type Nml2Quantity_conductanceDensity - value = find_attr_value_('gNa', node) - if value is not None and 'gNa' not in already_processed: - 
already_processed.add('gNa') + self.validate_Nml2Quantity_conductanceDensity( + self.g_ld + ) # validate type Nml2Quantity_conductanceDensity + value = find_attr_value_("gNa", node) + if value is not None and "gNa" not in already_processed: + already_processed.add("gNa") self.g_na = value - self.validate_Nml2Quantity_conductanceDensity(self.g_na) # validate type Nml2Quantity_conductanceDensity - value = find_attr_value_('gKdr', node) - if value is not None and 'gKdr' not in already_processed: - already_processed.add('gKdr') + self.validate_Nml2Quantity_conductanceDensity( + self.g_na + ) # validate type Nml2Quantity_conductanceDensity + value = find_attr_value_("gKdr", node) + if value is not None and "gKdr" not in already_processed: + already_processed.add("gKdr") self.g_kdr = value - self.validate_Nml2Quantity_conductanceDensity(self.g_kdr) # validate type Nml2Quantity_conductanceDensity - value = find_attr_value_('gCa', node) - if value is not None and 'gCa' not in already_processed: - already_processed.add('gCa') + self.validate_Nml2Quantity_conductanceDensity( + self.g_kdr + ) # validate type Nml2Quantity_conductanceDensity + value = find_attr_value_("gCa", node) + if value is not None and "gCa" not in already_processed: + already_processed.add("gCa") self.g_ca = value - self.validate_Nml2Quantity_conductanceDensity(self.g_ca) # validate type Nml2Quantity_conductanceDensity - value = find_attr_value_('gKahp', node) - if value is not None and 'gKahp' not in already_processed: - already_processed.add('gKahp') + self.validate_Nml2Quantity_conductanceDensity( + self.g_ca + ) # validate type Nml2Quantity_conductanceDensity + value = find_attr_value_("gKahp", node) + if value is not None and "gKahp" not in already_processed: + already_processed.add("gKahp") self.g_kahp = value - self.validate_Nml2Quantity_conductanceDensity(self.g_kahp) # validate type Nml2Quantity_conductanceDensity - value = find_attr_value_('gKC', node) - if value is not None and 'gKC' not in 
already_processed: - already_processed.add('gKC') + self.validate_Nml2Quantity_conductanceDensity( + self.g_kahp + ) # validate type Nml2Quantity_conductanceDensity + value = find_attr_value_("gKC", node) + if value is not None and "gKC" not in already_processed: + already_processed.add("gKC") self.g_kc = value - self.validate_Nml2Quantity_conductanceDensity(self.g_kc) # validate type Nml2Quantity_conductanceDensity - value = find_attr_value_('gNmda', node) - if value is not None and 'gNmda' not in already_processed: - already_processed.add('gNmda') + self.validate_Nml2Quantity_conductanceDensity( + self.g_kc + ) # validate type Nml2Quantity_conductanceDensity + value = find_attr_value_("gNmda", node) + if value is not None and "gNmda" not in already_processed: + already_processed.add("gNmda") self.g_nmda = value - self.validate_Nml2Quantity_conductanceDensity(self.g_nmda) # validate type Nml2Quantity_conductanceDensity - value = find_attr_value_('gAmpa', node) - if value is not None and 'gAmpa' not in already_processed: - already_processed.add('gAmpa') + self.validate_Nml2Quantity_conductanceDensity( + self.g_nmda + ) # validate type Nml2Quantity_conductanceDensity + value = find_attr_value_("gAmpa", node) + if value is not None and "gAmpa" not in already_processed: + already_processed.add("gAmpa") self.g_ampa = value - self.validate_Nml2Quantity_conductanceDensity(self.g_ampa) # validate type Nml2Quantity_conductanceDensity - value = find_attr_value_('eNa', node) - if value is not None and 'eNa' not in already_processed: - already_processed.add('eNa') + self.validate_Nml2Quantity_conductanceDensity( + self.g_ampa + ) # validate type Nml2Quantity_conductanceDensity + value = find_attr_value_("eNa", node) + if value is not None and "eNa" not in already_processed: + already_processed.add("eNa") self.e_na = value - self.validate_Nml2Quantity_voltage(self.e_na) # validate type Nml2Quantity_voltage - value = find_attr_value_('eCa', node) - if value is not None and 
'eCa' not in already_processed: - already_processed.add('eCa') + self.validate_Nml2Quantity_voltage( + self.e_na + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("eCa", node) + if value is not None and "eCa" not in already_processed: + already_processed.add("eCa") self.e_ca = value - self.validate_Nml2Quantity_voltage(self.e_ca) # validate type Nml2Quantity_voltage - value = find_attr_value_('eK', node) - if value is not None and 'eK' not in already_processed: - already_processed.add('eK') + self.validate_Nml2Quantity_voltage( + self.e_ca + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("eK", node) + if value is not None and "eK" not in already_processed: + already_processed.add("eK") self.e_k = value - self.validate_Nml2Quantity_voltage(self.e_k) # validate type Nml2Quantity_voltage - value = find_attr_value_('eL', node) - if value is not None and 'eL' not in already_processed: - already_processed.add('eL') + self.validate_Nml2Quantity_voltage( + self.e_k + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("eL", node) + if value is not None and "eL" not in already_processed: + already_processed.add("eL") self.e_l = value - self.validate_Nml2Quantity_voltage(self.e_l) # validate type Nml2Quantity_voltage - value = find_attr_value_('qd0', node) - if value is not None and 'qd0' not in already_processed: - already_processed.add('qd0') + self.validate_Nml2Quantity_voltage( + self.e_l + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("qd0", node) + if value is not None and "qd0" not in already_processed: + already_processed.add("qd0") self.qd0 = value - self.validate_Nml2Quantity_none(self.qd0) # validate type Nml2Quantity_none - value = find_attr_value_('pp', node) - if value is not None and 'pp' not in already_processed: - already_processed.add('pp') + self.validate_Nml2Quantity_none(self.qd0) # validate type Nml2Quantity_none + value = find_attr_value_("pp", node) + if value is not None and "pp" 
not in already_processed: + already_processed.add("pp") self.pp = value - self.validate_Nml2Quantity_none(self.pp) # validate type Nml2Quantity_none - value = find_attr_value_('alphac', node) - if value is not None and 'alphac' not in already_processed: - already_processed.add('alphac') + self.validate_Nml2Quantity_none(self.pp) # validate type Nml2Quantity_none + value = find_attr_value_("alphac", node) + if value is not None and "alphac" not in already_processed: + already_processed.add("alphac") self.alphac = value - self.validate_Nml2Quantity_none(self.alphac) # validate type Nml2Quantity_none - value = find_attr_value_('betac', node) - if value is not None and 'betac' not in already_processed: - already_processed.add('betac') + self.validate_Nml2Quantity_none( + self.alphac + ) # validate type Nml2Quantity_none + value = find_attr_value_("betac", node) + if value is not None and "betac" not in already_processed: + already_processed.add("betac") self.betac = value - self.validate_Nml2Quantity_none(self.betac) # validate type Nml2Quantity_none - value = find_attr_value_('cm', node) - if value is not None and 'cm' not in already_processed: - already_processed.add('cm') + self.validate_Nml2Quantity_none( + self.betac + ) # validate type Nml2Quantity_none + value = find_attr_value_("cm", node) + if value is not None and "cm" not in already_processed: + already_processed.add("cm") self.cm = value - self.validate_Nml2Quantity_specificCapacitance(self.cm) # validate type Nml2Quantity_specificCapacitance - super(PinskyRinzelCA3Cell, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(PinskyRinzelCA3Cell, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_specificCapacitance( + self.cm + ) # validate type Nml2Quantity_specificCapacitance + super(PinskyRinzelCA3Cell, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, 
child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(PinskyRinzelCA3Cell, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class PinskyRinzelCA3Cell class FitzHughNagumo1969Cell(BaseCell): + """FitzHughNagumo1969Cell -- The Fitzhugh Nagumo model is a two-dimensional simplification of the Hodgkin-Huxley model of spike generation in squid giant axons. This system was suggested by FitzHugh ( FitzHugh R. [1961]: Impulses and physiological states in theoretical models of nerve membrane. Biophysical J. 1:445-466 ), who called it " Bonhoeffer-van der Pol model ", and the equivalent circuit by Nagumo et al. ( Nagumo J. , Arimoto S. , and Yoshizawa S. [1962] An active pulse transmission line simulating nerve axon. Proc IRE. 50:2061-2070. 1962 ). This version corresponds to the one described in FitzHugh R. [1969]: Mathematical models of excitation and propagation in nerve. Chapter 1 ( pp. 1-85 in H. P. Schwan, ed. Biological Engineering, McGraw-Hill Book Co. , N. Y. 
) + \n + :param a: + :type a: none + :param b: + :type b: none + :param I: plays the role of an external injected current + :type I: none + :param phi: + :type phi: none + :param V0: + :type V0: none + :param W0: + :type W0: none + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('a', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('b', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('I', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('phi', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('V0', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('W0', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), + MemberSpec_("a", "Nml2Quantity_none", 0, 0, {"use": "required", "name": "a"}), + MemberSpec_("b", "Nml2Quantity_none", 0, 0, {"use": "required", "name": "b"}), + MemberSpec_("I", "Nml2Quantity_none", 0, 0, {"use": "required", "name": "I"}), + MemberSpec_( + "phi", "Nml2Quantity_none", 0, 0, {"use": "required", "name": "phi"} + ), + MemberSpec_("V0", "Nml2Quantity_none", 0, 0, {"use": "required", "name": "V0"}), + MemberSpec_("W0", "Nml2Quantity_none", 0, 0, {"use": "required", "name": "W0"}), ] subclass = None superclass = BaseCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, a=None, b=None, I=None, phi=None, V0=None, W0=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + a=None, + b=None, + I=None, + phi=None, + V0=None, + W0=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(FitzHughNagumo1969Cell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + 
self.ns_prefix_ = None + super(globals().get("FitzHughNagumo1969Cell"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.a = _cast(None, a) + self.a_nsprefix_ = None self.b = _cast(None, b) + self.b_nsprefix_ = None self.I = _cast(None, I) + self.I_nsprefix_ = None self.phi = _cast(None, phi) + self.phi_nsprefix_ = None self.V0 = _cast(None, V0) + self.V0_nsprefix_ = None self.W0 = _cast(None, W0) + self.W0_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, FitzHughNagumo1969Cell) + CurrentSubclassModule_, FitzHughNagumo1969Cell + ) if subclass is not None: return subclass(*args_, **kwargs_) if FitzHughNagumo1969Cell.subclass: return FitzHughNagumo1969Cell.subclass(*args_, **kwargs_) else: return FitzHughNagumo1969Cell(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] - def hasContent_(self): if ( - super(FitzHughNagumo1969Cell, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_none_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_none_patterns_, + ) + ) + + validate_Nml2Quantity_none_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?)$"] + ] + + def _hasContent(self): + if super(FitzHughNagumo1969Cell, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FitzHughNagumo1969Cell', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('FitzHughNagumo1969Cell') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="FitzHughNagumo1969Cell", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("FitzHughNagumo1969Cell") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "FitzHughNagumo1969Cell": name_ = 
self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FitzHughNagumo1969Cell') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='FitzHughNagumo1969Cell', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="FitzHughNagumo1969Cell", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="FitzHughNagumo1969Cell", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='FitzHughNagumo1969Cell'): - super(FitzHughNagumo1969Cell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FitzHughNagumo1969Cell') - if self.a is not None and 'a' not in already_processed: - already_processed.add('a') - outfile.write(' a=%s' % (quote_attrib(self.a), )) - if self.b is not None and 'b' not in already_processed: - already_processed.add('b') - outfile.write(' b=%s' % (quote_attrib(self.b), )) - if self.I is not None and 'I' not in already_processed: - already_processed.add('I') - outfile.write(' I=%s' % (quote_attrib(self.I), )) - if self.phi is not None and 'phi' not in already_processed: - already_processed.add('phi') - outfile.write(' phi=%s' % 
(quote_attrib(self.phi), )) - if self.V0 is not None and 'V0' not in already_processed: - already_processed.add('V0') - outfile.write(' V0=%s' % (quote_attrib(self.V0), )) - if self.W0 is not None and 'W0' not in already_processed: - already_processed.add('W0') - outfile.write(' W0=%s' % (quote_attrib(self.W0), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FitzHughNagumo1969Cell', fromsubclass_=False, pretty_print=True): - super(FitzHughNagumo1969Cell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="FitzHughNagumo1969Cell", + ): + super(FitzHughNagumo1969Cell, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="FitzHughNagumo1969Cell", + ) + if self.a is not None and "a" not in already_processed: + already_processed.add("a") + outfile.write( + " a=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.a), input_name="a") + ), + ) + ) + if self.b is not None and "b" not in already_processed: + already_processed.add("b") + outfile.write( + " b=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.b), input_name="b") + ), + ) + ) + if self.I is not None and "I" not in already_processed: + already_processed.add("I") + outfile.write( + " I=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.I), input_name="I") + ), + ) + ) + if self.phi is not None and "phi" not in already_processed: + already_processed.add("phi") + outfile.write( + " phi=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.phi), input_name="phi") + ), + ) + ) + if self.V0 is not 
None and "V0" not in already_processed: + already_processed.add("V0") + outfile.write( + " V0=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.V0), input_name="V0") + ), + ) + ) + if self.W0 is not None and "W0" not in already_processed: + already_processed.add("W0") + outfile.write( + " W0=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.W0), input_name="W0") + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="FitzHughNagumo1969Cell", + fromsubclass_=False, + pretty_print=True, + ): + super(FitzHughNagumo1969Cell, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('a', node) - if value is not None and 'a' not in already_processed: - already_processed.add('a') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("a", node) + if value is not None and "a" not in already_processed: + already_processed.add("a") self.a = value - self.validate_Nml2Quantity_none(self.a) # validate type Nml2Quantity_none - value = find_attr_value_('b', node) - if value is not None and 'b' not in already_processed: - already_processed.add('b') + self.validate_Nml2Quantity_none(self.a) # validate type Nml2Quantity_none + value = find_attr_value_("b", node) + if value is not None and "b" not in 
already_processed: + already_processed.add("b") self.b = value - self.validate_Nml2Quantity_none(self.b) # validate type Nml2Quantity_none - value = find_attr_value_('I', node) - if value is not None and 'I' not in already_processed: - already_processed.add('I') + self.validate_Nml2Quantity_none(self.b) # validate type Nml2Quantity_none + value = find_attr_value_("I", node) + if value is not None and "I" not in already_processed: + already_processed.add("I") self.I = value - self.validate_Nml2Quantity_none(self.I) # validate type Nml2Quantity_none - value = find_attr_value_('phi', node) - if value is not None and 'phi' not in already_processed: - already_processed.add('phi') + self.validate_Nml2Quantity_none(self.I) # validate type Nml2Quantity_none + value = find_attr_value_("phi", node) + if value is not None and "phi" not in already_processed: + already_processed.add("phi") self.phi = value - self.validate_Nml2Quantity_none(self.phi) # validate type Nml2Quantity_none - value = find_attr_value_('V0', node) - if value is not None and 'V0' not in already_processed: - already_processed.add('V0') + self.validate_Nml2Quantity_none(self.phi) # validate type Nml2Quantity_none + value = find_attr_value_("V0", node) + if value is not None and "V0" not in already_processed: + already_processed.add("V0") self.V0 = value - self.validate_Nml2Quantity_none(self.V0) # validate type Nml2Quantity_none - value = find_attr_value_('W0', node) - if value is not None and 'W0' not in already_processed: - already_processed.add('W0') + self.validate_Nml2Quantity_none(self.V0) # validate type Nml2Quantity_none + value = find_attr_value_("W0", node) + if value is not None and "W0" not in already_processed: + already_processed.add("W0") self.W0 = value - self.validate_Nml2Quantity_none(self.W0) # validate type Nml2Quantity_none - super(FitzHughNagumo1969Cell, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - 
super(FitzHughNagumo1969Cell, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_none(self.W0) # validate type Nml2Quantity_none + super(FitzHughNagumo1969Cell, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(FitzHughNagumo1969Cell, self)._buildChildren( + child_, node, nodeName_, True + ) pass + + # end class FitzHughNagumo1969Cell class FitzHughNagumoCell(BaseCell): + """FitzHughNagumoCell -- Simple dimensionless model of spiking cell from FitzHugh and Nagumo. Superseded by **fitzHughNagumo1969Cell** ( See https://github.com/NeuroML/NeuroML2/issues/42 ) + \n + :param I: + :type I: none + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('I', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), + MemberSpec_("I", "Nml2Quantity_none", 0, 0, {"use": "required", "name": "I"}), ] subclass = None superclass = BaseCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, I=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + I=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(FitzHughNagumoCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("FitzHughNagumoCell"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.I = _cast(None, I) + self.I_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, FitzHughNagumoCell) 
+ CurrentSubclassModule_, FitzHughNagumoCell + ) if subclass is not None: return subclass(*args_, **kwargs_) if FitzHughNagumoCell.subclass: return FitzHughNagumoCell.subclass(*args_, **kwargs_) else: return FitzHughNagumoCell(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] - def hasContent_(self): if ( - super(FitzHughNagumoCell, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_none_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_none_patterns_, + ) + ) + + validate_Nml2Quantity_none_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?)$"] + ] + + def _hasContent(self): + if super(FitzHughNagumoCell, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FitzHughNagumoCell', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('FitzHughNagumoCell') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="FitzHughNagumoCell", + 
pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("FitzHughNagumoCell") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "FitzHughNagumoCell": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FitzHughNagumoCell') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='FitzHughNagumoCell', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="FitzHughNagumoCell", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="FitzHughNagumoCell", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='FitzHughNagumoCell'): - super(FitzHughNagumoCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FitzHughNagumoCell') - if self.I is not None and 'I' not in already_processed: - already_processed.add('I') - outfile.write(' I=%s' % (quote_attrib(self.I), )) - def exportChildren(self, outfile, level, namespaceprefix_='', 
namespacedef_='', name_='FitzHughNagumoCell', fromsubclass_=False, pretty_print=True): - super(FitzHughNagumoCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="FitzHughNagumoCell", + ): + super(FitzHughNagumoCell, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="FitzHughNagumoCell", + ) + if self.I is not None and "I" not in already_processed: + already_processed.add("I") + outfile.write( + " I=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.I), input_name="I") + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="FitzHughNagumoCell", + fromsubclass_=False, + pretty_print=True, + ): + super(FitzHughNagumoCell, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('I', node) - if value is not None and 'I' not in already_processed: - already_processed.add('I') + + def _buildAttributes(self, node, attrs, already_processed): + value = 
find_attr_value_("I", node) + if value is not None and "I" not in already_processed: + already_processed.add("I") self.I = value - self.validate_Nml2Quantity_none(self.I) # validate type Nml2Quantity_none - super(FitzHughNagumoCell, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(FitzHughNagumoCell, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_none(self.I) # validate type Nml2Quantity_none + super(FitzHughNagumoCell, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(FitzHughNagumoCell, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class FitzHughNagumoCell class BaseCellMembPotCap(BaseCell): - """This is to prevent it conflicting with attribute c (lowercase) e.g. - in izhikevichCell2007""" + """BaseCellMembPotCap -- Any cell with a membrane potential **v** with voltage units and a membrane capacitance **C.** Also defines exposed value **iSyn** for current due to external synapses and **iMemb** for total transmembrane current ( usually channel currents plus **iSyn** ) + \n + :param C: Total capacitance of the cell membrane + :type C: capacitance + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('C', 'Nml2Quantity_capacitance', 0, 0, {'use': u'required'}), + MemberSpec_( + "C", "Nml2Quantity_capacitance", 0, 0, {"use": "required", "name": "C"} + ), ] subclass = None superclass = BaseCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, C=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + C=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + 
self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BaseCellMembPotCap, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("BaseCellMembPotCap"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + extensiontype_, + **kwargs_ + ) self.C = _cast(None, C) + self.C_nsprefix_ = None self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, BaseCellMembPotCap) + CurrentSubclassModule_, BaseCellMembPotCap + ) if subclass is not None: return subclass(*args_, **kwargs_) if BaseCellMembPotCap.subclass: return BaseCellMembPotCap.subclass(*args_, **kwargs_) else: return BaseCellMembPotCap(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_capacitance(self, value): # Validate type Nml2Quantity_capacitance, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_capacitance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_capacitance_patterns_, )) - validate_Nml2Quantity_capacitance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(F|uF|nF|pF)$']] - def hasContent_(self): if ( - super(BaseCellMembPotCap, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_capacitance_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_capacitance_patterns_, + ) + ) + + validate_Nml2Quantity_capacitance_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(F|uF|nF|pF))$"] + ] + + def _hasContent(self): + if super(BaseCellMembPotCap, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseCellMembPotCap', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BaseCellMembPotCap') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseCellMembPotCap", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BaseCellMembPotCap") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and 
name_ == "BaseCellMembPotCap": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseCellMembPotCap') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseCellMembPotCap', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseCellMembPotCap", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BaseCellMembPotCap", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseCellMembPotCap'): - super(BaseCellMembPotCap, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseCellMembPotCap') - if self.C is not None and 'C' not in already_processed: - already_processed.add('C') - outfile.write(' C=%s' % (quote_attrib(self.C), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BaseCellMembPotCap", + ): + super(BaseCellMembPotCap, 
self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseCellMembPotCap", + ) + if self.C is not None and "C" not in already_processed: + already_processed.add("C") + outfile.write( + " C=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.C), input_name="C") + ), + ) + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseCellMembPotCap', fromsubclass_=False, pretty_print=True): - super(BaseCellMembPotCap, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseCellMembPotCap", + fromsubclass_=False, + pretty_print=True, + ): + super(BaseCellMembPotCap, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + 
self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('C', node) - if value is not None and 'C' not in already_processed: - already_processed.add('C') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("C", node) + if value is not None and "C" not in already_processed: + already_processed.add("C") self.C = value - self.validate_Nml2Quantity_capacitance(self.C) # validate type Nml2Quantity_capacitance - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_Nml2Quantity_capacitance( + self.C + ) # validate type Nml2Quantity_capacitance + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(BaseCellMembPotCap, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BaseCellMembPotCap, self).buildChildren(child_, node, nodeName_, True) + super(BaseCellMembPotCap, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(BaseCellMembPotCap, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class BaseCellMembPotCap class IzhikevichCell(BaseCell): + """IzhikevichCell -- Cell based on the 2003 model of Izhikevich, see http://izhikevich.org/publications/spikes.htm + \n + :param v0: Initial membrane potential + :type v0: voltage + :param a: Time scale of the recovery variable U + :type a: none + :param b: Sensitivity of U to the subthreshold fluctuations of the membrane potential V + :type b: none + :param c: After-spike reset value of V + :type c: none + :param d: After-spike increase to U 
+ :type d: none + :param thresh: Spike threshold + :type thresh: voltage + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('v0', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('thresh', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('a', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('b', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('c', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), - MemberSpec_('d', 'Nml2Quantity_none', 0, 0, {'use': u'required'}), + MemberSpec_( + "v0", "Nml2Quantity_voltage", 0, 0, {"use": "required", "name": "v0"} + ), + MemberSpec_( + "thresh", + "Nml2Quantity_voltage", + 0, + 0, + {"use": "required", "name": "thresh"}, + ), + MemberSpec_("a", "Nml2Quantity_none", 0, 0, {"use": "required", "name": "a"}), + MemberSpec_("b", "Nml2Quantity_none", 0, 0, {"use": "required", "name": "b"}), + MemberSpec_("c", "Nml2Quantity_none", 0, 0, {"use": "required", "name": "c"}), + MemberSpec_("d", "Nml2Quantity_none", 0, 0, {"use": "required", "name": "d"}), ] subclass = None superclass = BaseCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, v0=None, thresh=None, a=None, b=None, c=None, d=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + v0=None, + thresh=None, + a=None, + b=None, + c=None, + d=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(IzhikevichCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("IzhikevichCell"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, 
annotation, **kwargs_ + ) self.v0 = _cast(None, v0) + self.v0_nsprefix_ = None self.thresh = _cast(None, thresh) + self.thresh_nsprefix_ = None self.a = _cast(None, a) + self.a_nsprefix_ = None self.b = _cast(None, b) + self.b_nsprefix_ = None self.c = _cast(None, c) + self.c_nsprefix_ = None self.d = _cast(None, d) + self.d_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, IzhikevichCell) + subclass = getSubclassFromModule_(CurrentSubclassModule_, IzhikevichCell) if subclass is not None: return subclass(*args_, **kwargs_) if IzhikevichCell.subclass: return IzhikevichCell.subclass(*args_, **kwargs_) else: return IzhikevichCell(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.validate_Nml2Quantity_voltage_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + 
["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$"] + ] + def validate_Nml2Quantity_none(self, value): # Validate type Nml2Quantity_none, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_none_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_none_patterns_, )) - validate_Nml2Quantity_none_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?$']] - def hasContent_(self): if ( - super(IzhikevichCell, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_none_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_none_patterns_, + ) + ) + + validate_Nml2Quantity_none_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?)$"] + ] + + def _hasContent(self): + if super(IzhikevichCell, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IzhikevichCell', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IzhikevichCell') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IzhikevichCell", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IzhikevichCell") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ 
is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "IzhikevichCell": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IzhikevichCell') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IzhikevichCell', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IzhikevichCell" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IzhikevichCell", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IzhikevichCell'): - super(IzhikevichCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IzhikevichCell') - if self.v0 is not None and 'v0' not in already_processed: - already_processed.add('v0') - outfile.write(' v0=%s' % (quote_attrib(self.v0), )) - if self.thresh is not None and 'thresh' not in already_processed: - already_processed.add('thresh') - outfile.write(' thresh=%s' % (quote_attrib(self.thresh), )) - if self.a is not None and 'a' not in already_processed: - already_processed.add('a') - outfile.write(' a=%s' % (quote_attrib(self.a), )) - if self.b is not None and 'b' not in 
already_processed: - already_processed.add('b') - outfile.write(' b=%s' % (quote_attrib(self.b), )) - if self.c is not None and 'c' not in already_processed: - already_processed.add('c') - outfile.write(' c=%s' % (quote_attrib(self.c), )) - if self.d is not None and 'd' not in already_processed: - already_processed.add('d') - outfile.write(' d=%s' % (quote_attrib(self.d), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IzhikevichCell', fromsubclass_=False, pretty_print=True): - super(IzhikevichCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="IzhikevichCell", + ): + super(IzhikevichCell, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IzhikevichCell" + ) + if self.v0 is not None and "v0" not in already_processed: + already_processed.add("v0") + outfile.write( + " v0=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.v0), input_name="v0") + ), + ) + ) + if self.thresh is not None and "thresh" not in already_processed: + already_processed.add("thresh") + outfile.write( + " thresh=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.thresh), input_name="thresh" + ) + ), + ) + ) + if self.a is not None and "a" not in already_processed: + already_processed.add("a") + outfile.write( + " a=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.a), input_name="a") + ), + ) + ) + if self.b is not None and "b" not in already_processed: + already_processed.add("b") + outfile.write( + " b=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.b), 
input_name="b") + ), + ) + ) + if self.c is not None and "c" not in already_processed: + already_processed.add("c") + outfile.write( + " c=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.c), input_name="c") + ), + ) + ) + if self.d is not None and "d" not in already_processed: + already_processed.add("d") + outfile.write( + " d=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.d), input_name="d") + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IzhikevichCell", + fromsubclass_=False, + pretty_print=True, + ): + super(IzhikevichCell, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('v0', node) - if value is not None and 'v0' not in already_processed: - already_processed.add('v0') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("v0", node) + if value is not None and "v0" not in already_processed: + already_processed.add("v0") self.v0 = value - self.validate_Nml2Quantity_voltage(self.v0) # validate type Nml2Quantity_voltage - value = find_attr_value_('thresh', node) - if value is not None and 'thresh' not in already_processed: - already_processed.add('thresh') + self.validate_Nml2Quantity_voltage( + self.v0 + ) # validate type Nml2Quantity_voltage + value = 
find_attr_value_("thresh", node) + if value is not None and "thresh" not in already_processed: + already_processed.add("thresh") self.thresh = value - self.validate_Nml2Quantity_voltage(self.thresh) # validate type Nml2Quantity_voltage - value = find_attr_value_('a', node) - if value is not None and 'a' not in already_processed: - already_processed.add('a') + self.validate_Nml2Quantity_voltage( + self.thresh + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("a", node) + if value is not None and "a" not in already_processed: + already_processed.add("a") self.a = value - self.validate_Nml2Quantity_none(self.a) # validate type Nml2Quantity_none - value = find_attr_value_('b', node) - if value is not None and 'b' not in already_processed: - already_processed.add('b') + self.validate_Nml2Quantity_none(self.a) # validate type Nml2Quantity_none + value = find_attr_value_("b", node) + if value is not None and "b" not in already_processed: + already_processed.add("b") self.b = value - self.validate_Nml2Quantity_none(self.b) # validate type Nml2Quantity_none - value = find_attr_value_('c', node) - if value is not None and 'c' not in already_processed: - already_processed.add('c') + self.validate_Nml2Quantity_none(self.b) # validate type Nml2Quantity_none + value = find_attr_value_("c", node) + if value is not None and "c" not in already_processed: + already_processed.add("c") self.c = value - self.validate_Nml2Quantity_none(self.c) # validate type Nml2Quantity_none - value = find_attr_value_('d', node) - if value is not None and 'd' not in already_processed: - already_processed.add('d') + self.validate_Nml2Quantity_none(self.c) # validate type Nml2Quantity_none + value = find_attr_value_("d", node) + if value is not None and "d" not in already_processed: + already_processed.add("d") self.d = value - self.validate_Nml2Quantity_none(self.d) # validate type Nml2Quantity_none - super(IzhikevichCell, self).buildAttributes(node, attrs, already_processed) - def 
buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(IzhikevichCell, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_none(self.d) # validate type Nml2Quantity_none + super(IzhikevichCell, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(IzhikevichCell, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class IzhikevichCell class IafCell(BaseCell): + """IafCell -- Integrate and fire cell with capacitance **C,** **leakConductance** and **leakReversal** + \n + :param leakConductance: + :type leakConductance: conductance + :param leakReversal: + :type leakReversal: voltage + :param thresh: + :type thresh: voltage + :param reset: + :type reset: voltage + :param C: Total capacitance of the cell membrane + :type C: capacitance + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('leak_reversal', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('thresh', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('reset', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('C', 'Nml2Quantity_capacitance', 0, 0, {'use': u'required'}), - MemberSpec_('leak_conductance', 'Nml2Quantity_conductance', 0, 0, {'use': u'required'}), + MemberSpec_( + "leak_reversal", + "Nml2Quantity_voltage", + 0, + 0, + {"use": "required", "name": "leak_reversal"}, + ), + MemberSpec_( + "thresh", + "Nml2Quantity_voltage", + 0, + 0, + {"use": "required", "name": "thresh"}, + ), + MemberSpec_( + "reset", "Nml2Quantity_voltage", 0, 0, {"use": "required", "name": "reset"} + ), + MemberSpec_( + "C", "Nml2Quantity_capacitance", 0, 0, {"use": "required", "name": "C"} + ), + MemberSpec_( + "leak_conductance", + "Nml2Quantity_conductance", + 0, + 0, + {"use": "required", "name": "leak_conductance"}, + ), ] subclass = None superclass = BaseCell - def 
__init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, leak_reversal=None, thresh=None, reset=None, C=None, leak_conductance=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + leak_reversal=None, + thresh=None, + reset=None, + C=None, + leak_conductance=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(IafCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("IafCell"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + extensiontype_, + **kwargs_ + ) self.leak_reversal = _cast(None, leak_reversal) + self.leak_reversal_nsprefix_ = None self.thresh = _cast(None, thresh) + self.thresh_nsprefix_ = None self.reset = _cast(None, reset) + self.reset_nsprefix_ = None self.C = _cast(None, C) + self.C_nsprefix_ = None self.leak_conductance = _cast(None, leak_conductance) + self.leak_conductance_nsprefix_ = None self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, IafCell) + subclass = getSubclassFromModule_(CurrentSubclassModule_, IafCell) if subclass is not None: return subclass(*args_, **kwargs_) if IafCell.subclass: return IafCell.subclass(*args_, **kwargs_) else: return IafCell(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.validate_Nml2Quantity_voltage_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$"] + ] + def validate_Nml2Quantity_capacitance(self, value): # Validate type Nml2Quantity_capacitance, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_capacitance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_capacitance_patterns_, )) - validate_Nml2Quantity_capacitance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(F|uF|nF|pF)$']] + self.validate_Nml2Quantity_capacitance_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_capacitance_patterns_, + ) + ) + + validate_Nml2Quantity_capacitance_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(F|uF|nF|pF))$"] + ] + def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_conductance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductance_patterns_, )) - validate_Nml2Quantity_conductance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$']] - def hasContent_(self): if ( - super(IafCell, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_conductance_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_conductance_patterns_, + ) + ) + + validate_Nml2Quantity_conductance_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS))$"] + ] + + def _hasContent(self): + if super(IafCell, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IafCell', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IafCell') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IafCell", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IafCell") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "IafCell": name_ = self.original_tagname_ + if 
UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafCell') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IafCell', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IafCell" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IafCell", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IafCell'): - super(IafCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafCell') - if self.leak_reversal is not None and 'leak_reversal' not in already_processed: - already_processed.add('leak_reversal') - outfile.write(' leakReversal=%s' % (quote_attrib(self.leak_reversal), )) - if self.thresh is not None and 'thresh' not in already_processed: - already_processed.add('thresh') - outfile.write(' thresh=%s' % (quote_attrib(self.thresh), )) - if self.reset is not None and 'reset' not in already_processed: - already_processed.add('reset') - outfile.write(' reset=%s' % (quote_attrib(self.reset), )) - if self.C is not None and 'C' not in already_processed: - already_processed.add('C') - outfile.write(' C=%s' % (quote_attrib(self.C), )) - if 
self.leak_conductance is not None and 'leak_conductance' not in already_processed: - already_processed.add('leak_conductance') - outfile.write(' leakConductance=%s' % (quote_attrib(self.leak_conductance), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="IafCell" + ): + super(IafCell, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IafCell" + ) + if self.leak_reversal is not None and "leak_reversal" not in already_processed: + already_processed.add("leak_reversal") + outfile.write( + " leakReversal=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.leak_reversal), input_name="leakReversal" + ) + ), + ) + ) + if self.thresh is not None and "thresh" not in already_processed: + already_processed.add("thresh") + outfile.write( + " thresh=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.thresh), input_name="thresh" + ) + ), + ) + ) + if self.reset is not None and "reset" not in already_processed: + already_processed.add("reset") + outfile.write( + " reset=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.reset), input_name="reset" + ) + ), + ) + ) + if self.C is not None and "C" not in already_processed: + already_processed.add("C") + outfile.write( + " C=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.C), input_name="C") + ), + ) + ) + if ( + self.leak_conductance is not None + and "leak_conductance" not in already_processed + ): + already_processed.add("leak_conductance") + outfile.write( + " leakConductance=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.leak_conductance), + input_name="leakConductance", + ) + ), + ) + ) + if self.extensiontype_ 
is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IafCell', fromsubclass_=False, pretty_print=True): - super(IafCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IafCell", + fromsubclass_=False, + pretty_print=True, + ): + super(IafCell, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('leakReversal', node) - if value is not None and 'leakReversal' not in already_processed: - already_processed.add('leakReversal') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("leakReversal", node) + if 
value is not None and "leakReversal" not in already_processed: + already_processed.add("leakReversal") self.leak_reversal = value - self.validate_Nml2Quantity_voltage(self.leak_reversal) # validate type Nml2Quantity_voltage - value = find_attr_value_('thresh', node) - if value is not None and 'thresh' not in already_processed: - already_processed.add('thresh') + self.validate_Nml2Quantity_voltage( + self.leak_reversal + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("thresh", node) + if value is not None and "thresh" not in already_processed: + already_processed.add("thresh") self.thresh = value - self.validate_Nml2Quantity_voltage(self.thresh) # validate type Nml2Quantity_voltage - value = find_attr_value_('reset', node) - if value is not None and 'reset' not in already_processed: - already_processed.add('reset') + self.validate_Nml2Quantity_voltage( + self.thresh + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("reset", node) + if value is not None and "reset" not in already_processed: + already_processed.add("reset") self.reset = value - self.validate_Nml2Quantity_voltage(self.reset) # validate type Nml2Quantity_voltage - value = find_attr_value_('C', node) - if value is not None and 'C' not in already_processed: - already_processed.add('C') + self.validate_Nml2Quantity_voltage( + self.reset + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("C", node) + if value is not None and "C" not in already_processed: + already_processed.add("C") self.C = value - self.validate_Nml2Quantity_capacitance(self.C) # validate type Nml2Quantity_capacitance - value = find_attr_value_('leakConductance', node) - if value is not None and 'leakConductance' not in already_processed: - already_processed.add('leakConductance') + self.validate_Nml2Quantity_capacitance( + self.C + ) # validate type Nml2Quantity_capacitance + value = find_attr_value_("leakConductance", node) + if value is not None and "leakConductance" not in 
already_processed: + already_processed.add("leakConductance") self.leak_conductance = value - self.validate_Nml2Quantity_conductance(self.leak_conductance) # validate type Nml2Quantity_conductance - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_Nml2Quantity_conductance( + self.leak_conductance + ) # validate type Nml2Quantity_conductance + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(IafCell, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(IafCell, self).buildChildren(child_, node, nodeName_, True) + super(IafCell, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(IafCell, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class IafCell class IafTauCell(BaseCell): + """IafTauCell -- Integrate and fire cell which returns to its leak reversal potential of **leakReversal** with a time constant **tau** + \n + :param leakReversal: + :type leakReversal: voltage + :param tau: + :type tau: time + :param thresh: The membrane potential at which to emit a spiking event and reset voltage + :type thresh: voltage + :param reset: The value the membrane potential is reset to on spiking + :type reset: voltage + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('leak_reversal', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('thresh', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('reset', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('tau', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_( + "leak_reversal", + 
"Nml2Quantity_voltage", + 0, + 0, + {"use": "required", "name": "leak_reversal"}, + ), + MemberSpec_( + "thresh", + "Nml2Quantity_voltage", + 0, + 0, + {"use": "required", "name": "thresh"}, + ), + MemberSpec_( + "reset", "Nml2Quantity_voltage", 0, 0, {"use": "required", "name": "reset"} + ), + MemberSpec_( + "tau", "Nml2Quantity_time", 0, 0, {"use": "required", "name": "tau"} + ), ] subclass = None superclass = BaseCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, leak_reversal=None, thresh=None, reset=None, tau=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + leak_reversal=None, + thresh=None, + reset=None, + tau=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(IafTauCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("IafTauCell"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + extensiontype_, + **kwargs_ + ) self.leak_reversal = _cast(None, leak_reversal) + self.leak_reversal_nsprefix_ = None self.thresh = _cast(None, thresh) + self.thresh_nsprefix_ = None self.reset = _cast(None, reset) + self.reset_nsprefix_ = None self.tau = _cast(None, tau) + self.tau_nsprefix_ = None self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, IafTauCell) + subclass = getSubclassFromModule_(CurrentSubclassModule_, IafTauCell) if subclass is not None: return subclass(*args_, **kwargs_) if IafTauCell.subclass: 
return IafTauCell.subclass(*args_, **kwargs_) else: return IafTauCell(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.validate_Nml2Quantity_voltage_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$"] + ] + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] - def hasContent_(self): if ( - super(IafTauCell, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_time_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$"] + ] + + def _hasContent(self): + if super(IafTauCell, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IafTauCell', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IafTauCell') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IafTauCell", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IafTauCell") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "IafTauCell": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + 
namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafTauCell') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IafTauCell', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IafTauCell" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IafTauCell", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IafTauCell'): - super(IafTauCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafTauCell') - if self.leak_reversal is not None and 'leak_reversal' not in already_processed: - already_processed.add('leak_reversal') - outfile.write(' leakReversal=%s' % (quote_attrib(self.leak_reversal), )) - if self.thresh is not None and 'thresh' not in already_processed: - already_processed.add('thresh') - outfile.write(' thresh=%s' % (quote_attrib(self.thresh), )) - if self.reset is not None and 'reset' not in already_processed: - already_processed.add('reset') - outfile.write(' reset=%s' % (quote_attrib(self.reset), )) - if self.tau is not None and 'tau' not in already_processed: - already_processed.add('tau') - outfile.write(' tau=%s' % (quote_attrib(self.tau), )) - if self.extensiontype_ is 
not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="IafTauCell" + ): + super(IafTauCell, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IafTauCell" + ) + if self.leak_reversal is not None and "leak_reversal" not in already_processed: + already_processed.add("leak_reversal") + outfile.write( + " leakReversal=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.leak_reversal), input_name="leakReversal" + ) + ), + ) + ) + if self.thresh is not None and "thresh" not in already_processed: + already_processed.add("thresh") + outfile.write( + " thresh=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.thresh), input_name="thresh" + ) + ), + ) + ) + if self.reset is not None and "reset" not in already_processed: + already_processed.add("reset") + outfile.write( + " reset=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.reset), input_name="reset" + ) + ), + ) + ) + if self.tau is not None and "tau" not in already_processed: + already_processed.add("tau") + outfile.write( + " tau=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.tau), input_name="tau") + ), + ) + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IafTauCell', fromsubclass_=False, pretty_print=True): - super(IafTauCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - 
self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IafTauCell", + fromsubclass_=False, + pretty_print=True, + ): + super(IafTauCell, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('leakReversal', node) - if value is not None and 'leakReversal' not in already_processed: - already_processed.add('leakReversal') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("leakReversal", node) + if value is not None and "leakReversal" not in already_processed: + already_processed.add("leakReversal") self.leak_reversal = value - self.validate_Nml2Quantity_voltage(self.leak_reversal) # validate type Nml2Quantity_voltage - value = find_attr_value_('thresh', node) - if value is not None and 'thresh' not in already_processed: - already_processed.add('thresh') + self.validate_Nml2Quantity_voltage( + self.leak_reversal + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("thresh", node) + if value 
is not None and "thresh" not in already_processed: + already_processed.add("thresh") self.thresh = value - self.validate_Nml2Quantity_voltage(self.thresh) # validate type Nml2Quantity_voltage - value = find_attr_value_('reset', node) - if value is not None and 'reset' not in already_processed: - already_processed.add('reset') + self.validate_Nml2Quantity_voltage( + self.thresh + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("reset", node) + if value is not None and "reset" not in already_processed: + already_processed.add("reset") self.reset = value - self.validate_Nml2Quantity_voltage(self.reset) # validate type Nml2Quantity_voltage - value = find_attr_value_('tau', node) - if value is not None and 'tau' not in already_processed: - already_processed.add('tau') + self.validate_Nml2Quantity_voltage( + self.reset + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("tau", node) + if value is not None and "tau" not in already_processed: + already_processed.add("tau") self.tau = value - self.validate_Nml2Quantity_time(self.tau) # validate type Nml2Quantity_time - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_Nml2Quantity_time(self.tau) # validate type Nml2Quantity_time + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(IafTauCell, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(IafTauCell, self).buildChildren(child_, node, nodeName_, True) + super(IafTauCell, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(IafTauCell, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class 
IafTauCell class GradedSynapse(BaseSynapse): - """Based on synapse in Methods of - http://www.nature.com/neuro/journal/v7/n12/abs/nn1352.html.""" + """GradedSynapse -- Graded/analog synapse. Based on synapse in Methods of http://www. nature.com/neuro/journal/v7/n12/abs/nn1352.html + \n + :param conductance: + :type conductance: conductance + :param delta: Slope of the activation curve + :type delta: voltage + :param k: Rate constant for transmitter-receptor dissociation rate + :type k: per_time + :param Vth: The half-activation voltage of the synapse + :type Vth: voltage + :param erev: The reversal potential of the synapse + :type erev: voltage + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('conductance', 'Nml2Quantity_conductance', 0, 0, {'use': u'required'}), - MemberSpec_('delta', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('Vth', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('k', 'Nml2Quantity_pertime', 0, 0, {'use': u'required'}), - MemberSpec_('erev', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), + MemberSpec_( + "conductance", + "Nml2Quantity_conductance", + 0, + 0, + {"use": "required", "name": "conductance"}, + ), + MemberSpec_( + "delta", "Nml2Quantity_voltage", 0, 0, {"use": "required", "name": "delta"} + ), + MemberSpec_( + "Vth", "Nml2Quantity_voltage", 0, 0, {"use": "required", "name": "Vth"} + ), + MemberSpec_( + "k", "Nml2Quantity_pertime", 0, 0, {"use": "required", "name": "k"} + ), + MemberSpec_( + "erev", "Nml2Quantity_voltage", 0, 0, {"use": "required", "name": "erev"} + ), ] subclass = None superclass = BaseSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, conductance=None, delta=None, Vth=None, k=None, erev=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + conductance=None, + delta=None, + 
Vth=None, + k=None, + erev=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(GradedSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("GradedSynapse"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.conductance = _cast(None, conductance) + self.conductance_nsprefix_ = None self.delta = _cast(None, delta) + self.delta_nsprefix_ = None self.Vth = _cast(None, Vth) + self.Vth_nsprefix_ = None self.k = _cast(None, k) + self.k_nsprefix_ = None self.erev = _cast(None, erev) + self.erev_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, GradedSynapse) + subclass = getSubclassFromModule_(CurrentSubclassModule_, GradedSynapse) if subclass is not None: return subclass(*args_, **kwargs_) if GradedSynapse.subclass: return GradedSynapse.subclass(*args_, **kwargs_) else: return GradedSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_conductance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductance_patterns_, )) - validate_Nml2Quantity_conductance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$']] + self.validate_Nml2Quantity_conductance_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_conductance_patterns_, + ) + ) + + validate_Nml2Quantity_conductance_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS))$"] + ] + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.validate_Nml2Quantity_voltage_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$"] + ] + def validate_Nml2Quantity_pertime(self, value): # Validate type Nml2Quantity_pertime, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_pertime_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_pertime_patterns_, )) - validate_Nml2Quantity_pertime_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz)$']] - def hasContent_(self): if ( - super(GradedSynapse, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_pertime_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_pertime_patterns_, + ) + ) + + validate_Nml2Quantity_pertime_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz))$"] + ] + + def _hasContent(self): + if super(GradedSynapse, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GradedSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('GradedSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GradedSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("GradedSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "GradedSynapse": name_ = 
self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GradedSynapse') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GradedSynapse', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GradedSynapse" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="GradedSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GradedSynapse'): - super(GradedSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GradedSynapse') - if self.conductance is not None and 'conductance' not in already_processed: - already_processed.add('conductance') - outfile.write(' conductance=%s' % (quote_attrib(self.conductance), )) - if self.delta is not None and 'delta' not in already_processed: - already_processed.add('delta') - outfile.write(' delta=%s' % (quote_attrib(self.delta), )) - if self.Vth is not None and 'Vth' not in already_processed: - already_processed.add('Vth') - outfile.write(' Vth=%s' % (quote_attrib(self.Vth), )) - if self.k is not None and 'k' not in already_processed: - already_processed.add('k') - outfile.write(' k=%s' % 
(quote_attrib(self.k), )) - if self.erev is not None and 'erev' not in already_processed: - already_processed.add('erev') - outfile.write(' erev=%s' % (quote_attrib(self.erev), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GradedSynapse', fromsubclass_=False, pretty_print=True): - super(GradedSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="GradedSynapse", + ): + super(GradedSynapse, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GradedSynapse" + ) + if self.conductance is not None and "conductance" not in already_processed: + already_processed.add("conductance") + outfile.write( + " conductance=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.conductance), input_name="conductance" + ) + ), + ) + ) + if self.delta is not None and "delta" not in already_processed: + already_processed.add("delta") + outfile.write( + " delta=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.delta), input_name="delta" + ) + ), + ) + ) + if self.Vth is not None and "Vth" not in already_processed: + already_processed.add("Vth") + outfile.write( + " Vth=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.Vth), input_name="Vth") + ), + ) + ) + if self.k is not None and "k" not in already_processed: + already_processed.add("k") + outfile.write( + " k=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.k), input_name="k") + ), + ) + ) + if self.erev is not None and "erev" not in already_processed: + already_processed.add("erev") + outfile.write( + " 
erev=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.erev), input_name="erev" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GradedSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(GradedSynapse, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('conductance', node) - if value is not None and 'conductance' not in already_processed: - already_processed.add('conductance') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("conductance", node) + if value is not None and "conductance" not in already_processed: + already_processed.add("conductance") self.conductance = value - self.validate_Nml2Quantity_conductance(self.conductance) # validate type Nml2Quantity_conductance - value = find_attr_value_('delta', node) - if value is not None and 'delta' not in already_processed: - already_processed.add('delta') + self.validate_Nml2Quantity_conductance( + self.conductance + ) # validate type Nml2Quantity_conductance + value = find_attr_value_("delta", node) + if value is not None and "delta" not in already_processed: + already_processed.add("delta") self.delta = value - self.validate_Nml2Quantity_voltage(self.delta) # validate type Nml2Quantity_voltage - value = find_attr_value_('Vth', 
node) - if value is not None and 'Vth' not in already_processed: - already_processed.add('Vth') + self.validate_Nml2Quantity_voltage( + self.delta + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("Vth", node) + if value is not None and "Vth" not in already_processed: + already_processed.add("Vth") self.Vth = value - self.validate_Nml2Quantity_voltage(self.Vth) # validate type Nml2Quantity_voltage - value = find_attr_value_('k', node) - if value is not None and 'k' not in already_processed: - already_processed.add('k') + self.validate_Nml2Quantity_voltage( + self.Vth + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("k", node) + if value is not None and "k" not in already_processed: + already_processed.add("k") self.k = value - self.validate_Nml2Quantity_pertime(self.k) # validate type Nml2Quantity_pertime - value = find_attr_value_('erev', node) - if value is not None and 'erev' not in already_processed: - already_processed.add('erev') + self.validate_Nml2Quantity_pertime( + self.k + ) # validate type Nml2Quantity_pertime + value = find_attr_value_("erev", node) + if value is not None and "erev" not in already_processed: + already_processed.add("erev") self.erev = value - self.validate_Nml2Quantity_voltage(self.erev) # validate type Nml2Quantity_voltage - super(GradedSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(GradedSynapse, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_voltage( + self.erev + ) # validate type Nml2Quantity_voltage + super(GradedSynapse, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(GradedSynapse, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class GradedSynapse class LinearGradedSynapse(BaseSynapse): - """Behaves just like a one way gap 
junction.""" + """LinearGradedSynapse -- Behaves just like a one way gap junction. + \n + :param conductance: + :type conductance: conductance + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('conductance', 'Nml2Quantity_conductance', 0, 0, {'use': u'required'}), + MemberSpec_( + "conductance", + "Nml2Quantity_conductance", + 0, + 0, + {"use": "required", "name": "conductance"}, + ), ] subclass = None superclass = BaseSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, conductance=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + conductance=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(LinearGradedSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("LinearGradedSynapse"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.conductance = _cast(None, conductance) + self.conductance_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, LinearGradedSynapse) + CurrentSubclassModule_, LinearGradedSynapse + ) if subclass is not None: return subclass(*args_, **kwargs_) if LinearGradedSynapse.subclass: return LinearGradedSynapse.subclass(*args_, **kwargs_) else: return LinearGradedSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_conductance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductance_patterns_, )) - validate_Nml2Quantity_conductance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$']] - def hasContent_(self): if ( - super(LinearGradedSynapse, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_conductance_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_conductance_patterns_, + ) + ) + + validate_Nml2Quantity_conductance_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS))$"] + ] + + def _hasContent(self): + if super(LinearGradedSynapse, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LinearGradedSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('LinearGradedSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="LinearGradedSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("LinearGradedSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not 
None and name_ == "LinearGradedSynapse": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LinearGradedSynapse') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LinearGradedSynapse', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="LinearGradedSynapse", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="LinearGradedSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LinearGradedSynapse'): - super(LinearGradedSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LinearGradedSynapse') - if self.conductance is not None and 'conductance' not in already_processed: - already_processed.add('conductance') - outfile.write(' conductance=%s' % (quote_attrib(self.conductance), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='LinearGradedSynapse', fromsubclass_=False, pretty_print=True): - super(LinearGradedSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = 
set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="LinearGradedSynapse", + ): + super(LinearGradedSynapse, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="LinearGradedSynapse", + ) + if self.conductance is not None and "conductance" not in already_processed: + already_processed.add("conductance") + outfile.write( + " conductance=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.conductance), input_name="conductance" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="LinearGradedSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(LinearGradedSynapse, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('conductance', node) - if value is not None and 'conductance' not in already_processed: - already_processed.add('conductance') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("conductance", node) + if value is not None and "conductance" not in already_processed: + already_processed.add("conductance") self.conductance = 
value - self.validate_Nml2Quantity_conductance(self.conductance) # validate type Nml2Quantity_conductance - super(LinearGradedSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(LinearGradedSynapse, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_conductance( + self.conductance + ) # validate type Nml2Quantity_conductance + super(LinearGradedSynapse, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(LinearGradedSynapse, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class LinearGradedSynapse class SilentSynapse(BaseSynapse): - """Dummy synapse which emits no current. Used as presynaptic endpoint - for analog synaptic connection (continuousConnection).""" - member_data_items_ = [ - ] + """SilentSynapse -- Dummy synapse which emits no current. 
Used as presynaptic endpoint for analog synaptic connection.""" + + __hash__ = GeneratedsSuper.__hash__ + member_data_items_ = [] subclass = None superclass = BaseSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(SilentSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("SilentSynapse"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, SilentSynapse) + subclass = getSubclassFromModule_(CurrentSubclassModule_, SilentSynapse) if subclass is not None: return subclass(*args_, **kwargs_) if SilentSynapse.subclass: return SilentSynapse.subclass(*args_, **kwargs_) else: return SilentSynapse(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(SilentSynapse, self).hasContent_() - ): + + def _hasContent(self): + if super(SilentSynapse, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SilentSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('SilentSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SilentSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("SilentSynapse") if imported_ns_def_ is not None: namespacedef_ = 
imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "SilentSynapse": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SilentSynapse') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SilentSynapse', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SilentSynapse" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="SilentSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SilentSynapse'): - super(SilentSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SilentSynapse') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SilentSynapse', fromsubclass_=False, pretty_print=True): - super(SilentSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % 
(namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="SilentSynapse", + ): + super(SilentSynapse, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="SilentSynapse" + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="SilentSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(SilentSynapse, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(SilentSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(SilentSynapse, self).buildChildren(child_, node, nodeName_, True) + + def _buildAttributes(self, node, attrs, already_processed): + super(SilentSynapse, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(SilentSynapse, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class SilentSynapse class GapJunction(BaseSynapse): - """Gap junction/single electrical connection""" + """GapJunction -- Gap junction/single electrical connection + \n + :param conductance: + :type conductance: conductance + + """ + + __hash__ 
= GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('conductance', 'Nml2Quantity_conductance', 0, 0, {'use': u'required'}), + MemberSpec_( + "conductance", + "Nml2Quantity_conductance", + 0, + 0, + {"use": "required", "name": "conductance"}, + ), ] subclass = None superclass = BaseSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, conductance=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + conductance=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(GapJunction, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("GapJunction"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.conductance = _cast(None, conductance) + self.conductance_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, GapJunction) + subclass = getSubclassFromModule_(CurrentSubclassModule_, GapJunction) if subclass is not None: return subclass(*args_, **kwargs_) if GapJunction.subclass: return GapJunction.subclass(*args_, **kwargs_) else: return GapJunction(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_conductance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductance_patterns_, )) - validate_Nml2Quantity_conductance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$']] - def hasContent_(self): if ( - super(GapJunction, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_conductance_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_conductance_patterns_, + ) + ) + + validate_Nml2Quantity_conductance_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS))$"] + ] + + def _hasContent(self): + if super(GapJunction, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GapJunction', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('GapJunction') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GapJunction", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("GapJunction") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "GapJunction": name_ = 
self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GapJunction') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GapJunction', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GapJunction" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="GapJunction", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GapJunction'): - super(GapJunction, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GapJunction') - if self.conductance is not None and 'conductance' not in already_processed: - already_processed.add('conductance') - outfile.write(' conductance=%s' % (quote_attrib(self.conductance), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='GapJunction', fromsubclass_=False, pretty_print=True): - super(GapJunction, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + 
else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="GapJunction", + ): + super(GapJunction, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="GapJunction" + ) + if self.conductance is not None and "conductance" not in already_processed: + already_processed.add("conductance") + outfile.write( + " conductance=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.conductance), input_name="conductance" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="GapJunction", + fromsubclass_=False, + pretty_print=True, + ): + super(GapJunction, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('conductance', node) - if value is not None and 'conductance' not in already_processed: - already_processed.add('conductance') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("conductance", node) + if value is not None and "conductance" not in already_processed: + already_processed.add("conductance") self.conductance = value - self.validate_Nml2Quantity_conductance(self.conductance) # validate type Nml2Quantity_conductance - super(GapJunction, self).buildAttributes(node, attrs, 
already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(GapJunction, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_conductance( + self.conductance + ) # validate type Nml2Quantity_conductance + super(GapJunction, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(GapJunction, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class GapJunction class BaseCurrentBasedSynapse(BaseSynapse): - member_data_items_ = [ - ] + """BaseCurrentBasedSynapse -- Synapse model which produces a synaptic current.""" + + __hash__ = GeneratedsSuper.__hash__ + member_data_items_ = [] subclass = None superclass = BaseSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BaseCurrentBasedSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("BaseCurrentBasedSynapse"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + extensiontype_, + **kwargs_ + ) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, BaseCurrentBasedSynapse) + CurrentSubclassModule_, BaseCurrentBasedSynapse + ) if subclass is not None: return subclass(*args_, **kwargs_) if 
BaseCurrentBasedSynapse.subclass: return BaseCurrentBasedSynapse.subclass(*args_, **kwargs_) else: return BaseCurrentBasedSynapse(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(BaseCurrentBasedSynapse, self).hasContent_() - ): + + def _hasContent(self): + if super(BaseCurrentBasedSynapse, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseCurrentBasedSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BaseCurrentBasedSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseCurrentBasedSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BaseCurrentBasedSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "BaseCurrentBasedSynapse": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseCurrentBasedSynapse') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseCurrentBasedSynapse', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseCurrentBasedSynapse", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) 
+ self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BaseCurrentBasedSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseCurrentBasedSynapse'): - super(BaseCurrentBasedSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseCurrentBasedSynapse') - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BaseCurrentBasedSynapse", + ): + super(BaseCurrentBasedSynapse, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseCurrentBasedSynapse", + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseCurrentBasedSynapse', fromsubclass_=False, pretty_print=True): - super(BaseCurrentBasedSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + 
def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseCurrentBasedSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(BaseCurrentBasedSynapse, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(BaseCurrentBasedSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BaseCurrentBasedSynapse, self).buildChildren(child_, node, nodeName_, True) + super(BaseCurrentBasedSynapse, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(BaseCurrentBasedSynapse, self)._buildChildren( + child_, node, nodeName_, True + ) pass + + # end class BaseCurrentBasedSynapse class BaseVoltageDepSynapse(BaseSynapse): - member_data_items_ = [ - ] + """BaseVoltageDepSynapse -- Base type for synapses with a dependence on membrane 
potential""" + + __hash__ = GeneratedsSuper.__hash__ + member_data_items_ = [] subclass = None superclass = BaseSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BaseVoltageDepSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("BaseVoltageDepSynapse"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + extensiontype_, + **kwargs_ + ) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, BaseVoltageDepSynapse) + CurrentSubclassModule_, BaseVoltageDepSynapse + ) if subclass is not None: return subclass(*args_, **kwargs_) if BaseVoltageDepSynapse.subclass: return BaseVoltageDepSynapse.subclass(*args_, **kwargs_) else: return BaseVoltageDepSynapse(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(BaseVoltageDepSynapse, self).hasContent_() - ): + + def _hasContent(self): + if super(BaseVoltageDepSynapse, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseVoltageDepSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BaseVoltageDepSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseVoltageDepSynapse", + 
pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BaseVoltageDepSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "BaseVoltageDepSynapse": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseVoltageDepSynapse') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseVoltageDepSynapse', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseVoltageDepSynapse", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BaseVoltageDepSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseVoltageDepSynapse'): - super(BaseVoltageDepSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseVoltageDepSynapse') - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + 
outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BaseVoltageDepSynapse", + ): + super(BaseVoltageDepSynapse, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseVoltageDepSynapse", + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseVoltageDepSynapse', fromsubclass_=False, pretty_print=True): - super(BaseVoltageDepSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseVoltageDepSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(BaseVoltageDepSynapse, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + 
self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(BaseVoltageDepSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BaseVoltageDepSynapse, self).buildChildren(child_, node, nodeName_, True) + super(BaseVoltageDepSynapse, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(BaseVoltageDepSynapse, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class BaseVoltageDepSynapse class IonChannel(IonChannelScalable): - """Note ionChannel and ionChannelHH are currently functionally - identical. This is needed since many existing examples use - ionChannel, some use ionChannelHH. NeuroML v2beta4 should remove - one of these, probably ionChannelHH.""" + """IonChannel -- Note **ionChannel** and **ionChannelHH** are currently functionally identical. This is needed since many existing examples use ionChannel, some use ionChannelHH. NeuroML v2beta4 should remove one of these, probably ionChannelHH. 
+ \n + :param conductance: + :type conductance: conductance + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('species', 'NmlId', 0, 1, {'use': u'optional'}), - MemberSpec_('type', 'channelTypes', 0, 1, {'use': u'optional'}), - MemberSpec_('conductance', 'Nml2Quantity_conductance', 0, 1, {'use': u'optional'}), - MemberSpec_('gates', 'GateHHUndetermined', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GateHHUndetermined', u'name': u'gate', u'minOccurs': u'0'}, 1), - MemberSpec_('gate_hh_rates', 'GateHHRates', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GateHHRates', u'name': u'gateHHrates', u'minOccurs': u'0'}, 1), - MemberSpec_('gate_h_hrates_taus', 'GateHHRatesTau', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GateHHRatesTau', u'name': u'gateHHratesTau', u'minOccurs': u'0'}, 1), - MemberSpec_('gate_hh_tau_infs', 'GateHHTauInf', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GateHHTauInf', u'name': u'gateHHtauInf', u'minOccurs': u'0'}, 1), - MemberSpec_('gate_h_hrates_infs', 'GateHHRatesInf', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GateHHRatesInf', u'name': u'gateHHratesInf', u'minOccurs': u'0'}, 1), - MemberSpec_('gate_h_hrates_tau_infs', 'GateHHRatesTauInf', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GateHHRatesTauInf', u'name': u'gateHHratesTauInf', u'minOccurs': u'0'}, 1), - MemberSpec_('gate_hh_instantaneouses', 'GateHHInstantaneous', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GateHHInstantaneous', u'name': u'gateHHInstantaneous', u'minOccurs': u'0'}, 1), - MemberSpec_('gate_fractionals', 'GateFractional', 1, 1, {u'maxOccurs': u'unbounded', u'type': u'GateFractional', u'name': u'gateFractional', u'minOccurs': u'0'}, 1), + MemberSpec_("species", "NmlId", 0, 1, {"use": "optional", "name": "species"}), + MemberSpec_("type", "channelTypes", 0, 1, {"use": "optional", "name": "type"}), + MemberSpec_( + "conductance", + "Nml2Quantity_conductance", + 0, + 1, + {"use": "optional", "name": "conductance"}, + ), + 
MemberSpec_( + "gates", + "GateHHUndetermined", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "gate", + "type": "GateHHUndetermined", + }, + 1, + ), + MemberSpec_( + "gate_hh_rates", + "GateHHRates", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "gateHHrates", + "type": "GateHHRates", + }, + 1, + ), + MemberSpec_( + "gate_h_hrates_taus", + "GateHHRatesTau", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "gateHHratesTau", + "type": "GateHHRatesTau", + }, + 1, + ), + MemberSpec_( + "gate_hh_tau_infs", + "GateHHTauInf", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "gateHHtauInf", + "type": "GateHHTauInf", + }, + 1, + ), + MemberSpec_( + "gate_h_hrates_infs", + "GateHHRatesInf", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "gateHHratesInf", + "type": "GateHHRatesInf", + }, + 1, + ), + MemberSpec_( + "gate_h_hrates_tau_infs", + "GateHHRatesTauInf", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "gateHHratesTauInf", + "type": "GateHHRatesTauInf", + }, + 1, + ), + MemberSpec_( + "gate_hh_instantaneouses", + "GateHHInstantaneous", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "gateHHInstantaneous", + "type": "GateHHInstantaneous", + }, + 1, + ), + MemberSpec_( + "gate_fractionals", + "GateFractional", + 1, + 1, + { + "maxOccurs": "unbounded", + "minOccurs": "0", + "name": "gateFractional", + "type": "GateFractional", + }, + 1, + ), ] subclass = None superclass = IonChannelScalable - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, q10_conductance_scalings=None, species=None, type=None, conductance=None, gates=None, gate_hh_rates=None, gate_h_hrates_taus=None, gate_hh_tau_infs=None, gate_h_hrates_infs=None, gate_h_hrates_tau_infs=None, gate_hh_instantaneouses=None, gate_fractionals=None, extensiontype_=None, **kwargs_): + + def __init__( + 
self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + q10_conductance_scalings=None, + species=None, + type=None, + conductance=None, + gates=None, + gate_hh_rates=None, + gate_h_hrates_taus=None, + gate_hh_tau_infs=None, + gate_h_hrates_infs=None, + gate_h_hrates_tau_infs=None, + gate_hh_instantaneouses=None, + gate_fractionals=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(IonChannel, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, q10_conductance_scalings, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("IonChannel"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + q10_conductance_scalings, + extensiontype_, + **kwargs_ + ) self.species = _cast(None, species) + self.species_nsprefix_ = None self.type = _cast(None, type) + self.type_nsprefix_ = None self.conductance = _cast(None, conductance) + self.conductance_nsprefix_ = None if gates is None: self.gates = [] else: self.gates = gates + self.gates_nsprefix_ = None if gate_hh_rates is None: self.gate_hh_rates = [] else: self.gate_hh_rates = gate_hh_rates + self.gate_hh_rates_nsprefix_ = None if gate_h_hrates_taus is None: self.gate_h_hrates_taus = [] else: self.gate_h_hrates_taus = gate_h_hrates_taus + self.gate_h_hrates_taus_nsprefix_ = None if gate_hh_tau_infs is None: self.gate_hh_tau_infs = [] else: self.gate_hh_tau_infs = gate_hh_tau_infs + self.gate_hh_tau_infs_nsprefix_ = None if gate_h_hrates_infs is None: self.gate_h_hrates_infs = [] else: self.gate_h_hrates_infs = gate_h_hrates_infs + self.gate_h_hrates_infs_nsprefix_ = None if gate_h_hrates_tau_infs is None: self.gate_h_hrates_tau_infs = [] else: self.gate_h_hrates_tau_infs 
= gate_h_hrates_tau_infs + self.gate_h_hrates_tau_infs_nsprefix_ = None if gate_hh_instantaneouses is None: self.gate_hh_instantaneouses = [] else: self.gate_hh_instantaneouses = gate_hh_instantaneouses + self.gate_hh_instantaneouses_nsprefix_ = None if gate_fractionals is None: self.gate_fractionals = [] else: self.gate_fractionals = gate_fractionals + self.gate_fractionals_nsprefix_ = None self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, IonChannel) + subclass = getSubclassFromModule_(CurrentSubclassModule_, IonChannel) if subclass is not None: return subclass(*args_, **kwargs_) if IonChannel.subclass: return IonChannel.subclass(*args_, **kwargs_) else: return IonChannel(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + def validate_channelTypes(self, value): # Validate type channelTypes, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - value = str(value) - enumerations = ['ionChannelPassive', 'ionChannelHH'] - enumeration_respectee = False - for enum in enumerations: - if value == enum: - enumeration_respectee = True - break - if not enumeration_respectee: - warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on channelTypes' % {"value" : value.encode("utf-8")} ) + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + value = value + enumerations = ["ionChannelPassive", "ionChannelHH"] + if value not in enumerations: + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on channelTypes' + % {"value": encode_str_2_3(value), "lineno": lineno} + ) + result = False + def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_conductance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductance_patterns_, )) - validate_Nml2Quantity_conductance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$']] - def hasContent_(self): - if ( - self.gates or - self.gate_hh_rates or - self.gate_h_hrates_taus or - self.gate_hh_tau_infs or - self.gate_h_hrates_infs or - self.gate_h_hrates_tau_infs or - self.gate_hh_instantaneouses or - self.gate_fractionals or - super(IonChannel, self).hasContent_() + self.validate_Nml2Quantity_conductance_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_conductance_patterns_, + ) + ) + + validate_Nml2Quantity_conductance_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS))$"] + ] + + def _hasContent(self): + if ( + self.gates + or self.gate_hh_rates + or self.gate_h_hrates_taus + or self.gate_hh_tau_infs + or self.gate_h_hrates_infs + or self.gate_h_hrates_tau_infs + or self.gate_hh_instantaneouses + or self.gate_fractionals + or super(IonChannel, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IonChannel', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IonChannel') + + def export( + self, + outfile, + level, + 
namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="IonChannel", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IonChannel") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "IonChannel": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannel') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IonChannel', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IonChannel" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IonChannel", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IonChannel'): - super(IonChannel, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannel') - if self.species is not None and 'species' not in already_processed: - already_processed.add('species') - outfile.write(' species=%s' % (quote_attrib(self.species), )) 
- if self.type is not None and 'type' not in already_processed: - already_processed.add('type') - outfile.write(' type=%s' % (quote_attrib(self.type), )) - if self.conductance is not None and 'conductance' not in already_processed: - already_processed.add('conductance') - outfile.write(' conductance=%s' % (quote_attrib(self.conductance), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="IonChannel" + ): + super(IonChannel, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IonChannel" + ) + if self.species is not None and "species" not in already_processed: + already_processed.add("species") + outfile.write( + " species=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.species), input_name="species" + ) + ), + ) + ) + if self.type is not None and "type" not in already_processed: + already_processed.add("type") + outfile.write( + " type=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.type), input_name="type" + ) + ), + ) + ) + if self.conductance is not None and "conductance" not in already_processed: + already_processed.add("conductance") + outfile.write( + " conductance=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.conductance), input_name="conductance" + ) + ), + ) + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IonChannel', fromsubclass_=False, pretty_print=True): - super(IonChannel, 
self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="IonChannel", + fromsubclass_=False, + pretty_print=True, + ): + super(IonChannel, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" for gate_ in self.gates: - gate_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gate', pretty_print=pretty_print) + namespaceprefix_ = ( + self.gates_nsprefix_ + ":" + if (UseCapturedNS_ and self.gates_nsprefix_) + else "" + ) + gate_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="gate", + pretty_print=pretty_print, + ) for gateHHrates_ in self.gate_hh_rates: - gateHHrates_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gateHHrates', pretty_print=pretty_print) + namespaceprefix_ = ( + self.gate_hh_rates_nsprefix_ + ":" + if (UseCapturedNS_ and self.gate_hh_rates_nsprefix_) + else "" + ) + gateHHrates_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="gateHHrates", + pretty_print=pretty_print, + ) for gateHHratesTau_ in self.gate_h_hrates_taus: - gateHHratesTau_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gateHHratesTau', pretty_print=pretty_print) + namespaceprefix_ = ( + self.gate_h_hrates_taus_nsprefix_ + ":" + if (UseCapturedNS_ and self.gate_h_hrates_taus_nsprefix_) + else "" + ) + gateHHratesTau_.export( + outfile, + level, + 
namespaceprefix_, + namespacedef_="", + name_="gateHHratesTau", + pretty_print=pretty_print, + ) for gateHHtauInf_ in self.gate_hh_tau_infs: - gateHHtauInf_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gateHHtauInf', pretty_print=pretty_print) + namespaceprefix_ = ( + self.gate_hh_tau_infs_nsprefix_ + ":" + if (UseCapturedNS_ and self.gate_hh_tau_infs_nsprefix_) + else "" + ) + gateHHtauInf_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="gateHHtauInf", + pretty_print=pretty_print, + ) for gateHHratesInf_ in self.gate_h_hrates_infs: - gateHHratesInf_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gateHHratesInf', pretty_print=pretty_print) + namespaceprefix_ = ( + self.gate_h_hrates_infs_nsprefix_ + ":" + if (UseCapturedNS_ and self.gate_h_hrates_infs_nsprefix_) + else "" + ) + gateHHratesInf_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="gateHHratesInf", + pretty_print=pretty_print, + ) for gateHHratesTauInf_ in self.gate_h_hrates_tau_infs: - gateHHratesTauInf_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gateHHratesTauInf', pretty_print=pretty_print) + namespaceprefix_ = ( + self.gate_h_hrates_tau_infs_nsprefix_ + ":" + if (UseCapturedNS_ and self.gate_h_hrates_tau_infs_nsprefix_) + else "" + ) + gateHHratesTauInf_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="gateHHratesTauInf", + pretty_print=pretty_print, + ) for gateHHInstantaneous_ in self.gate_hh_instantaneouses: - gateHHInstantaneous_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gateHHInstantaneous', pretty_print=pretty_print) + namespaceprefix_ = ( + self.gate_hh_instantaneouses_nsprefix_ + ":" + if (UseCapturedNS_ and self.gate_hh_instantaneouses_nsprefix_) + else "" + ) + gateHHInstantaneous_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="gateHHInstantaneous", + pretty_print=pretty_print, + ) for 
gateFractional_ in self.gate_fractionals: - gateFractional_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='gateFractional', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.gate_fractionals_nsprefix_ + ":" + if (UseCapturedNS_ and self.gate_fractionals_nsprefix_) + else "" + ) + gateFractional_.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="gateFractional", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('species', node) - if value is not None and 'species' not in already_processed: - already_processed.add('species') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("species", node) + if value is not None and "species" not in already_processed: + already_processed.add("species") self.species = value - self.validate_NmlId(self.species) # validate type NmlId - value = find_attr_value_('type', node) - if value is not None and 'type' not in already_processed: - already_processed.add('type') + self.validate_NmlId(self.species) # validate type NmlId + value = find_attr_value_("type", node) + if value is not None and "type" not in already_processed: + already_processed.add("type") self.type = value - self.validate_channelTypes(self.type) # validate type channelTypes - value = find_attr_value_('conductance', node) - if value is not None and 
'conductance' not in already_processed: - already_processed.add('conductance') + self.validate_channelTypes(self.type) # validate type channelTypes + value = find_attr_value_("conductance", node) + if value is not None and "conductance" not in already_processed: + already_processed.add("conductance") self.conductance = value - self.validate_Nml2Quantity_conductance(self.conductance) # validate type Nml2Quantity_conductance - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_Nml2Quantity_conductance( + self.conductance + ) # validate type Nml2Quantity_conductance + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(IonChannel, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'gate': + super(IonChannel, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "gate": obj_ = GateHHUndetermined.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.gates.append(obj_) - obj_.original_tagname_ = 'gate' - elif nodeName_ == 'gateHHrates': + obj_.original_tagname_ = "gate" + elif nodeName_ == "gateHHrates": obj_ = GateHHRates.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.gate_hh_rates.append(obj_) - obj_.original_tagname_ = 'gateHHrates' - elif nodeName_ == 'gateHHratesTau': + obj_.original_tagname_ = "gateHHrates" + elif nodeName_ == "gateHHratesTau": obj_ = GateHHRatesTau.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.gate_h_hrates_taus.append(obj_) - 
obj_.original_tagname_ = 'gateHHratesTau' - elif nodeName_ == 'gateHHtauInf': + obj_.original_tagname_ = "gateHHratesTau" + elif nodeName_ == "gateHHtauInf": obj_ = GateHHTauInf.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.gate_hh_tau_infs.append(obj_) - obj_.original_tagname_ = 'gateHHtauInf' - elif nodeName_ == 'gateHHratesInf': + obj_.original_tagname_ = "gateHHtauInf" + elif nodeName_ == "gateHHratesInf": obj_ = GateHHRatesInf.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.gate_h_hrates_infs.append(obj_) - obj_.original_tagname_ = 'gateHHratesInf' - elif nodeName_ == 'gateHHratesTauInf': + obj_.original_tagname_ = "gateHHratesInf" + elif nodeName_ == "gateHHratesTauInf": obj_ = GateHHRatesTauInf.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.gate_h_hrates_tau_infs.append(obj_) - obj_.original_tagname_ = 'gateHHratesTauInf' - elif nodeName_ == 'gateHHInstantaneous': + obj_.original_tagname_ = "gateHHratesTauInf" + elif nodeName_ == "gateHHInstantaneous": obj_ = GateHHInstantaneous.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.gate_hh_instantaneouses.append(obj_) - obj_.original_tagname_ = 'gateHHInstantaneous' - elif nodeName_ == 'gateFractional': + obj_.original_tagname_ = "gateHHInstantaneous" + elif nodeName_ == "gateFractional": obj_ = GateFractional.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.gate_fractionals.append(obj_) - obj_.original_tagname_ = 'gateFractional' - super(IonChannel, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "gateFractional" + super(IonChannel, self)._buildChildren(child_, node, nodeName_, True) + + # end class IonChannel class AlphaCurrSynapse(BasePynnSynapse): - member_data_items_ = [ - ] + 
"""AlphaCurrSynapse -- Alpha synapse: rise time and decay time are both tau_syn. Current based synapse. + \n + :param tau_syn: + :type tau_syn: none + + """ + + __hash__ = GeneratedsSuper.__hash__ + member_data_items_ = [] subclass = None superclass = BasePynnSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, tau_syn=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + tau_syn=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(AlphaCurrSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, tau_syn, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("AlphaCurrSynapse"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, tau_syn, **kwargs_ + ) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, AlphaCurrSynapse) + subclass = getSubclassFromModule_(CurrentSubclassModule_, AlphaCurrSynapse) if subclass is not None: return subclass(*args_, **kwargs_) if AlphaCurrSynapse.subclass: return AlphaCurrSynapse.subclass(*args_, **kwargs_) else: return AlphaCurrSynapse(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(AlphaCurrSynapse, self).hasContent_() - ): + + def _hasContent(self): + if super(AlphaCurrSynapse, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AlphaCurrSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('AlphaCurrSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + 
namespacedef_="", + name_="AlphaCurrSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("AlphaCurrSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "AlphaCurrSynapse": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaCurrSynapse') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AlphaCurrSynapse', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="AlphaCurrSynapse", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="AlphaCurrSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AlphaCurrSynapse'): - super(AlphaCurrSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaCurrSynapse') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AlphaCurrSynapse', fromsubclass_=False, pretty_print=True): - super(AlphaCurrSynapse, 
self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="AlphaCurrSynapse", + ): + super(AlphaCurrSynapse, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="AlphaCurrSynapse", + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="AlphaCurrSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(AlphaCurrSynapse, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(AlphaCurrSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(AlphaCurrSynapse, self).buildChildren(child_, node, nodeName_, True) + + def _buildAttributes(self, node, attrs, already_processed): + super(AlphaCurrSynapse, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(AlphaCurrSynapse, self)._buildChildren(child_, node, 
nodeName_, True) pass + + # end class AlphaCurrSynapse class ExpCurrSynapse(BasePynnSynapse): - member_data_items_ = [ - ] + """ExpCurrSynapse -- Current based synapse with instantaneous rise and single exponential decay ( with time constant tau_syn ) + \n + :param tau_syn: + :type tau_syn: none + + """ + + __hash__ = GeneratedsSuper.__hash__ + member_data_items_ = [] subclass = None superclass = BasePynnSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, tau_syn=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + tau_syn=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ExpCurrSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, tau_syn, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ExpCurrSynapse"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, tau_syn, **kwargs_ + ) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ExpCurrSynapse) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ExpCurrSynapse) if subclass is not None: return subclass(*args_, **kwargs_) if ExpCurrSynapse.subclass: return ExpCurrSynapse.subclass(*args_, **kwargs_) else: return ExpCurrSynapse(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(ExpCurrSynapse, self).hasContent_() - ): + + def _hasContent(self): + if super(ExpCurrSynapse, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpCurrSynapse', pretty_print=True): - imported_ns_def_ = 
GenerateDSNamespaceDefs_.get('ExpCurrSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ExpCurrSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ExpCurrSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ExpCurrSynapse": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpCurrSynapse') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExpCurrSynapse', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ExpCurrSynapse" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ExpCurrSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExpCurrSynapse'): - super(ExpCurrSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpCurrSynapse') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', 
name_='ExpCurrSynapse', fromsubclass_=False, pretty_print=True): - super(ExpCurrSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ExpCurrSynapse", + ): + super(ExpCurrSynapse, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ExpCurrSynapse" + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ExpCurrSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(ExpCurrSynapse, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(ExpCurrSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ExpCurrSynapse, self).buildChildren(child_, node, nodeName_, True) + + def _buildAttributes(self, node, attrs, already_processed): + super(ExpCurrSynapse, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + 
super(ExpCurrSynapse, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class ExpCurrSynapse class AlphaCondSynapse(BasePynnSynapse): + """AlphaCondSynapse -- Alpha synapse: rise time and decay time are both tau_syn. Conductance based synapse. + \n + :param e_rev: + :type e_rev: none + :param tau_syn: + :type tau_syn: none + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('e_rev', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_("e_rev", "xs:float", 0, 0, {"use": "required", "name": "e_rev"}), ] subclass = None superclass = BasePynnSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, tau_syn=None, e_rev=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + tau_syn=None, + e_rev=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(AlphaCondSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, tau_syn, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("AlphaCondSynapse"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, tau_syn, **kwargs_ + ) self.e_rev = _cast(float, e_rev) + self.e_rev_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, AlphaCondSynapse) + subclass = getSubclassFromModule_(CurrentSubclassModule_, AlphaCondSynapse) if subclass is not None: return subclass(*args_, **kwargs_) if AlphaCondSynapse.subclass: return AlphaCondSynapse.subclass(*args_, **kwargs_) else: return AlphaCondSynapse(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - 
super(AlphaCondSynapse, self).hasContent_() - ): + + def _hasContent(self): + if super(AlphaCondSynapse, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AlphaCondSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('AlphaCondSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="AlphaCondSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("AlphaCondSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "AlphaCondSynapse": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaCondSynapse') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AlphaCondSynapse', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="AlphaCondSynapse", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="AlphaCondSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) 
- def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AlphaCondSynapse'): - super(AlphaCondSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaCondSynapse') - if self.e_rev is not None and 'e_rev' not in already_processed: - already_processed.add('e_rev') - outfile.write(' e_rev="%s"' % self.gds_format_float(self.e_rev, input_name='e_rev')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AlphaCondSynapse', fromsubclass_=False, pretty_print=True): - super(AlphaCondSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="AlphaCondSynapse", + ): + super(AlphaCondSynapse, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="AlphaCondSynapse", + ) + if self.e_rev is not None and "e_rev" not in already_processed: + already_processed.add("e_rev") + outfile.write( + ' e_rev="%s"' % self.gds_format_float(self.e_rev, input_name="e_rev") + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="AlphaCondSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(AlphaCondSynapse, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: 
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('e_rev', node) - if value is not None and 'e_rev' not in already_processed: - already_processed.add('e_rev') - try: - self.e_rev = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (e_rev): %s' % exp) - super(AlphaCondSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(AlphaCondSynapse, self).buildChildren(child_, node, nodeName_, True) + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("e_rev", node) + if value is not None and "e_rev" not in already_processed: + already_processed.add("e_rev") + value = self.gds_parse_float(value, node, "e_rev") + self.e_rev = value + super(AlphaCondSynapse, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(AlphaCondSynapse, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class AlphaCondSynapse class ExpCondSynapse(BasePynnSynapse): + """ExpCondSynapse -- Conductance based synapse with instantaneous rise and single exponential decay ( with time constant tau_syn ) + \n + :param e_rev: + :type e_rev: none + :param tau_syn: + :type tau_syn: none + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('e_rev', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_("e_rev", "xs:float", 0, 0, {"use": "required", "name": "e_rev"}), ] subclass = None superclass = BasePynnSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, tau_syn=None, e_rev=None, **kwargs_): + + def __init__( 
+ self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + tau_syn=None, + e_rev=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ExpCondSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, tau_syn, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ExpCondSynapse"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, tau_syn, **kwargs_ + ) self.e_rev = _cast(float, e_rev) + self.e_rev_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ExpCondSynapse) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ExpCondSynapse) if subclass is not None: return subclass(*args_, **kwargs_) if ExpCondSynapse.subclass: return ExpCondSynapse.subclass(*args_, **kwargs_) else: return ExpCondSynapse(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(ExpCondSynapse, self).hasContent_() - ): + + def _hasContent(self): + if super(ExpCondSynapse, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpCondSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExpCondSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ExpCondSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ExpCondSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == 
"ExpCondSynapse": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpCondSynapse') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExpCondSynapse', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ExpCondSynapse" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ExpCondSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExpCondSynapse'): - super(ExpCondSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpCondSynapse') - if self.e_rev is not None and 'e_rev' not in already_processed: - already_processed.add('e_rev') - outfile.write(' e_rev="%s"' % self.gds_format_float(self.e_rev, input_name='e_rev')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpCondSynapse', fromsubclass_=False, pretty_print=True): - super(ExpCondSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + 
outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ExpCondSynapse", + ): + super(ExpCondSynapse, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ExpCondSynapse" + ) + if self.e_rev is not None and "e_rev" not in already_processed: + already_processed.add("e_rev") + outfile.write( + ' e_rev="%s"' % self.gds_format_float(self.e_rev, input_name="e_rev") + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ExpCondSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(ExpCondSynapse, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('e_rev', node) - if value is not None and 'e_rev' not in already_processed: - already_processed.add('e_rev') - try: - self.e_rev = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (e_rev): %s' % exp) - super(ExpCondSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ExpCondSynapse, self).buildChildren(child_, node, nodeName_, True) + + def _buildAttributes(self, node, attrs, already_processed): + value = 
find_attr_value_("e_rev", node) + if value is not None and "e_rev" not in already_processed: + already_processed.add("e_rev") + value = self.gds_parse_float(value, node, "e_rev") + self.e_rev = value + super(ExpCondSynapse, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(ExpCondSynapse, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class ExpCondSynapse class HH_cond_exp(basePyNNCell): + """HH_cond_exp -- Single-compartment Hodgkin-Huxley-type neuron with transient sodium and delayed-rectifier potassium currents using the ion channel models from Traub. + \n + :param gbar_K: + :type gbar_K: none + :param gbar_Na: + :type gbar_Na: none + :param g_leak: + :type g_leak: none + :param e_rev_K: + :type e_rev_K: none + :param e_rev_Na: + :type e_rev_Na: none + :param e_rev_leak: + :type e_rev_leak: none + :param v_offset: + :type v_offset: none + :param e_rev_E: + :type e_rev_E: none + :param e_rev_I: + :type e_rev_I: none + :param cm: + :type cm: none + :param i_offset: + :type i_offset: none + :param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type tau_syn_E: none + :param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! 
Any synapse producing a current can be placed on this cell + :type tau + _syn_I: none + :param v_init: + :type v_init: none + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('v_offset', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('e_rev_E', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('e_rev_I', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('e_rev_K', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('e_rev_Na', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('e_rev_leak', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('g_leak', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('gbar_K', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('gbar_Na', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_( + "v_offset", "xs:float", 0, 0, {"use": "required", "name": "v_offset"} + ), + MemberSpec_( + "e_rev_E", "xs:float", 0, 0, {"use": "required", "name": "e_rev_E"} + ), + MemberSpec_( + "e_rev_I", "xs:float", 0, 0, {"use": "required", "name": "e_rev_I"} + ), + MemberSpec_( + "e_rev_K", "xs:float", 0, 0, {"use": "required", "name": "e_rev_K"} + ), + MemberSpec_( + "e_rev_Na", "xs:float", 0, 0, {"use": "required", "name": "e_rev_Na"} + ), + MemberSpec_( + "e_rev_leak", "xs:float", 0, 0, {"use": "required", "name": "e_rev_leak"} + ), + MemberSpec_("g_leak", "xs:float", 0, 0, {"use": "required", "name": "g_leak"}), + MemberSpec_("gbar_K", "xs:float", 0, 0, {"use": "required", "name": "gbar_K"}), + MemberSpec_( + "gbar_Na", "xs:float", 0, 0, {"use": "required", "name": "gbar_Na"} + ), ] subclass = None superclass = basePyNNCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, v_offset=None, e_rev_E=None, e_rev_I=None, e_rev_K=None, e_rev_Na=None, e_rev_leak=None, g_leak=None, gbar_K=None, gbar_Na=None, **kwargs_): + + def __init__( + self, + 
neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + cm=None, + i_offset=None, + tau_syn_E=None, + tau_syn_I=None, + v_init=None, + v_offset=None, + e_rev_E=None, + e_rev_I=None, + e_rev_K=None, + e_rev_Na=None, + e_rev_leak=None, + g_leak=None, + gbar_K=None, + gbar_Na=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(HH_cond_exp, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("HH_cond_exp"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + cm, + i_offset, + tau_syn_E, + tau_syn_I, + v_init, + **kwargs_ + ) self.v_offset = _cast(float, v_offset) + self.v_offset_nsprefix_ = None self.e_rev_E = _cast(float, e_rev_E) + self.e_rev_E_nsprefix_ = None self.e_rev_I = _cast(float, e_rev_I) + self.e_rev_I_nsprefix_ = None self.e_rev_K = _cast(float, e_rev_K) + self.e_rev_K_nsprefix_ = None self.e_rev_Na = _cast(float, e_rev_Na) + self.e_rev_Na_nsprefix_ = None self.e_rev_leak = _cast(float, e_rev_leak) + self.e_rev_leak_nsprefix_ = None self.g_leak = _cast(float, g_leak) + self.g_leak_nsprefix_ = None self.gbar_K = _cast(float, gbar_K) + self.gbar_K_nsprefix_ = None self.gbar_Na = _cast(float, gbar_Na) + self.gbar_Na_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, HH_cond_exp) + subclass = getSubclassFromModule_(CurrentSubclassModule_, HH_cond_exp) if subclass is not None: return subclass(*args_, **kwargs_) if HH_cond_exp.subclass: return HH_cond_exp.subclass(*args_, **kwargs_) else: return HH_cond_exp(*args_, **kwargs_) + factory = 
staticmethod(factory) - def hasContent_(self): - if ( - super(HH_cond_exp, self).hasContent_() - ): + + def _hasContent(self): + if super(HH_cond_exp, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HH_cond_exp', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('HH_cond_exp') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="HH_cond_exp", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("HH_cond_exp") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "HH_cond_exp": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='HH_cond_exp') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='HH_cond_exp', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="HH_cond_exp" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="HH_cond_exp", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def 
exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='HH_cond_exp'): - super(HH_cond_exp, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='HH_cond_exp') - if self.v_offset is not None and 'v_offset' not in already_processed: - already_processed.add('v_offset') - outfile.write(' v_offset="%s"' % self.gds_format_float(self.v_offset, input_name='v_offset')) - if self.e_rev_E is not None and 'e_rev_E' not in already_processed: - already_processed.add('e_rev_E') - outfile.write(' e_rev_E="%s"' % self.gds_format_float(self.e_rev_E, input_name='e_rev_E')) - if self.e_rev_I is not None and 'e_rev_I' not in already_processed: - already_processed.add('e_rev_I') - outfile.write(' e_rev_I="%s"' % self.gds_format_float(self.e_rev_I, input_name='e_rev_I')) - if self.e_rev_K is not None and 'e_rev_K' not in already_processed: - already_processed.add('e_rev_K') - outfile.write(' e_rev_K="%s"' % self.gds_format_float(self.e_rev_K, input_name='e_rev_K')) - if self.e_rev_Na is not None and 'e_rev_Na' not in already_processed: - already_processed.add('e_rev_Na') - outfile.write(' e_rev_Na="%s"' % self.gds_format_float(self.e_rev_Na, input_name='e_rev_Na')) - if self.e_rev_leak is not None and 'e_rev_leak' not in already_processed: - already_processed.add('e_rev_leak') - outfile.write(' e_rev_leak="%s"' % self.gds_format_float(self.e_rev_leak, input_name='e_rev_leak')) - if self.g_leak is not None and 'g_leak' not in already_processed: - already_processed.add('g_leak') - outfile.write(' g_leak="%s"' % self.gds_format_float(self.g_leak, input_name='g_leak')) - if self.gbar_K is not None and 'gbar_K' not in already_processed: - already_processed.add('gbar_K') - outfile.write(' gbar_K="%s"' % self.gds_format_float(self.gbar_K, input_name='gbar_K')) - if self.gbar_Na is not None and 'gbar_Na' not in already_processed: - already_processed.add('gbar_Na') - outfile.write(' gbar_Na="%s"' % self.gds_format_float(self.gbar_Na, 
input_name='gbar_Na')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='HH_cond_exp', fromsubclass_=False, pretty_print=True): - super(HH_cond_exp, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="HH_cond_exp", + ): + super(HH_cond_exp, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="HH_cond_exp" + ) + if self.v_offset is not None and "v_offset" not in already_processed: + already_processed.add("v_offset") + outfile.write( + ' v_offset="%s"' + % self.gds_format_float(self.v_offset, input_name="v_offset") + ) + if self.e_rev_E is not None and "e_rev_E" not in already_processed: + already_processed.add("e_rev_E") + outfile.write( + ' e_rev_E="%s"' + % self.gds_format_float(self.e_rev_E, input_name="e_rev_E") + ) + if self.e_rev_I is not None and "e_rev_I" not in already_processed: + already_processed.add("e_rev_I") + outfile.write( + ' e_rev_I="%s"' + % self.gds_format_float(self.e_rev_I, input_name="e_rev_I") + ) + if self.e_rev_K is not None and "e_rev_K" not in already_processed: + already_processed.add("e_rev_K") + outfile.write( + ' e_rev_K="%s"' + % self.gds_format_float(self.e_rev_K, input_name="e_rev_K") + ) + if self.e_rev_Na is not None and "e_rev_Na" not in already_processed: + already_processed.add("e_rev_Na") + outfile.write( + ' e_rev_Na="%s"' + % self.gds_format_float(self.e_rev_Na, input_name="e_rev_Na") + ) + if self.e_rev_leak is not None and "e_rev_leak" not in already_processed: + already_processed.add("e_rev_leak") + outfile.write( + ' e_rev_leak="%s"' + % self.gds_format_float(self.e_rev_leak, 
input_name="e_rev_leak") + ) + if self.g_leak is not None and "g_leak" not in already_processed: + already_processed.add("g_leak") + outfile.write( + ' g_leak="%s"' % self.gds_format_float(self.g_leak, input_name="g_leak") + ) + if self.gbar_K is not None and "gbar_K" not in already_processed: + already_processed.add("gbar_K") + outfile.write( + ' gbar_K="%s"' % self.gds_format_float(self.gbar_K, input_name="gbar_K") + ) + if self.gbar_Na is not None and "gbar_Na" not in already_processed: + already_processed.add("gbar_Na") + outfile.write( + ' gbar_Na="%s"' + % self.gds_format_float(self.gbar_Na, input_name="gbar_Na") + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="HH_cond_exp", + fromsubclass_=False, + pretty_print=True, + ): + super(HH_cond_exp, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('v_offset', node) - if value is not None and 'v_offset' not in already_processed: - already_processed.add('v_offset') - try: - self.v_offset = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (v_offset): %s' % exp) - value = find_attr_value_('e_rev_E', node) - if value is not None and 'e_rev_E' not in already_processed: - already_processed.add('e_rev_E') - try: - self.e_rev_E = float(value) - except ValueError as exp: - raise 
ValueError('Bad float/double attribute (e_rev_E): %s' % exp) - value = find_attr_value_('e_rev_I', node) - if value is not None and 'e_rev_I' not in already_processed: - already_processed.add('e_rev_I') - try: - self.e_rev_I = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (e_rev_I): %s' % exp) - value = find_attr_value_('e_rev_K', node) - if value is not None and 'e_rev_K' not in already_processed: - already_processed.add('e_rev_K') - try: - self.e_rev_K = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (e_rev_K): %s' % exp) - value = find_attr_value_('e_rev_Na', node) - if value is not None and 'e_rev_Na' not in already_processed: - already_processed.add('e_rev_Na') - try: - self.e_rev_Na = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (e_rev_Na): %s' % exp) - value = find_attr_value_('e_rev_leak', node) - if value is not None and 'e_rev_leak' not in already_processed: - already_processed.add('e_rev_leak') - try: - self.e_rev_leak = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (e_rev_leak): %s' % exp) - value = find_attr_value_('g_leak', node) - if value is not None and 'g_leak' not in already_processed: - already_processed.add('g_leak') - try: - self.g_leak = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (g_leak): %s' % exp) - value = find_attr_value_('gbar_K', node) - if value is not None and 'gbar_K' not in already_processed: - already_processed.add('gbar_K') - try: - self.gbar_K = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (gbar_K): %s' % exp) - value = find_attr_value_('gbar_Na', node) - if value is not None and 'gbar_Na' not in already_processed: - already_processed.add('gbar_Na') - try: - self.gbar_Na = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (gbar_Na): %s' % 
exp) - super(HH_cond_exp, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(HH_cond_exp, self).buildChildren(child_, node, nodeName_, True) + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("v_offset", node) + if value is not None and "v_offset" not in already_processed: + already_processed.add("v_offset") + value = self.gds_parse_float(value, node, "v_offset") + self.v_offset = value + value = find_attr_value_("e_rev_E", node) + if value is not None and "e_rev_E" not in already_processed: + already_processed.add("e_rev_E") + value = self.gds_parse_float(value, node, "e_rev_E") + self.e_rev_E = value + value = find_attr_value_("e_rev_I", node) + if value is not None and "e_rev_I" not in already_processed: + already_processed.add("e_rev_I") + value = self.gds_parse_float(value, node, "e_rev_I") + self.e_rev_I = value + value = find_attr_value_("e_rev_K", node) + if value is not None and "e_rev_K" not in already_processed: + already_processed.add("e_rev_K") + value = self.gds_parse_float(value, node, "e_rev_K") + self.e_rev_K = value + value = find_attr_value_("e_rev_Na", node) + if value is not None and "e_rev_Na" not in already_processed: + already_processed.add("e_rev_Na") + value = self.gds_parse_float(value, node, "e_rev_Na") + self.e_rev_Na = value + value = find_attr_value_("e_rev_leak", node) + if value is not None and "e_rev_leak" not in already_processed: + already_processed.add("e_rev_leak") + value = self.gds_parse_float(value, node, "e_rev_leak") + self.e_rev_leak = value + value = find_attr_value_("g_leak", node) + if value is not None and "g_leak" not in already_processed: + already_processed.add("g_leak") + value = self.gds_parse_float(value, node, "g_leak") + self.g_leak = value + value = find_attr_value_("gbar_K", node) + if value is not None and "gbar_K" not in already_processed: + already_processed.add("gbar_K") + value = 
self.gds_parse_float(value, node, "gbar_K") + self.gbar_K = value + value = find_attr_value_("gbar_Na", node) + if value is not None and "gbar_Na" not in already_processed: + already_processed.add("gbar_Na") + value = self.gds_parse_float(value, node, "gbar_Na") + self.gbar_Na = value + super(HH_cond_exp, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(HH_cond_exp, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class HH_cond_exp class basePyNNIaFCell(basePyNNCell): + """basePyNNIaFCell -- Base type of any PyNN standard integrate and fire model + \n + :param tau_refrac: + :type tau_refrac: none + :param v_thresh: + :type v_thresh: none + :param tau_m: + :type tau_m: none + :param v_rest: + :type v_rest: none + :param v_reset: + :type v_reset: none + :param cm: + :type cm: none + :param i_offset: + :type i_offset: none + :param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type tau_syn_E: none + :param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! 
Any synapse producing a current can be placed on this cell + :type tau_syn_I: none + :param v_init: + :type v_init: none + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('tau_m', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('tau_refrac', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('v_reset', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('v_rest', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('v_thresh', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_("tau_m", "xs:float", 0, 0, {"use": "required", "name": "tau_m"}), + MemberSpec_( + "tau_refrac", "xs:float", 0, 0, {"use": "required", "name": "tau_refrac"} + ), + MemberSpec_( + "v_reset", "xs:float", 0, 0, {"use": "required", "name": "v_reset"} + ), + MemberSpec_("v_rest", "xs:float", 0, 0, {"use": "required", "name": "v_rest"}), + MemberSpec_( + "v_thresh", "xs:float", 0, 0, {"use": "required", "name": "v_thresh"} + ), ] subclass = None superclass = basePyNNCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + cm=None, + i_offset=None, + tau_syn_E=None, + tau_syn_I=None, + v_init=None, + tau_m=None, + tau_refrac=None, + v_reset=None, + v_rest=None, + v_thresh=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(basePyNNIaFCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, extensiontype_, **kwargs_) + self.parent_object_ = 
kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("basePyNNIaFCell"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + cm, + i_offset, + tau_syn_E, + tau_syn_I, + v_init, + extensiontype_, + **kwargs_ + ) self.tau_m = _cast(float, tau_m) + self.tau_m_nsprefix_ = None self.tau_refrac = _cast(float, tau_refrac) + self.tau_refrac_nsprefix_ = None self.v_reset = _cast(float, v_reset) + self.v_reset_nsprefix_ = None self.v_rest = _cast(float, v_rest) + self.v_rest_nsprefix_ = None self.v_thresh = _cast(float, v_thresh) + self.v_thresh_nsprefix_ = None self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, basePyNNIaFCell) + subclass = getSubclassFromModule_(CurrentSubclassModule_, basePyNNIaFCell) if subclass is not None: return subclass(*args_, **kwargs_) if basePyNNIaFCell.subclass: return basePyNNIaFCell.subclass(*args_, **kwargs_) else: return basePyNNIaFCell(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(basePyNNIaFCell, self).hasContent_() - ): + + def _hasContent(self): + if super(basePyNNIaFCell, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='basePyNNIaFCell', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('basePyNNIaFCell') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="basePyNNIaFCell", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("basePyNNIaFCell") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "basePyNNIaFCell": name_ = self.original_tagname_ + if UseCapturedNS_ 
and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='basePyNNIaFCell') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='basePyNNIaFCell', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="basePyNNIaFCell" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="basePyNNIaFCell", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='basePyNNIaFCell'): - super(basePyNNIaFCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='basePyNNIaFCell') - if self.tau_m is not None and 'tau_m' not in already_processed: - already_processed.add('tau_m') - outfile.write(' tau_m="%s"' % self.gds_format_float(self.tau_m, input_name='tau_m')) - if self.tau_refrac is not None and 'tau_refrac' not in already_processed: - already_processed.add('tau_refrac') - outfile.write(' tau_refrac="%s"' % self.gds_format_float(self.tau_refrac, input_name='tau_refrac')) - if self.v_reset is not None and 'v_reset' not in already_processed: - already_processed.add('v_reset') - outfile.write(' v_reset="%s"' % self.gds_format_float(self.v_reset, input_name='v_reset')) - if self.v_rest is not None and 
'v_rest' not in already_processed: - already_processed.add('v_rest') - outfile.write(' v_rest="%s"' % self.gds_format_float(self.v_rest, input_name='v_rest')) - if self.v_thresh is not None and 'v_thresh' not in already_processed: - already_processed.add('v_thresh') - outfile.write(' v_thresh="%s"' % self.gds_format_float(self.v_thresh, input_name='v_thresh')) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="basePyNNIaFCell", + ): + super(basePyNNIaFCell, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="basePyNNIaFCell" + ) + if self.tau_m is not None and "tau_m" not in already_processed: + already_processed.add("tau_m") + outfile.write( + ' tau_m="%s"' % self.gds_format_float(self.tau_m, input_name="tau_m") + ) + if self.tau_refrac is not None and "tau_refrac" not in already_processed: + already_processed.add("tau_refrac") + outfile.write( + ' tau_refrac="%s"' + % self.gds_format_float(self.tau_refrac, input_name="tau_refrac") + ) + if self.v_reset is not None and "v_reset" not in already_processed: + already_processed.add("v_reset") + outfile.write( + ' v_reset="%s"' + % self.gds_format_float(self.v_reset, input_name="v_reset") + ) + if self.v_rest is not None and "v_rest" not in already_processed: + already_processed.add("v_rest") + outfile.write( + ' v_rest="%s"' % self.gds_format_float(self.v_rest, input_name="v_rest") + ) + if self.v_thresh is not None and "v_thresh" not in already_processed: + already_processed.add("v_thresh") + outfile.write( + ' v_thresh="%s"' + % self.gds_format_float(self.v_thresh, input_name="v_thresh") + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") 
outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='basePyNNIaFCell', fromsubclass_=False, pretty_print=True): - super(basePyNNIaFCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="basePyNNIaFCell", + fromsubclass_=False, + pretty_print=True, + ): + super(basePyNNIaFCell, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('tau_m', node) - if value is not None and 'tau_m' not in already_processed: - already_processed.add('tau_m') - try: - self.tau_m = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (tau_m): %s' % exp) - value = find_attr_value_('tau_refrac', node) - if value is not 
None and 'tau_refrac' not in already_processed: - already_processed.add('tau_refrac') - try: - self.tau_refrac = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (tau_refrac): %s' % exp) - value = find_attr_value_('v_reset', node) - if value is not None and 'v_reset' not in already_processed: - already_processed.add('v_reset') - try: - self.v_reset = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (v_reset): %s' % exp) - value = find_attr_value_('v_rest', node) - if value is not None and 'v_rest' not in already_processed: - already_processed.add('v_rest') - try: - self.v_rest = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (v_rest): %s' % exp) - value = find_attr_value_('v_thresh', node) - if value is not None and 'v_thresh' not in already_processed: - already_processed.add('v_thresh') - try: - self.v_thresh = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (v_thresh): %s' % exp) - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("tau_m", node) + if value is not None and "tau_m" not in already_processed: + already_processed.add("tau_m") + value = self.gds_parse_float(value, node, "tau_m") + self.tau_m = value + value = find_attr_value_("tau_refrac", node) + if value is not None and "tau_refrac" not in already_processed: + already_processed.add("tau_refrac") + value = self.gds_parse_float(value, node, "tau_refrac") + self.tau_refrac = value + value = find_attr_value_("v_reset", node) + if value is not None and "v_reset" not in already_processed: + already_processed.add("v_reset") + value = self.gds_parse_float(value, node, "v_reset") + self.v_reset = value + value = find_attr_value_("v_rest", node) + if value is not None and 
"v_rest" not in already_processed: + already_processed.add("v_rest") + value = self.gds_parse_float(value, node, "v_rest") + self.v_rest = value + value = find_attr_value_("v_thresh", node) + if value is not None and "v_thresh" not in already_processed: + already_processed.add("v_thresh") + value = self.gds_parse_float(value, node, "v_thresh") + self.v_thresh = value + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(basePyNNIaFCell, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(basePyNNIaFCell, self).buildChildren(child_, node, nodeName_, True) + super(basePyNNIaFCell, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(basePyNNIaFCell, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class basePyNNIaFCell class ContinuousConnection(BaseConnectionNewFormat): - """Individual continuous/analog synaptic connection""" + """ContinuousConnection -- An instance of a connection in a **continuousProjection** between **presynapticPopulation** to another **postsynapticPopulation** through a **preComponent** at the start and **postComponent** at the end. 
Can be used for analog synapses.""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('pre_component', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('post_component', 'NmlId', 0, 0, {'use': u'required'}), + MemberSpec_( + "pre_component", "NmlId", 0, 0, {"use": "required", "name": "pre_component"} + ), + MemberSpec_( + "post_component", + "NmlId", + 0, + 0, + {"use": "required", "name": "post_component"}, + ), ] subclass = None superclass = BaseConnectionNewFormat - def __init__(self, neuro_lex_id=None, id=None, pre_cell=None, pre_segment='0', pre_fraction_along='0.5', post_cell=None, post_segment='0', post_fraction_along='0.5', pre_component=None, post_component=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + pre_cell=None, + pre_segment="0", + pre_fraction_along="0.5", + post_cell=None, + post_segment="0", + post_fraction_along="0.5", + pre_component=None, + post_component=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ContinuousConnection, self).__init__(neuro_lex_id, id, pre_cell, pre_segment, pre_fraction_along, post_cell, post_segment, post_fraction_along, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ContinuousConnection"), self).__init__( + neuro_lex_id, + id, + pre_cell, + pre_segment, + pre_fraction_along, + post_cell, + post_segment, + post_fraction_along, + extensiontype_, + **kwargs_ + ) self.pre_component = _cast(None, pre_component) + self.pre_component_nsprefix_ = None self.post_component = _cast(None, post_component) + self.post_component_nsprefix_ = None self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - 
CurrentSubclassModule_, ContinuousConnection) + CurrentSubclassModule_, ContinuousConnection + ) if subclass is not None: return subclass(*args_, **kwargs_) if ContinuousConnection.subclass: return ContinuousConnection.subclass(*args_, **kwargs_) else: return ContinuousConnection(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): if ( - super(ContinuousConnection, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + + def _hasContent(self): + if super(ContinuousConnection, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContinuousConnection', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ContinuousConnection') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ContinuousConnection", + pretty_print=True, + ): + imported_ns_def_ = 
GenerateDSNamespaceDefs_.get("ContinuousConnection") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ContinuousConnection": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousConnection') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ContinuousConnection', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ContinuousConnection'): - super(ContinuousConnection, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousConnection') - if self.pre_component is not None and 'pre_component' not in already_processed: - already_processed.add('pre_component') - outfile.write(' preComponent=%s' % (quote_attrib(self.pre_component), )) - if self.post_component is not None and 'post_component' not in already_processed: - already_processed.add('post_component') - outfile.write(' postComponent=%s' % (quote_attrib(self.post_component), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + 
name_="ContinuousConnection", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ContinuousConnection", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ContinuousConnection", + ): + super(ContinuousConnection, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ContinuousConnection", + ) + if self.pre_component is not None and "pre_component" not in already_processed: + already_processed.add("pre_component") + outfile.write( + " preComponent=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.pre_component), input_name="preComponent" + ) + ), + ) + ) + if ( + self.post_component is not None + and "post_component" not in already_processed + ): + already_processed.add("post_component") + outfile.write( + " postComponent=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.post_component), + input_name="postComponent", + ) + ), + ) + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContinuousConnection', fromsubclass_=False, pretty_print=True): - super(ContinuousConnection, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + 
outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ContinuousConnection", + fromsubclass_=False, + pretty_print=True, + ): + super(ContinuousConnection, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('preComponent', node) - if value is not None and 'preComponent' not in already_processed: - already_processed.add('preComponent') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("preComponent", node) + if value is not None and "preComponent" not in already_processed: + already_processed.add("preComponent") self.pre_component = value - self.validate_NmlId(self.pre_component) # validate type NmlId - value = find_attr_value_('postComponent', node) - if value is not None and 'postComponent' not in already_processed: - already_processed.add('postComponent') + self.validate_NmlId(self.pre_component) # validate type NmlId + value = find_attr_value_("postComponent", node) + if value is not None and "postComponent" not in already_processed: + already_processed.add("postComponent") self.post_component = value - self.validate_NmlId(self.post_component) # validate type NmlId - value = 
find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_NmlId(self.post_component) # validate type NmlId + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(ContinuousConnection, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ContinuousConnection, self).buildChildren(child_, node, nodeName_, True) + super(ContinuousConnection, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(ContinuousConnection, self)._buildChildren(child_, node, nodeName_, True) pass def _get_cell_id(self, id_string): - return int(float(id_string)) - + return int(float(id_string)) def get_pre_cell_id(self): + """Get the ID of the pre-synaptic cell + + :returns: ID of pre-synaptic cell + :rtype: str + """ return self._get_cell_id(self.pre_cell) def get_post_cell_id(self): + """Get the ID of the post-synaptic cell + + :returns: ID of post-synaptic cell + :rtype: str + """ return self._get_cell_id(self.post_cell) def get_pre_segment_id(self): + """Get the ID of the pre-synpatic segment + + :returns: ID of pre-synaptic segment. + :rtype: str + """ return int(self.pre_segment) def get_post_segment_id(self): + """Get the ID of the post-synpatic segment + + :returns: ID of post-synaptic segment. 
+ :rtype: str + """ return int(self.post_segment) def get_pre_fraction_along(self): + """Get pre-synaptic fraction along information""" return float(self.pre_fraction_along) def get_post_fraction_along(self): + """Get post-synaptic fraction along information""" return float(self.post_fraction_along) - def get_pre_info(self): - - return str(self.get_pre_cell_id())+(':'+str(self.get_pre_segment_id())+'('+ '%.5f'%self.get_pre_fraction_along()+')' if self.get_pre_segment_id()!=0 or self.get_pre_fraction_along()!=0.5 else '') + """Get pre-synaptic information summary""" + + return str(self.get_pre_cell_id()) + ( + ":" + + str(self.get_pre_segment_id()) + + "(" + + "%.5f" % self.get_pre_fraction_along() + + ")" + if self.get_pre_segment_id() != 0 or self.get_pre_fraction_along() != 0.5 + else "" + ) def get_post_info(self): - - return str(self.get_post_cell_id())+(':'+str(self.get_post_segment_id())+'('+ '%.5f'%self.get_post_fraction_along()+')' if self.get_post_segment_id()!=0 or self.get_post_fraction_along()!=0.5 else '') - + """Get post-synaptic information summary""" + + return str(self.get_post_cell_id()) + ( + ":" + + str(self.get_post_segment_id()) + + "(" + + "%.5f" % self.get_post_fraction_along() + + ")" + if self.get_post_segment_id() != 0 or self.get_post_fraction_along() != 0.5 + else "" + ) def __str__(self): - return "Continuous Connection "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())+ ", pre comp: "+str(self.pre_component)+", post comp: "+str(self.post_component) - + return ( + "Continuous Connection " + + str(self.id) + + ": " + + str(self.get_pre_info()) + + " -> " + + str(self.get_post_info()) + + ", pre comp: " + + str(self.pre_component) + + ", post comp: " + + str(self.post_component) + ) # end class ContinuousConnection class ElectricalConnection(BaseConnectionNewFormat): - """Individual electrical synaptic connection""" + """ElectricalConnection -- To enable connections between populations through gap junctions.""" 
+ + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('synapse', 'NmlId', 0, 0, {'use': u'required'}), + MemberSpec_("synapse", "NmlId", 0, 0, {"use": "required", "name": "synapse"}), ] subclass = None superclass = BaseConnectionNewFormat - def __init__(self, neuro_lex_id=None, id=None, pre_cell=None, pre_segment='0', pre_fraction_along='0.5', post_cell=None, post_segment='0', post_fraction_along='0.5', synapse=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + pre_cell=None, + pre_segment="0", + pre_fraction_along="0.5", + post_cell=None, + post_segment="0", + post_fraction_along="0.5", + synapse=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ElectricalConnection, self).__init__(neuro_lex_id, id, pre_cell, pre_segment, pre_fraction_along, post_cell, post_segment, post_fraction_along, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ElectricalConnection"), self).__init__( + neuro_lex_id, + id, + pre_cell, + pre_segment, + pre_fraction_along, + post_cell, + post_segment, + post_fraction_along, + extensiontype_, + **kwargs_ + ) self.synapse = _cast(None, synapse) + self.synapse_nsprefix_ = None self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ElectricalConnection) + CurrentSubclassModule_, ElectricalConnection + ) if subclass is not None: return subclass(*args_, **kwargs_) if ElectricalConnection.subclass: return ElectricalConnection.subclass(*args_, **kwargs_) else: return ElectricalConnection(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a 
restriction on xs:string. - if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): if ( - super(ElectricalConnection, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + + def _hasContent(self): + if super(ElectricalConnection, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ElectricalConnection', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ElectricalConnection') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ElectricalConnection", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ElectricalConnection") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ElectricalConnection": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" 
showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalConnection') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ElectricalConnection', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ElectricalConnection'): - super(ElectricalConnection, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalConnection') - if self.synapse is not None and 'synapse' not in already_processed: - already_processed.add('synapse') - outfile.write(' synapse=%s' % (quote_attrib(self.synapse), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ElectricalConnection", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ElectricalConnection", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ElectricalConnection", + ): + super(ElectricalConnection, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ElectricalConnection", + ) + if self.synapse is not None 
and "synapse" not in already_processed: + already_processed.add("synapse") + outfile.write( + " synapse=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.synapse), input_name="synapse" + ) + ), + ) + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ElectricalConnection', fromsubclass_=False, pretty_print=True): - super(ElectricalConnection, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ElectricalConnection", + fromsubclass_=False, + pretty_print=True, + ): + super(ElectricalConnection, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, 
already_processed): - value = find_attr_value_('synapse', node) - if value is not None and 'synapse' not in already_processed: - already_processed.add('synapse') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("synapse", node) + if value is not None and "synapse" not in already_processed: + already_processed.add("synapse") self.synapse = value - self.validate_NmlId(self.synapse) # validate type NmlId - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_NmlId(self.synapse) # validate type NmlId + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(ElectricalConnection, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ElectricalConnection, self).buildChildren(child_, node, nodeName_, True) + super(ElectricalConnection, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(ElectricalConnection, self)._buildChildren(child_, node, nodeName_, True) pass def _get_cell_id(self, id_string): - return int(float(id_string)) + return int(float(id_string)) def get_pre_cell_id(self): + """Get the ID of the pre-synaptic cell + + :returns: ID of pre-synaptic cell + :rtype: str + """ return self._get_cell_id(self.pre_cell) def get_post_cell_id(self): + """Get the ID of the post-synaptic cell + + :returns: ID of post-synaptic cell + :rtype: str + """ return self._get_cell_id(self.post_cell) def get_pre_segment_id(self): + """Get the ID of the pre-synpatic segment + + :returns: ID of pre-synaptic segment. 
+ :rtype: str + """ return int(self.pre_segment) def get_post_segment_id(self): + """Get the ID of the post-synpatic segment + + :returns: ID of post-synaptic segment. + :rtype: str + """ return int(self.post_segment) def get_pre_fraction_along(self): + """Get pre-synaptic fraction along information""" return float(self.pre_fraction_along) def get_post_fraction_along(self): + """Get post-synaptic fraction along information""" return float(self.post_fraction_along) - def get_pre_info(self): - - return str(self.get_pre_cell_id())+(':'+str(self.get_pre_segment_id())+'('+ '%.5f'%self.get_pre_fraction_along()+')' if self.get_pre_segment_id()!=0 or self.get_pre_fraction_along()!=0.5 else '') + """Get pre-synaptic information summary""" + + return str(self.get_pre_cell_id()) + ( + ":" + + str(self.get_pre_segment_id()) + + "(" + + "%.5f" % self.get_pre_fraction_along() + + ")" + if self.get_pre_segment_id() != 0 or self.get_pre_fraction_along() != 0.5 + else "" + ) def get_post_info(self): - - return str(self.get_post_cell_id())+(':'+str(self.get_post_segment_id())+'('+ '%.5f'%self.get_post_fraction_along()+')' if self.get_post_segment_id()!=0 or self.get_post_fraction_along()!=0.5 else '') - + """Get post-synaptic information summary""" + + return str(self.get_post_cell_id()) + ( + ":" + + str(self.get_post_segment_id()) + + "(" + + "%.5f" % self.get_post_fraction_along() + + ")" + if self.get_post_segment_id() != 0 or self.get_post_fraction_along() != 0.5 + else "" + ) def __str__(self): - return "Electrical Connection "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())+ ", synapse: "+str(self.synapse) - + return ( + "Electrical Connection " + + str(self.id) + + ": " + + str(self.get_pre_info()) + + " -> " + + str(self.get_post_info()) + + ", synapse: " + + str(self.synapse) + ) # end class ElectricalConnection class ConnectionWD(BaseConnectionOldFormat): - """Individual synaptic connection with weight and delay""" + """ConnectionWD -- Event 
connection between named components, which gets processed via a new instance of a synapse component which is created on the target component, includes setting of **weight** and **delay** for the synaptic connection + \n + :param weight: + :type weight: none + :param delay: + :type delay: time + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('weight', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('delay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_("weight", "xs:float", 0, 0, {"use": "required", "name": "weight"}), + MemberSpec_( + "delay", "Nml2Quantity_time", 0, 0, {"use": "required", "name": "delay"} + ), ] subclass = None superclass = BaseConnectionOldFormat - def __init__(self, neuro_lex_id=None, id=None, pre_cell_id=None, pre_segment_id='0', pre_fraction_along='0.5', post_cell_id=None, post_segment_id='0', post_fraction_along='0.5', weight=None, delay=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + pre_cell_id=None, + pre_segment_id="0", + pre_fraction_along="0.5", + post_cell_id=None, + post_segment_id="0", + post_fraction_along="0.5", + weight=None, + delay=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ConnectionWD, self).__init__(neuro_lex_id, id, pre_cell_id, pre_segment_id, pre_fraction_along, post_cell_id, post_segment_id, post_fraction_along, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ConnectionWD"), self).__init__( + neuro_lex_id, + id, + pre_cell_id, + pre_segment_id, + pre_fraction_along, + post_cell_id, + post_segment_id, + post_fraction_along, + **kwargs_ + ) self.weight = _cast(float, weight) + self.weight_nsprefix_ = None self.delay = _cast(None, delay) + self.delay_nsprefix_ = None + def factory(*args_, **kwargs_): if 
CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ConnectionWD) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ConnectionWD) if subclass is not None: return subclass(*args_, **kwargs_) if ConnectionWD.subclass: return ConnectionWD.subclass(*args_, **kwargs_) else: return ConnectionWD(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] - def hasContent_(self): if ( - super(ConnectionWD, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_time_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$"] + ] + + def _hasContent(self): + if super(ConnectionWD, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ConnectionWD', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ConnectionWD') + + def 
export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ConnectionWD", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ConnectionWD") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ConnectionWD": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ConnectionWD') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ConnectionWD', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ConnectionWD'): - super(ConnectionWD, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ConnectionWD') - if self.weight is not None and 'weight' not in already_processed: - already_processed.add('weight') - outfile.write(' weight="%s"' % self.gds_format_float(self.weight, input_name='weight')) - if self.delay is not None and 'delay' not in already_processed: - already_processed.add('delay') - outfile.write(' delay=%s' % (quote_attrib(self.delay), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ConnectionWD', fromsubclass_=False, pretty_print=True): - super(ConnectionWD, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + 
namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ConnectionWD" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ConnectionWD", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ConnectionWD", + ): + super(ConnectionWD, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ConnectionWD" + ) + if self.weight is not None and "weight" not in already_processed: + already_processed.add("weight") + outfile.write( + ' weight="%s"' % self.gds_format_float(self.weight, input_name="weight") + ) + if self.delay is not None and "delay" not in already_processed: + already_processed.add("delay") + outfile.write( + " delay=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.delay), input_name="delay" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ConnectionWD", + fromsubclass_=False, + pretty_print=True, + ): + super(ConnectionWD, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, 
node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('weight', node) - if value is not None and 'weight' not in already_processed: - already_processed.add('weight') - try: - self.weight = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (weight): %s' % exp) - value = find_attr_value_('delay', node) - if value is not None and 'delay' not in already_processed: - already_processed.add('delay') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("weight", node) + if value is not None and "weight" not in already_processed: + already_processed.add("weight") + value = self.gds_parse_float(value, node, "weight") + self.weight = value + value = find_attr_value_("delay", node) + if value is not None and "delay" not in already_processed: + already_processed.add("delay") self.delay = value - self.validate_Nml2Quantity_time(self.delay) # validate type Nml2Quantity_time - super(ConnectionWD, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ConnectionWD, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_time( + self.delay + ) # validate type Nml2Quantity_time + super(ConnectionWD, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(ConnectionWD, self)._buildChildren(child_, node, nodeName_, True) pass def _get_cell_id(self, id_string): - if '[' in id_string: - return int(id_string.split('[')[1].split(']')[0]) + if "[" in id_string: + return int(id_string.split("[")[1].split("]")[0]) else: - return int(id_string.split('/')[2]) + return int(id_string.split("/")[2]) def get_pre_cell_id(self): + """Get the ID of the pre-synaptic cell + + 
:returns: ID of pre-synaptic cell + :rtype: str + """ return self._get_cell_id(self.pre_cell_id) def get_post_cell_id(self): + """Get the ID of the post-synaptic cell + + :returns: ID of post-synaptic cell + :rtype: str + """ return self._get_cell_id(self.post_cell_id) def get_pre_segment_id(self): + """Get the ID of the pre-synpatic segment + + :returns: ID of pre-synaptic segment. + :rtype: str + """ return int(self.pre_segment_id) def get_post_segment_id(self): + """Get the ID of the post-synpatic segment + + :returns: ID of post-synaptic segment. + :rtype: str + """ return int(self.post_segment_id) def get_pre_fraction_along(self): + """Get pre-synaptic fraction along information""" return float(self.pre_fraction_along) def get_post_fraction_along(self): + """Get post-synaptic fraction along information""" return float(self.post_fraction_along) - def get_pre_info(self): - - return str(self.get_pre_cell_id())+(':'+str(self.get_pre_segment_id())+'('+ '%.5f'%self.get_pre_fraction_along()+')' if self.get_pre_segment_id()!=0 or self.get_pre_fraction_along()!=0.5 else '') + """Get pre-synaptic information summary""" + + return str(self.get_pre_cell_id()) + ( + ":" + + str(self.get_pre_segment_id()) + + "(" + + "%.5f" % self.get_pre_fraction_along() + + ")" + if self.get_pre_segment_id() != 0 or self.get_pre_fraction_along() != 0.5 + else "" + ) def get_post_info(self): - - return str(self.get_post_cell_id())+(':'+str(self.get_post_segment_id())+'('+ '%.5f'%self.get_post_fraction_along()+')' if self.get_post_segment_id()!=0 or self.get_post_fraction_along()!=0.5 else '') + """Get post-synaptic information summary""" + + return str(self.get_post_cell_id()) + ( + ":" + + str(self.get_post_segment_id()) + + "(" + + "%.5f" % self.get_post_fraction_along() + + ")" + if self.get_post_segment_id() != 0 or self.get_post_fraction_along() != 0.5 + else "" + ) def __str__(self): - return "Connection "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info()) + 
return ( + "Connection " + + str(self.id) + + ": " + + str(self.get_pre_info()) + + " -> " + + str(self.get_post_info()) + ) - def __str__(self): - return "Connection "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())+ ", weight: "+'%f' % (float(self.weight))+", delay: "+'%.5f' % (self.get_delay_in_ms())+" ms" + return ( + "Connection " + + str(self.id) + + ": " + + str(self.get_pre_info()) + + " -> " + + str(self.get_post_info()) + + ", weight: " + + "%f" % (float(self.weight)) + + ", delay: " + + "%.5f" % (self.get_delay_in_ms()) + + " ms" + ) def get_delay_in_ms(self): - if 'ms' in self.delay: + """Get connection delay in milli seconds + + :returns: connection delay in milli seconds + :rtype: float + """ + if "ms" in self.delay: return float(self.delay[:-2].strip()) - elif 's' in self.delay: - return float(self.delay[:-1].strip())*1000.0 + elif "s" in self.delay: + return float(self.delay[:-1].strip()) * 1000.0 # end class ConnectionWD class Connection(BaseConnectionOldFormat): - """Individual chemical (event based) synaptic connection, weight==1 and - no delay""" - member_data_items_ = [ - ] + """Connection -- Event connection directly between named components, which gets processed via a new instance of a **synapse** component which is created on the target component. 
Normally contained inside a **projection** element.""" + + __hash__ = GeneratedsSuper.__hash__ + member_data_items_ = [] subclass = None superclass = BaseConnectionOldFormat - def __init__(self, neuro_lex_id=None, id=None, pre_cell_id=None, pre_segment_id='0', pre_fraction_along='0.5', post_cell_id=None, post_segment_id='0', post_fraction_along='0.5', **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + pre_cell_id=None, + pre_segment_id="0", + pre_fraction_along="0.5", + post_cell_id=None, + post_segment_id="0", + post_fraction_along="0.5", + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Connection, self).__init__(neuro_lex_id, id, pre_cell_id, pre_segment_id, pre_fraction_along, post_cell_id, post_segment_id, post_fraction_along, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("Connection"), self).__init__( + neuro_lex_id, + id, + pre_cell_id, + pre_segment_id, + pre_fraction_along, + post_cell_id, + post_segment_id, + post_fraction_along, + **kwargs_ + ) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Connection) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Connection) if subclass is not None: return subclass(*args_, **kwargs_) if Connection.subclass: return Connection.subclass(*args_, **kwargs_) else: return Connection(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(Connection, self).hasContent_() - ): + + def _hasContent(self): + if super(Connection, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Connection', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Connection') + 
+ def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Connection", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Connection") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "Connection": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Connection') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Connection', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Connection'): - super(Connection, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Connection') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Connection', fromsubclass_=False, pretty_print=True): - super(Connection, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Connection" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + 
name_="Connection", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, already_processed, namespaceprefix_="", name_="Connection" + ): + super(Connection, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Connection" + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Connection", + fromsubclass_=False, + pretty_print=True, + ): + super(Connection, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(Connection, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(Connection, self).buildChildren(child_, node, nodeName_, True) + + def _buildAttributes(self, node, attrs, already_processed): + super(Connection, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(Connection, self)._buildChildren(child_, node, nodeName_, True) pass def _get_cell_id(self, id_string): - if '[' in id_string: - return int(id_string.split('[')[1].split(']')[0]) + if "[" in 
id_string: + return int(id_string.split("[")[1].split("]")[0]) else: - return int(id_string.split('/')[2]) + return int(id_string.split("/")[2]) def get_pre_cell_id(self): + """Get the ID of the pre-synaptic cell + + :returns: ID of pre-synaptic cell + :rtype: str + """ return self._get_cell_id(self.pre_cell_id) def get_post_cell_id(self): + """Get the ID of the post-synaptic cell + + :returns: ID of post-synaptic cell + :rtype: str + """ return self._get_cell_id(self.post_cell_id) def get_pre_segment_id(self): + """Get the ID of the pre-synpatic segment + + :returns: ID of pre-synaptic segment. + :rtype: str + """ return int(self.pre_segment_id) def get_post_segment_id(self): + """Get the ID of the post-synpatic segment + + :returns: ID of post-synaptic segment. + :rtype: str + """ return int(self.post_segment_id) def get_pre_fraction_along(self): + """Get pre-synaptic fraction along information""" return float(self.pre_fraction_along) def get_post_fraction_along(self): + """Get post-synaptic fraction along information""" return float(self.post_fraction_along) - def get_pre_info(self): - - return str(self.get_pre_cell_id())+(':'+str(self.get_pre_segment_id())+'('+ '%.5f'%self.get_pre_fraction_along()+')' if self.get_pre_segment_id()!=0 or self.get_pre_fraction_along()!=0.5 else '') + """Get pre-synaptic information summary""" + + return str(self.get_pre_cell_id()) + ( + ":" + + str(self.get_pre_segment_id()) + + "(" + + "%.5f" % self.get_pre_fraction_along() + + ")" + if self.get_pre_segment_id() != 0 or self.get_pre_fraction_along() != 0.5 + else "" + ) def get_post_info(self): - - return str(self.get_post_cell_id())+(':'+str(self.get_post_segment_id())+'('+ '%.5f'%self.get_post_fraction_along()+')' if self.get_post_segment_id()!=0 or self.get_post_fraction_along()!=0.5 else '') + """Get post-synaptic information summary""" + + return str(self.get_post_cell_id()) + ( + ":" + + str(self.get_post_segment_id()) + + "(" + + "%.5f" % self.get_post_fraction_along() + + 
")" + if self.get_post_segment_id() != 0 or self.get_post_fraction_along() != 0.5 + else "" + ) def __str__(self): - return "Connection "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info()) + return ( + "Connection " + + str(self.id) + + ": " + + str(self.get_pre_info()) + + " -> " + + str(self.get_post_info()) + ) # end class Connection class Cell2CaPools(Cell): + """Cell2CaPools -- Variant of cell with two independent Ca2+ pools. Cell with **segment** s specified in a **morphology** element along with details on its **biophysicalProperties** . NOTE: this can only be correctly simulated using jLEMS when there is a single segment in the cell, and **v** of this cell represents the membrane potential in that isopotential segment.""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('biophysical_properties2_ca_pools', 'BiophysicalProperties2CaPools', 0, 1, {u'type': u'BiophysicalProperties2CaPools', u'name': u'biophysicalProperties2CaPools', u'minOccurs': u'0'}, None), + MemberSpec_( + "biophysical_properties2_ca_pools", + "BiophysicalProperties2CaPools", + 0, + 1, + { + "minOccurs": "0", + "name": "biophysicalProperties2CaPools", + "type": "BiophysicalProperties2CaPools", + }, + None, + ), ] subclass = None superclass = Cell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, morphology_attr=None, biophysical_properties_attr=None, morphology=None, biophysical_properties=None, biophysical_properties2_ca_pools=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + morphology_attr=None, + biophysical_properties_attr=None, + morphology=None, + biophysical_properties=None, + biophysical_properties2_ca_pools=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = 
kwargs_.get('parent_object_') - super(Cell2CaPools, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, morphology_attr, biophysical_properties_attr, morphology, biophysical_properties, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("Cell2CaPools"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + morphology_attr, + biophysical_properties_attr, + morphology, + biophysical_properties, + **kwargs_ + ) self.biophysical_properties2_ca_pools = biophysical_properties2_ca_pools + self.biophysical_properties2_ca_pools_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, Cell2CaPools) + subclass = getSubclassFromModule_(CurrentSubclassModule_, Cell2CaPools) if subclass is not None: return subclass(*args_, **kwargs_) if Cell2CaPools.subclass: return Cell2CaPools.subclass(*args_, **kwargs_) else: return Cell2CaPools(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): + + def _hasContent(self): if ( - self.biophysical_properties2_ca_pools is not None or - super(Cell2CaPools, self).hasContent_() + self.biophysical_properties2_ca_pools is not None + or super(Cell2CaPools, self)._hasContent() ): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Cell2CaPools', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Cell2CaPools') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="Cell2CaPools", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Cell2CaPools") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + 
if self.original_tagname_ is not None and name_ == "Cell2CaPools": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Cell2CaPools') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Cell2CaPools', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="Cell2CaPools" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Cell2CaPools", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Cell2CaPools'): - super(Cell2CaPools, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Cell2CaPools') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Cell2CaPools', fromsubclass_=False, pretty_print=True): - super(Cell2CaPools, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="Cell2CaPools", + ): + super(Cell2CaPools, self)._exportAttributes( + 
outfile, level, already_processed, namespaceprefix_, name_="Cell2CaPools" + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="Cell2CaPools", + fromsubclass_=False, + pretty_print=True, + ): + super(Cell2CaPools, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.biophysical_properties2_ca_pools is not None: - self.biophysical_properties2_ca_pools.export(outfile, level, namespaceprefix_, namespacedef_='', name_='biophysicalProperties2CaPools', pretty_print=pretty_print) - def build(self, node): + namespaceprefix_ = ( + self.biophysical_properties2_ca_pools_nsprefix_ + ":" + if (UseCapturedNS_ and self.biophysical_properties2_ca_pools_nsprefix_) + else "" + ) + self.biophysical_properties2_ca_pools.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="biophysicalProperties2CaPools", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(Cell2CaPools, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'biophysicalProperties2CaPools': + + def _buildAttributes(self, node, attrs, already_processed): + 
super(Cell2CaPools, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "biophysicalProperties2CaPools": obj_ = BiophysicalProperties2CaPools.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.biophysical_properties2_ca_pools = obj_ - obj_.original_tagname_ = 'biophysicalProperties2CaPools' - super(Cell2CaPools, self).buildChildren(child_, node, nodeName_, True) + obj_.original_tagname_ = "biophysicalProperties2CaPools" + super(Cell2CaPools, self)._buildChildren(child_, node, nodeName_, True) + + # end class Cell2CaPools class AdExIaFCell(BaseCellMembPotCap): + """AdExIaFCell -- Model based on Brette R and Gerstner W ( 2005 ) Adaptive Exponential Integrate-and-Fire Model as an Effective Description of Neuronal Activity. J Neurophysiol 94:3637-3642 + \n + :param gL: + :type gL: conductance + :param EL: + :type EL: voltage + :param VT: + :type VT: voltage + :param thresh: + :type thresh: voltage + :param reset: + :type reset: voltage + :param delT: + :type delT: voltage + :param tauw: + :type tauw: time + :param refract: + :type refract: time + :param a: + :type a: conductance + :param b: + :type b: current + :param C: Total capacitance of the cell membrane + :type C: capacitance + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('g_l', 'Nml2Quantity_conductance', 0, 0, {'use': u'required'}), - MemberSpec_('EL', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('reset', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('VT', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('thresh', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('del_t', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('tauw', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('refract', 
'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('a', 'Nml2Quantity_conductance', 0, 0, {'use': u'required'}), - MemberSpec_('b', 'Nml2Quantity_current', 0, 0, {'use': u'required'}), + MemberSpec_( + "g_l", "Nml2Quantity_conductance", 0, 0, {"use": "required", "name": "g_l"} + ), + MemberSpec_( + "EL", "Nml2Quantity_voltage", 0, 0, {"use": "required", "name": "EL"} + ), + MemberSpec_( + "reset", "Nml2Quantity_voltage", 0, 0, {"use": "required", "name": "reset"} + ), + MemberSpec_( + "VT", "Nml2Quantity_voltage", 0, 0, {"use": "required", "name": "VT"} + ), + MemberSpec_( + "thresh", + "Nml2Quantity_voltage", + 0, + 0, + {"use": "required", "name": "thresh"}, + ), + MemberSpec_( + "del_t", "Nml2Quantity_voltage", 0, 0, {"use": "required", "name": "del_t"} + ), + MemberSpec_( + "tauw", "Nml2Quantity_time", 0, 0, {"use": "required", "name": "tauw"} + ), + MemberSpec_( + "refract", "Nml2Quantity_time", 0, 0, {"use": "required", "name": "refract"} + ), + MemberSpec_( + "a", "Nml2Quantity_conductance", 0, 0, {"use": "required", "name": "a"} + ), + MemberSpec_( + "b", "Nml2Quantity_current", 0, 0, {"use": "required", "name": "b"} + ), ] subclass = None superclass = BaseCellMembPotCap - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, C=None, g_l=None, EL=None, reset=None, VT=None, thresh=None, del_t=None, tauw=None, refract=None, a=None, b=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + C=None, + g_l=None, + EL=None, + reset=None, + VT=None, + thresh=None, + del_t=None, + tauw=None, + refract=None, + a=None, + b=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(AdExIaFCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, 
C, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("AdExIaFCell"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, C, **kwargs_ + ) self.g_l = _cast(None, g_l) + self.g_l_nsprefix_ = None self.EL = _cast(None, EL) + self.EL_nsprefix_ = None self.reset = _cast(None, reset) + self.reset_nsprefix_ = None self.VT = _cast(None, VT) + self.VT_nsprefix_ = None self.thresh = _cast(None, thresh) + self.thresh_nsprefix_ = None self.del_t = _cast(None, del_t) + self.del_t_nsprefix_ = None self.tauw = _cast(None, tauw) + self.tauw_nsprefix_ = None self.refract = _cast(None, refract) + self.refract_nsprefix_ = None self.a = _cast(None, a) + self.a_nsprefix_ = None self.b = _cast(None, b) + self.b_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, AdExIaFCell) + subclass = getSubclassFromModule_(CurrentSubclassModule_, AdExIaFCell) if subclass is not None: return subclass(*args_, **kwargs_) if AdExIaFCell.subclass: return AdExIaFCell.subclass(*args_, **kwargs_) else: return AdExIaFCell(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_conductance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductance_patterns_, )) - validate_Nml2Quantity_conductance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$']] + self.validate_Nml2Quantity_conductance_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_conductance_patterns_, + ) + ) + + validate_Nml2Quantity_conductance_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS))$"] + ] + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.validate_Nml2Quantity_voltage_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$"] + ] + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$"] + ] + def validate_Nml2Quantity_current(self, value): # Validate type Nml2Quantity_current, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_current_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_current_patterns_, )) - validate_Nml2Quantity_current_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA)$']] - def hasContent_(self): if ( - super(AdExIaFCell, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_current_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_current_patterns_, + ) + ) + + validate_Nml2Quantity_current_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA))$"] + ] + + def _hasContent(self): + if super(AdExIaFCell, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AdExIaFCell', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('AdExIaFCell') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="AdExIaFCell", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("AdExIaFCell") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "AdExIaFCell": name_ = self.original_tagname_ + if 
UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AdExIaFCell') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AdExIaFCell', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="AdExIaFCell" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="AdExIaFCell", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AdExIaFCell'): - super(AdExIaFCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AdExIaFCell') - if self.g_l is not None and 'g_l' not in already_processed: - already_processed.add('g_l') - outfile.write(' gL=%s' % (quote_attrib(self.g_l), )) - if self.EL is not None and 'EL' not in already_processed: - already_processed.add('EL') - outfile.write(' EL=%s' % (quote_attrib(self.EL), )) - if self.reset is not None and 'reset' not in already_processed: - already_processed.add('reset') - outfile.write(' reset=%s' % (quote_attrib(self.reset), )) - if self.VT is not None and 'VT' not in already_processed: - already_processed.add('VT') - outfile.write(' VT=%s' % (quote_attrib(self.VT), )) - if self.thresh is not None and 'thresh' not in 
already_processed: - already_processed.add('thresh') - outfile.write(' thresh=%s' % (quote_attrib(self.thresh), )) - if self.del_t is not None and 'del_t' not in already_processed: - already_processed.add('del_t') - outfile.write(' delT=%s' % (quote_attrib(self.del_t), )) - if self.tauw is not None and 'tauw' not in already_processed: - already_processed.add('tauw') - outfile.write(' tauw=%s' % (quote_attrib(self.tauw), )) - if self.refract is not None and 'refract' not in already_processed: - already_processed.add('refract') - outfile.write(' refract=%s' % (quote_attrib(self.refract), )) - if self.a is not None and 'a' not in already_processed: - already_processed.add('a') - outfile.write(' a=%s' % (quote_attrib(self.a), )) - if self.b is not None and 'b' not in already_processed: - already_processed.add('b') - outfile.write(' b=%s' % (quote_attrib(self.b), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AdExIaFCell', fromsubclass_=False, pretty_print=True): - super(AdExIaFCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="AdExIaFCell", + ): + super(AdExIaFCell, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="AdExIaFCell" + ) + if self.g_l is not None and "g_l" not in already_processed: + already_processed.add("g_l") + outfile.write( + " gL=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.g_l), input_name="gL") + ), + ) + ) + if self.EL is not None and "EL" not in already_processed: + already_processed.add("EL") + outfile.write( + " EL=%s" + % ( + self.gds_encode( + 
self.gds_format_string(quote_attrib(self.EL), input_name="EL") + ), + ) + ) + if self.reset is not None and "reset" not in already_processed: + already_processed.add("reset") + outfile.write( + " reset=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.reset), input_name="reset" + ) + ), + ) + ) + if self.VT is not None and "VT" not in already_processed: + already_processed.add("VT") + outfile.write( + " VT=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.VT), input_name="VT") + ), + ) + ) + if self.thresh is not None and "thresh" not in already_processed: + already_processed.add("thresh") + outfile.write( + " thresh=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.thresh), input_name="thresh" + ) + ), + ) + ) + if self.del_t is not None and "del_t" not in already_processed: + already_processed.add("del_t") + outfile.write( + " delT=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.del_t), input_name="delT" + ) + ), + ) + ) + if self.tauw is not None and "tauw" not in already_processed: + already_processed.add("tauw") + outfile.write( + " tauw=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.tauw), input_name="tauw" + ) + ), + ) + ) + if self.refract is not None and "refract" not in already_processed: + already_processed.add("refract") + outfile.write( + " refract=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.refract), input_name="refract" + ) + ), + ) + ) + if self.a is not None and "a" not in already_processed: + already_processed.add("a") + outfile.write( + " a=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.a), input_name="a") + ), + ) + ) + if self.b is not None and "b" not in already_processed: + already_processed.add("b") + outfile.write( + " b=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.b), input_name="b") + ), + ) + ) + + def _exportChildren( + self, + 
outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="AdExIaFCell", + fromsubclass_=False, + pretty_print=True, + ): + super(AdExIaFCell, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('gL', node) - if value is not None and 'gL' not in already_processed: - already_processed.add('gL') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("gL", node) + if value is not None and "gL" not in already_processed: + already_processed.add("gL") self.g_l = value - self.validate_Nml2Quantity_conductance(self.g_l) # validate type Nml2Quantity_conductance - value = find_attr_value_('EL', node) - if value is not None and 'EL' not in already_processed: - already_processed.add('EL') + self.validate_Nml2Quantity_conductance( + self.g_l + ) # validate type Nml2Quantity_conductance + value = find_attr_value_("EL", node) + if value is not None and "EL" not in already_processed: + already_processed.add("EL") self.EL = value - self.validate_Nml2Quantity_voltage(self.EL) # validate type Nml2Quantity_voltage - value = find_attr_value_('reset', node) - if value is not None and 'reset' not in already_processed: - already_processed.add('reset') + self.validate_Nml2Quantity_voltage( + self.EL + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("reset", node) + if value is not None and 
"reset" not in already_processed: + already_processed.add("reset") self.reset = value - self.validate_Nml2Quantity_voltage(self.reset) # validate type Nml2Quantity_voltage - value = find_attr_value_('VT', node) - if value is not None and 'VT' not in already_processed: - already_processed.add('VT') + self.validate_Nml2Quantity_voltage( + self.reset + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("VT", node) + if value is not None and "VT" not in already_processed: + already_processed.add("VT") self.VT = value - self.validate_Nml2Quantity_voltage(self.VT) # validate type Nml2Quantity_voltage - value = find_attr_value_('thresh', node) - if value is not None and 'thresh' not in already_processed: - already_processed.add('thresh') + self.validate_Nml2Quantity_voltage( + self.VT + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("thresh", node) + if value is not None and "thresh" not in already_processed: + already_processed.add("thresh") self.thresh = value - self.validate_Nml2Quantity_voltage(self.thresh) # validate type Nml2Quantity_voltage - value = find_attr_value_('delT', node) - if value is not None and 'delT' not in already_processed: - already_processed.add('delT') + self.validate_Nml2Quantity_voltage( + self.thresh + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("delT", node) + if value is not None and "delT" not in already_processed: + already_processed.add("delT") self.del_t = value - self.validate_Nml2Quantity_voltage(self.del_t) # validate type Nml2Quantity_voltage - value = find_attr_value_('tauw', node) - if value is not None and 'tauw' not in already_processed: - already_processed.add('tauw') + self.validate_Nml2Quantity_voltage( + self.del_t + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("tauw", node) + if value is not None and "tauw" not in already_processed: + already_processed.add("tauw") self.tauw = value - self.validate_Nml2Quantity_time(self.tauw) # validate type 
Nml2Quantity_time - value = find_attr_value_('refract', node) - if value is not None and 'refract' not in already_processed: - already_processed.add('refract') + self.validate_Nml2Quantity_time( + self.tauw + ) # validate type Nml2Quantity_time + value = find_attr_value_("refract", node) + if value is not None and "refract" not in already_processed: + already_processed.add("refract") self.refract = value - self.validate_Nml2Quantity_time(self.refract) # validate type Nml2Quantity_time - value = find_attr_value_('a', node) - if value is not None and 'a' not in already_processed: - already_processed.add('a') + self.validate_Nml2Quantity_time( + self.refract + ) # validate type Nml2Quantity_time + value = find_attr_value_("a", node) + if value is not None and "a" not in already_processed: + already_processed.add("a") self.a = value - self.validate_Nml2Quantity_conductance(self.a) # validate type Nml2Quantity_conductance - value = find_attr_value_('b', node) - if value is not None and 'b' not in already_processed: - already_processed.add('b') + self.validate_Nml2Quantity_conductance( + self.a + ) # validate type Nml2Quantity_conductance + value = find_attr_value_("b", node) + if value is not None and "b" not in already_processed: + already_processed.add("b") self.b = value - self.validate_Nml2Quantity_current(self.b) # validate type Nml2Quantity_current - super(AdExIaFCell, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(AdExIaFCell, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_current( + self.b + ) # validate type Nml2Quantity_current + super(AdExIaFCell, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(AdExIaFCell, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class AdExIaFCell class 
Izhikevich2007Cell(BaseCellMembPotCap): + """Izhikevich2007Cell -- Cell based on the modified Izhikevich model in Izhikevich 2007, Dynamical systems in neuroscience, MIT Press + \n + :param v0: + :type v0: voltage + :param k: + :type k: conductance_per_voltage + :param vr: + :type vr: voltage + :param vt: + :type vt: voltage + :param vpeak: + :type vpeak: voltage + :param a: + :type a: per_time + :param b: + :type b: conductance + :param c: + :type c: voltage + :param d: + :type d: current + :param C: Total capacitance of the cell membrane + :type C: capacitance + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('v0', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('k', 'Nml2Quantity_conductancePerVoltage', 0, 0, {'use': u'required'}), - MemberSpec_('vr', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('vt', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('vpeak', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('a', 'Nml2Quantity_pertime', 0, 0, {'use': u'required'}), - MemberSpec_('b', 'Nml2Quantity_conductance', 0, 0, {'use': u'required'}), - MemberSpec_('c', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), - MemberSpec_('d', 'Nml2Quantity_current', 0, 0, {'use': u'required'}), + MemberSpec_( + "v0", "Nml2Quantity_voltage", 0, 0, {"use": "required", "name": "v0"} + ), + MemberSpec_( + "k", + "Nml2Quantity_conductancePerVoltage", + 0, + 0, + {"use": "required", "name": "k"}, + ), + MemberSpec_( + "vr", "Nml2Quantity_voltage", 0, 0, {"use": "required", "name": "vr"} + ), + MemberSpec_( + "vt", "Nml2Quantity_voltage", 0, 0, {"use": "required", "name": "vt"} + ), + MemberSpec_( + "vpeak", "Nml2Quantity_voltage", 0, 0, {"use": "required", "name": "vpeak"} + ), + MemberSpec_( + "a", "Nml2Quantity_pertime", 0, 0, {"use": "required", "name": "a"} + ), + MemberSpec_( + "b", "Nml2Quantity_conductance", 0, 0, {"use": "required", "name": "b"} + ), + MemberSpec_( 
+ "c", "Nml2Quantity_voltage", 0, 0, {"use": "required", "name": "c"} + ), + MemberSpec_( + "d", "Nml2Quantity_current", 0, 0, {"use": "required", "name": "d"} + ), ] subclass = None superclass = BaseCellMembPotCap - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, C=None, v0=None, k=None, vr=None, vt=None, vpeak=None, a=None, b=None, c=None, d=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + C=None, + v0=None, + k=None, + vr=None, + vt=None, + vpeak=None, + a=None, + b=None, + c=None, + d=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(Izhikevich2007Cell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, C, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("Izhikevich2007Cell"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, C, **kwargs_ + ) self.v0 = _cast(None, v0) + self.v0_nsprefix_ = None self.k = _cast(None, k) + self.k_nsprefix_ = None self.vr = _cast(None, vr) + self.vr_nsprefix_ = None self.vt = _cast(None, vt) + self.vt_nsprefix_ = None self.vpeak = _cast(None, vpeak) + self.vpeak_nsprefix_ = None self.a = _cast(None, a) + self.a_nsprefix_ = None self.b = _cast(None, b) + self.b_nsprefix_ = None self.c = _cast(None, c) + self.c_nsprefix_ = None self.d = _cast(None, d) + self.d_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, Izhikevich2007Cell) + CurrentSubclassModule_, Izhikevich2007Cell + ) if subclass is not None: return subclass(*args_, **kwargs_) if Izhikevich2007Cell.subclass: return Izhikevich2007Cell.subclass(*args_, 
**kwargs_) else: return Izhikevich2007Cell(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] + self.validate_Nml2Quantity_voltage_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$"] + ] + def validate_Nml2Quantity_conductancePerVoltage(self, value): # Validate type Nml2Quantity_conductancePerVoltage, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_conductancePerVoltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductancePerVoltage_patterns_, )) - validate_Nml2Quantity_conductancePerVoltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S_per_V|nS_per_mV)$']] + self.validate_Nml2Quantity_conductancePerVoltage_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_conductancePerVoltage_patterns_, + ) + ) + + validate_Nml2Quantity_conductancePerVoltage_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S_per_V|nS_per_mV))$"] + ] + def validate_Nml2Quantity_pertime(self, value): # Validate type Nml2Quantity_pertime, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_pertime_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_pertime_patterns_, )) - validate_Nml2Quantity_pertime_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz)$']] + self.validate_Nml2Quantity_pertime_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_pertime_patterns_, + ) + ) + + validate_Nml2Quantity_pertime_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(per_s|per_ms|Hz))$"] + ] + def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_conductance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductance_patterns_, )) - validate_Nml2Quantity_conductance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$']] + self.validate_Nml2Quantity_conductance_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_conductance_patterns_, + ) + ) + + validate_Nml2Quantity_conductance_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS))$"] + ] + def validate_Nml2Quantity_current(self, value): # Validate type Nml2Quantity_current, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_current_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_current_patterns_, )) - validate_Nml2Quantity_current_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA)$']] - def hasContent_(self): if ( - super(Izhikevich2007Cell, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_current_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_current_patterns_, + ) + ) + + validate_Nml2Quantity_current_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA))$"] + ] + + def _hasContent(self): + if super(Izhikevich2007Cell, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Izhikevich2007Cell', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('Izhikevich2007Cell') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Izhikevich2007Cell", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("Izhikevich2007Cell") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == 
"Izhikevich2007Cell": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Izhikevich2007Cell') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='Izhikevich2007Cell', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="Izhikevich2007Cell", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="Izhikevich2007Cell", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Izhikevich2007Cell'): - super(Izhikevich2007Cell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Izhikevich2007Cell') - if self.v0 is not None and 'v0' not in already_processed: - already_processed.add('v0') - outfile.write(' v0=%s' % (quote_attrib(self.v0), )) - if self.k is not None and 'k' not in already_processed: - already_processed.add('k') - outfile.write(' k=%s' % (quote_attrib(self.k), )) - if self.vr is not None and 'vr' not in already_processed: - already_processed.add('vr') - outfile.write(' vr=%s' % (quote_attrib(self.vr), )) - if self.vt is not None and 'vt' not in already_processed: - already_processed.add('vt') - outfile.write(' 
vt=%s' % (quote_attrib(self.vt), )) - if self.vpeak is not None and 'vpeak' not in already_processed: - already_processed.add('vpeak') - outfile.write(' vpeak=%s' % (quote_attrib(self.vpeak), )) - if self.a is not None and 'a' not in already_processed: - already_processed.add('a') - outfile.write(' a=%s' % (quote_attrib(self.a), )) - if self.b is not None and 'b' not in already_processed: - already_processed.add('b') - outfile.write(' b=%s' % (quote_attrib(self.b), )) - if self.c is not None and 'c' not in already_processed: - already_processed.add('c') - outfile.write(' c=%s' % (quote_attrib(self.c), )) - if self.d is not None and 'd' not in already_processed: - already_processed.add('d') - outfile.write(' d=%s' % (quote_attrib(self.d), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='Izhikevich2007Cell', fromsubclass_=False, pretty_print=True): - super(Izhikevich2007Cell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="Izhikevich2007Cell", + ): + super(Izhikevich2007Cell, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="Izhikevich2007Cell", + ) + if self.v0 is not None and "v0" not in already_processed: + already_processed.add("v0") + outfile.write( + " v0=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.v0), input_name="v0") + ), + ) + ) + if self.k is not None and "k" not in already_processed: + already_processed.add("k") + outfile.write( + " k=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.k), input_name="k") + ), + ) + ) + if self.vr is not None and "vr" not in 
already_processed: + already_processed.add("vr") + outfile.write( + " vr=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.vr), input_name="vr") + ), + ) + ) + if self.vt is not None and "vt" not in already_processed: + already_processed.add("vt") + outfile.write( + " vt=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.vt), input_name="vt") + ), + ) + ) + if self.vpeak is not None and "vpeak" not in already_processed: + already_processed.add("vpeak") + outfile.write( + " vpeak=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.vpeak), input_name="vpeak" + ) + ), + ) + ) + if self.a is not None and "a" not in already_processed: + already_processed.add("a") + outfile.write( + " a=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.a), input_name="a") + ), + ) + ) + if self.b is not None and "b" not in already_processed: + already_processed.add("b") + outfile.write( + " b=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.b), input_name="b") + ), + ) + ) + if self.c is not None and "c" not in already_processed: + already_processed.add("c") + outfile.write( + " c=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.c), input_name="c") + ), + ) + ) + if self.d is not None and "d" not in already_processed: + already_processed.add("d") + outfile.write( + " d=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.d), input_name="d") + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="Izhikevich2007Cell", + fromsubclass_=False, + pretty_print=True, + ): + super(Izhikevich2007Cell, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + 
already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('v0', node) - if value is not None and 'v0' not in already_processed: - already_processed.add('v0') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("v0", node) + if value is not None and "v0" not in already_processed: + already_processed.add("v0") self.v0 = value - self.validate_Nml2Quantity_voltage(self.v0) # validate type Nml2Quantity_voltage - value = find_attr_value_('k', node) - if value is not None and 'k' not in already_processed: - already_processed.add('k') + self.validate_Nml2Quantity_voltage( + self.v0 + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("k", node) + if value is not None and "k" not in already_processed: + already_processed.add("k") self.k = value - self.validate_Nml2Quantity_conductancePerVoltage(self.k) # validate type Nml2Quantity_conductancePerVoltage - value = find_attr_value_('vr', node) - if value is not None and 'vr' not in already_processed: - already_processed.add('vr') + self.validate_Nml2Quantity_conductancePerVoltage( + self.k + ) # validate type Nml2Quantity_conductancePerVoltage + value = find_attr_value_("vr", node) + if value is not None and "vr" not in already_processed: + already_processed.add("vr") self.vr = value - self.validate_Nml2Quantity_voltage(self.vr) # validate type Nml2Quantity_voltage - value = find_attr_value_('vt', node) - if value is not None and 'vt' not in already_processed: - already_processed.add('vt') + self.validate_Nml2Quantity_voltage( + self.vr + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("vt", node) + if 
value is not None and "vt" not in already_processed: + already_processed.add("vt") self.vt = value - self.validate_Nml2Quantity_voltage(self.vt) # validate type Nml2Quantity_voltage - value = find_attr_value_('vpeak', node) - if value is not None and 'vpeak' not in already_processed: - already_processed.add('vpeak') + self.validate_Nml2Quantity_voltage( + self.vt + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("vpeak", node) + if value is not None and "vpeak" not in already_processed: + already_processed.add("vpeak") self.vpeak = value - self.validate_Nml2Quantity_voltage(self.vpeak) # validate type Nml2Quantity_voltage - value = find_attr_value_('a', node) - if value is not None and 'a' not in already_processed: - already_processed.add('a') + self.validate_Nml2Quantity_voltage( + self.vpeak + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("a", node) + if value is not None and "a" not in already_processed: + already_processed.add("a") self.a = value - self.validate_Nml2Quantity_pertime(self.a) # validate type Nml2Quantity_pertime - value = find_attr_value_('b', node) - if value is not None and 'b' not in already_processed: - already_processed.add('b') + self.validate_Nml2Quantity_pertime( + self.a + ) # validate type Nml2Quantity_pertime + value = find_attr_value_("b", node) + if value is not None and "b" not in already_processed: + already_processed.add("b") self.b = value - self.validate_Nml2Quantity_conductance(self.b) # validate type Nml2Quantity_conductance - value = find_attr_value_('c', node) - if value is not None and 'c' not in already_processed: - already_processed.add('c') + self.validate_Nml2Quantity_conductance( + self.b + ) # validate type Nml2Quantity_conductance + value = find_attr_value_("c", node) + if value is not None and "c" not in already_processed: + already_processed.add("c") self.c = value - self.validate_Nml2Quantity_voltage(self.c) # validate type Nml2Quantity_voltage - value = find_attr_value_('d', 
node) - if value is not None and 'd' not in already_processed: - already_processed.add('d') + self.validate_Nml2Quantity_voltage( + self.c + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("d", node) + if value is not None and "d" not in already_processed: + already_processed.add("d") self.d = value - self.validate_Nml2Quantity_current(self.d) # validate type Nml2Quantity_current - super(Izhikevich2007Cell, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(Izhikevich2007Cell, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_current( + self.d + ) # validate type Nml2Quantity_current + super(Izhikevich2007Cell, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(Izhikevich2007Cell, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class Izhikevich2007Cell class IafRefCell(IafCell): + """IafRefCell -- Integrate and fire cell with capacitance **C,** **leakConductance,** **leakReversal** and refractory period **refract** + \n + :param refract: + :type refract: time + :param leakConductance: + :type leakConductance: conductance + :param leakReversal: + :type leakReversal: voltage + :param thresh: + :type thresh: voltage + :param reset: + :type reset: voltage + :param C: Total capacitance of the cell membrane + :type C: capacitance + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('refract', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_( + "refract", "Nml2Quantity_time", 0, 0, {"use": "required", "name": "refract"} + ), ] subclass = None superclass = IafCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, leak_reversal=None, thresh=None, reset=None, C=None, leak_conductance=None, refract=None, 
**kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + leak_reversal=None, + thresh=None, + reset=None, + C=None, + leak_conductance=None, + refract=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(IafRefCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, leak_reversal, thresh, reset, C, leak_conductance, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("IafRefCell"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + leak_reversal, + thresh, + reset, + C, + leak_conductance, + **kwargs_ + ) self.refract = _cast(None, refract) + self.refract_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, IafRefCell) + subclass = getSubclassFromModule_(CurrentSubclassModule_, IafRefCell) if subclass is not None: return subclass(*args_, **kwargs_) if IafRefCell.subclass: return IafRefCell.subclass(*args_, **kwargs_) else: return IafRefCell(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] - def hasContent_(self): if ( - super(IafRefCell, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_time_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$"] + ] + + def _hasContent(self): + if super(IafRefCell, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IafRefCell', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IafRefCell') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IafRefCell", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IafRefCell") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "IafRefCell": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + 
namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafRefCell') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IafRefCell', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IafRefCell" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IafRefCell", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IafRefCell'): - super(IafRefCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafRefCell') - if self.refract is not None and 'refract' not in already_processed: - already_processed.add('refract') - outfile.write(' refract=%s' % (quote_attrib(self.refract), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IafRefCell', fromsubclass_=False, pretty_print=True): - super(IafRefCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, outfile, level, 
already_processed, namespaceprefix_="", name_="IafRefCell" + ): + super(IafRefCell, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IafRefCell" + ) + if self.refract is not None and "refract" not in already_processed: + already_processed.add("refract") + outfile.write( + " refract=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.refract), input_name="refract" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IafRefCell", + fromsubclass_=False, + pretty_print=True, + ): + super(IafRefCell, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('refract', node) - if value is not None and 'refract' not in already_processed: - already_processed.add('refract') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("refract", node) + if value is not None and "refract" not in already_processed: + already_processed.add("refract") self.refract = value - self.validate_Nml2Quantity_time(self.refract) # validate type Nml2Quantity_time - super(IafRefCell, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(IafRefCell, self).buildChildren(child_, node, nodeName_, True) + 
self.validate_Nml2Quantity_time( + self.refract + ) # validate type Nml2Quantity_time + super(IafRefCell, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(IafRefCell, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class IafRefCell class IafTauRefCell(IafTauCell): + """IafTauRefCell -- Integrate and fire cell which returns to its leak reversal potential of **leakReversal** with a time course **tau.** It has a refractory period of **refract** after spiking + \n + :param refract: + :type refract: time + :param leakReversal: + :type leakReversal: voltage + :param tau: + :type tau: time + :param thresh: The membrane potential at which to emit a spiking event and reset voltage + :type thresh: voltage + :param reset: The value the membrane potential is reset to on spiking + :type reset: voltage + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('refract', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_( + "refract", "Nml2Quantity_time", 0, 0, {"use": "required", "name": "refract"} + ), ] subclass = None superclass = IafTauCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, leak_reversal=None, thresh=None, reset=None, tau=None, refract=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + leak_reversal=None, + thresh=None, + reset=None, + tau=None, + refract=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(IafTauRefCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, leak_reversal, thresh, reset, tau, **kwargs_) + self.parent_object_ = 
kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("IafTauRefCell"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + leak_reversal, + thresh, + reset, + tau, + **kwargs_ + ) self.refract = _cast(None, refract) + self.refract_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, IafTauRefCell) + subclass = getSubclassFromModule_(CurrentSubclassModule_, IafTauRefCell) if subclass is not None: return subclass(*args_, **kwargs_) if IafTauRefCell.subclass: return IafTauRefCell.subclass(*args_, **kwargs_) else: return IafTauRefCell(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] - def hasContent_(self): if ( - super(IafTauRefCell, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_time_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + 
["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$"] + ] + + def _hasContent(self): + if super(IafTauRefCell, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IafTauRefCell', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IafTauRefCell') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IafTauRefCell", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IafTauRefCell") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "IafTauRefCell": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafTauRefCell') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IafTauRefCell', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IafTauRefCell" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IafTauRefCell", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def 
exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IafTauRefCell'): - super(IafTauRefCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IafTauRefCell') - if self.refract is not None and 'refract' not in already_processed: - already_processed.add('refract') - outfile.write(' refract=%s' % (quote_attrib(self.refract), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IafTauRefCell', fromsubclass_=False, pretty_print=True): - super(IafTauRefCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="IafTauRefCell", + ): + super(IafTauRefCell, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IafTauRefCell" + ) + if self.refract is not None and "refract" not in already_processed: + already_processed.add("refract") + outfile.write( + " refract=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.refract), input_name="refract" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IafTauRefCell", + fromsubclass_=False, + pretty_print=True, + ): + super(IafTauRefCell, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: 
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('refract', node) - if value is not None and 'refract' not in already_processed: - already_processed.add('refract') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("refract", node) + if value is not None and "refract" not in already_processed: + already_processed.add("refract") self.refract = value - self.validate_Nml2Quantity_time(self.refract) # validate type Nml2Quantity_time - super(IafTauRefCell, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(IafTauRefCell, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_time( + self.refract + ) # validate type Nml2Quantity_time + super(IafTauRefCell, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(IafTauRefCell, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class IafTauRefCell class DoubleSynapse(BaseVoltageDepSynapse): + """DoubleSynapse -- Synapse consisting of two independent synaptic mechanisms ( e. g. 
AMPA-R and NMDA-R ), which can be easily colocated in connections""" + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('synapse1', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('synapse2', 'NmlId', 0, 0, {'use': u'required'}), - MemberSpec_('synapse1_path', 'xs:string', 0, 0, {'use': u'required'}), - MemberSpec_('synapse2_path', 'xs:string', 0, 0, {'use': u'required'}), + MemberSpec_("synapse1", "NmlId", 0, 0, {"use": "required", "name": "synapse1"}), + MemberSpec_("synapse2", "NmlId", 0, 0, {"use": "required", "name": "synapse2"}), + MemberSpec_( + "synapse1_path", + "xs:string", + 0, + 0, + {"use": "required", "name": "synapse1_path"}, + ), + MemberSpec_( + "synapse2_path", + "xs:string", + 0, + 0, + {"use": "required", "name": "synapse2_path"}, + ), ] subclass = None superclass = BaseVoltageDepSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, synapse1=None, synapse2=None, synapse1_path=None, synapse2_path=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + synapse1=None, + synapse2=None, + synapse1_path=None, + synapse2_path=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(DoubleSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("DoubleSynapse"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.synapse1 = _cast(None, synapse1) + self.synapse1_nsprefix_ = None self.synapse2 = _cast(None, synapse2) + self.synapse2_nsprefix_ = None self.synapse1_path = _cast(None, synapse1_path) + self.synapse1_path_nsprefix_ = None 
self.synapse2_path = _cast(None, synapse2_path) + self.synapse2_path_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, DoubleSynapse) + subclass = getSubclassFromModule_(CurrentSubclassModule_, DoubleSynapse) if subclass is not None: return subclass(*args_, **kwargs_) if DoubleSynapse.subclass: return DoubleSynapse.subclass(*args_, **kwargs_) else: return DoubleSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_NmlId(self, value): # Validate type NmlId, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_NmlId_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_NmlId_patterns_, )) - validate_NmlId_patterns_ = [[u'^[a-zA-Z_][a-zA-Z0-9_]*$']] - def hasContent_(self): if ( - super(DoubleSynapse, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_NmlId_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_NmlId_patterns_, + ) + ) + + validate_NmlId_patterns_ = [["^([a-zA-Z_][a-zA-Z0-9_]*)$"]] + + def _hasContent(self): + if super(DoubleSynapse, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DoubleSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('DoubleSynapse') + + def export( + self, + outfile, 
+ level, + namespaceprefix_="", + namespacedef_="", + name_="DoubleSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("DoubleSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "DoubleSynapse": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DoubleSynapse') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='DoubleSynapse', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="DoubleSynapse" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="DoubleSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DoubleSynapse'): - super(DoubleSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DoubleSynapse') - if self.synapse1 is not None and 'synapse1' not in already_processed: - already_processed.add('synapse1') - outfile.write(' synapse1=%s' % (quote_attrib(self.synapse1), )) - if 
self.synapse2 is not None and 'synapse2' not in already_processed: - already_processed.add('synapse2') - outfile.write(' synapse2=%s' % (quote_attrib(self.synapse2), )) - if self.synapse1_path is not None and 'synapse1_path' not in already_processed: - already_processed.add('synapse1_path') - outfile.write(' synapse1Path=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.synapse1_path), input_name='synapse1Path')), )) - if self.synapse2_path is not None and 'synapse2_path' not in already_processed: - already_processed.add('synapse2_path') - outfile.write(' synapse2Path=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.synapse2_path), input_name='synapse2Path')), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DoubleSynapse', fromsubclass_=False, pretty_print=True): - super(DoubleSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="DoubleSynapse", + ): + super(DoubleSynapse, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="DoubleSynapse" + ) + if self.synapse1 is not None and "synapse1" not in already_processed: + already_processed.add("synapse1") + outfile.write( + " synapse1=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.synapse1), input_name="synapse1" + ) + ), + ) + ) + if self.synapse2 is not None and "synapse2" not in already_processed: + already_processed.add("synapse2") + outfile.write( + " synapse2=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.synapse2), input_name="synapse2" + ) + ), + ) + ) + if self.synapse1_path is not None 
and "synapse1_path" not in already_processed: + already_processed.add("synapse1_path") + outfile.write( + " synapse1Path=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.synapse1_path), input_name="synapse1Path" + ) + ), + ) + ) + if self.synapse2_path is not None and "synapse2_path" not in already_processed: + already_processed.add("synapse2_path") + outfile.write( + " synapse2Path=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.synapse2_path), input_name="synapse2Path" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="DoubleSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(DoubleSynapse, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('synapse1', node) - if value is not None and 'synapse1' not in already_processed: - already_processed.add('synapse1') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("synapse1", node) + if value is not None and "synapse1" not in already_processed: + already_processed.add("synapse1") self.synapse1 = value - self.validate_NmlId(self.synapse1) # validate type NmlId - value = find_attr_value_('synapse2', node) - if value is not None and 'synapse2' not in already_processed: - already_processed.add('synapse2') + 
self.validate_NmlId(self.synapse1) # validate type NmlId + value = find_attr_value_("synapse2", node) + if value is not None and "synapse2" not in already_processed: + already_processed.add("synapse2") self.synapse2 = value - self.validate_NmlId(self.synapse2) # validate type NmlId - value = find_attr_value_('synapse1Path', node) - if value is not None and 'synapse1Path' not in already_processed: - already_processed.add('synapse1Path') + self.validate_NmlId(self.synapse2) # validate type NmlId + value = find_attr_value_("synapse1Path", node) + if value is not None and "synapse1Path" not in already_processed: + already_processed.add("synapse1Path") self.synapse1_path = value - value = find_attr_value_('synapse2Path', node) - if value is not None and 'synapse2Path' not in already_processed: - already_processed.add('synapse2Path') + value = find_attr_value_("synapse2Path", node) + if value is not None and "synapse2Path" not in already_processed: + already_processed.add("synapse2Path") self.synapse2_path = value - super(DoubleSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(DoubleSynapse, self).buildChildren(child_, node, nodeName_, True) + super(DoubleSynapse, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(DoubleSynapse, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class DoubleSynapse class AlphaCurrentSynapse(BaseCurrentBasedSynapse): + """AlphaCurrentSynapse -- Alpha current synapse: rise time and decay time are both **tau.** + \n + :param tau: Time course for rise and decay + :type tau: time + :param ibase: Baseline current increase after receiving a spike + :type ibase: current + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('tau', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - 
MemberSpec_('ibase', 'Nml2Quantity_current', 0, 0, {'use': u'required'}), + MemberSpec_( + "tau", "Nml2Quantity_time", 0, 0, {"use": "required", "name": "tau"} + ), + MemberSpec_( + "ibase", "Nml2Quantity_current", 0, 0, {"use": "required", "name": "ibase"} + ), ] subclass = None superclass = BaseCurrentBasedSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, tau=None, ibase=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + tau=None, + ibase=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(AlphaCurrentSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("AlphaCurrentSynapse"), self).__init__( + neuro_lex_id, id, metaid, notes, properties, annotation, **kwargs_ + ) self.tau = _cast(None, tau) + self.tau_nsprefix_ = None self.ibase = _cast(None, ibase) + self.ibase_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, AlphaCurrentSynapse) + CurrentSubclassModule_, AlphaCurrentSynapse + ) if subclass is not None: return subclass(*args_, **kwargs_) if AlphaCurrentSynapse.subclass: return AlphaCurrentSynapse.subclass(*args_, **kwargs_) else: return AlphaCurrentSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] + self.validate_Nml2Quantity_time_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$"] + ] + def validate_Nml2Quantity_current(self, value): # Validate type Nml2Quantity_current, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_current_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_current_patterns_, )) - validate_Nml2Quantity_current_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA)$']] - def hasContent_(self): if ( - super(AlphaCurrentSynapse, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_current_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_current_patterns_, + ) + ) + + validate_Nml2Quantity_current_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(A|uA|nA|pA))$"] + ] + + def _hasContent(self): + if super(AlphaCurrentSynapse, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AlphaCurrentSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('AlphaCurrentSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="AlphaCurrentSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("AlphaCurrentSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == 
"AlphaCurrentSynapse": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaCurrentSynapse') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AlphaCurrentSynapse', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="AlphaCurrentSynapse", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="AlphaCurrentSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AlphaCurrentSynapse'): - super(AlphaCurrentSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaCurrentSynapse') - if self.tau is not None and 'tau' not in already_processed: - already_processed.add('tau') - outfile.write(' tau=%s' % (quote_attrib(self.tau), )) - if self.ibase is not None and 'ibase' not in already_processed: - already_processed.add('ibase') - outfile.write(' ibase=%s' % (quote_attrib(self.ibase), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AlphaCurrentSynapse', fromsubclass_=False, pretty_print=True): - super(AlphaCurrentSynapse, self).exportChildren(outfile, level, 
namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="AlphaCurrentSynapse", + ): + super(AlphaCurrentSynapse, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="AlphaCurrentSynapse", + ) + if self.tau is not None and "tau" not in already_processed: + already_processed.add("tau") + outfile.write( + " tau=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.tau), input_name="tau") + ), + ) + ) + if self.ibase is not None and "ibase" not in already_processed: + already_processed.add("ibase") + outfile.write( + " ibase=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.ibase), input_name="ibase" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="AlphaCurrentSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(AlphaCurrentSynapse, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('tau', node) - if value is not None and 'tau' not in 
already_processed: - already_processed.add('tau') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("tau", node) + if value is not None and "tau" not in already_processed: + already_processed.add("tau") self.tau = value - self.validate_Nml2Quantity_time(self.tau) # validate type Nml2Quantity_time - value = find_attr_value_('ibase', node) - if value is not None and 'ibase' not in already_processed: - already_processed.add('ibase') + self.validate_Nml2Quantity_time(self.tau) # validate type Nml2Quantity_time + value = find_attr_value_("ibase", node) + if value is not None and "ibase" not in already_processed: + already_processed.add("ibase") self.ibase = value - self.validate_Nml2Quantity_current(self.ibase) # validate type Nml2Quantity_current - super(AlphaCurrentSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(AlphaCurrentSynapse, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_current( + self.ibase + ) # validate type Nml2Quantity_current + super(AlphaCurrentSynapse, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(AlphaCurrentSynapse, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class AlphaCurrentSynapse class BaseConductanceBasedSynapseTwo(BaseVoltageDepSynapse): + """BaseConductanceBasedSynapseTwo -- Synapse model suited for a sum of two expTwoSynapses which exposes a conductance **g** in addition to producing a current. Not necessarily ohmic!! 
cno_0000027 + \n + :param gbase1: Baseline conductance 1 + :type gbase1: conductance + :param gbase2: Baseline conductance 2 + :type gbase2: conductance + :param erev: Reversal potential of the synapse + :type erev: voltage + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('gbase1', 'Nml2Quantity_conductance', 0, 0, {'use': u'required'}), - MemberSpec_('gbase2', 'Nml2Quantity_conductance', 0, 0, {'use': u'required'}), - MemberSpec_('erev', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), + MemberSpec_( + "gbase1", + "Nml2Quantity_conductance", + 0, + 0, + {"use": "required", "name": "gbase1"}, + ), + MemberSpec_( + "gbase2", + "Nml2Quantity_conductance", + 0, + 0, + {"use": "required", "name": "gbase2"}, + ), + MemberSpec_( + "erev", "Nml2Quantity_voltage", 0, 0, {"use": "required", "name": "erev"} + ), ] subclass = None superclass = BaseVoltageDepSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, gbase1=None, gbase2=None, erev=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + gbase1=None, + gbase2=None, + erev=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BaseConductanceBasedSynapseTwo, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("BaseConductanceBasedSynapseTwo"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + extensiontype_, + **kwargs_ + ) self.gbase1 = _cast(None, gbase1) + self.gbase1_nsprefix_ = None self.gbase2 = _cast(None, gbase2) + self.gbase2_nsprefix_ = None 
self.erev = _cast(None, erev) + self.erev_nsprefix_ = None self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, BaseConductanceBasedSynapseTwo) + CurrentSubclassModule_, BaseConductanceBasedSynapseTwo + ) if subclass is not None: return subclass(*args_, **kwargs_) if BaseConductanceBasedSynapseTwo.subclass: return BaseConductanceBasedSynapseTwo.subclass(*args_, **kwargs_) else: return BaseConductanceBasedSynapseTwo(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_conductance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductance_patterns_, )) - validate_Nml2Quantity_conductance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$']] + self.validate_Nml2Quantity_conductance_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_conductance_patterns_, + ) + ) + + validate_Nml2Quantity_conductance_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS))$"] + ] + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] - def hasContent_(self): if ( - super(BaseConductanceBasedSynapseTwo, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_voltage_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$"] + ] + + def _hasContent(self): + if super(BaseConductanceBasedSynapseTwo, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseConductanceBasedSynapseTwo', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BaseConductanceBasedSynapseTwo') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseConductanceBasedSynapseTwo", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get( + "BaseConductanceBasedSynapseTwo" + ) if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if 
( + self.original_tagname_ is not None + and name_ == "BaseConductanceBasedSynapseTwo" + ): name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConductanceBasedSynapseTwo') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseConductanceBasedSynapseTwo', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseConductanceBasedSynapseTwo", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BaseConductanceBasedSynapseTwo", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseConductanceBasedSynapseTwo'): - super(BaseConductanceBasedSynapseTwo, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConductanceBasedSynapseTwo') - if self.gbase1 is not None and 'gbase1' not in already_processed: - already_processed.add('gbase1') - outfile.write(' gbase1=%s' % (quote_attrib(self.gbase1), )) - if self.gbase2 is not None and 'gbase2' not in already_processed: - already_processed.add('gbase2') - outfile.write(' gbase2=%s' % (quote_attrib(self.gbase2), )) - if self.erev is not None and 'erev' not in 
already_processed: - already_processed.add('erev') - outfile.write(' erev=%s' % (quote_attrib(self.erev), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BaseConductanceBasedSynapseTwo", + ): + super(BaseConductanceBasedSynapseTwo, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseConductanceBasedSynapseTwo", + ) + if self.gbase1 is not None and "gbase1" not in already_processed: + already_processed.add("gbase1") + outfile.write( + " gbase1=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.gbase1), input_name="gbase1" + ) + ), + ) + ) + if self.gbase2 is not None and "gbase2" not in already_processed: + already_processed.add("gbase2") + outfile.write( + " gbase2=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.gbase2), input_name="gbase2" + ) + ), + ) + ) + if self.erev is not None and "erev" not in already_processed: + already_processed.add("erev") + outfile.write( + " erev=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.erev), input_name="erev" + ) + ), + ) + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseConductanceBasedSynapseTwo', fromsubclass_=False, pretty_print=True): - super(BaseConductanceBasedSynapseTwo, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - 
self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseConductanceBasedSynapseTwo", + fromsubclass_=False, + pretty_print=True, + ): + super(BaseConductanceBasedSynapseTwo, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('gbase1', node) - if value is not None and 'gbase1' not in already_processed: - already_processed.add('gbase1') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("gbase1", node) + if value is not None and "gbase1" not in already_processed: + already_processed.add("gbase1") self.gbase1 = value - self.validate_Nml2Quantity_conductance(self.gbase1) # validate type Nml2Quantity_conductance - value = find_attr_value_('gbase2', node) - if value is not None and 'gbase2' not in already_processed: - already_processed.add('gbase2') + self.validate_Nml2Quantity_conductance( + self.gbase1 + ) # validate type Nml2Quantity_conductance + value = find_attr_value_("gbase2", node) + if value is 
not None and "gbase2" not in already_processed: + already_processed.add("gbase2") self.gbase2 = value - self.validate_Nml2Quantity_conductance(self.gbase2) # validate type Nml2Quantity_conductance - value = find_attr_value_('erev', node) - if value is not None and 'erev' not in already_processed: - already_processed.add('erev') + self.validate_Nml2Quantity_conductance( + self.gbase2 + ) # validate type Nml2Quantity_conductance + value = find_attr_value_("erev", node) + if value is not None and "erev" not in already_processed: + already_processed.add("erev") self.erev = value - self.validate_Nml2Quantity_voltage(self.erev) # validate type Nml2Quantity_voltage - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_Nml2Quantity_voltage( + self.erev + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(BaseConductanceBasedSynapseTwo, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BaseConductanceBasedSynapseTwo, self).buildChildren(child_, node, nodeName_, True) + super(BaseConductanceBasedSynapseTwo, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(BaseConductanceBasedSynapseTwo, self)._buildChildren( + child_, node, nodeName_, True + ) pass + + # end class BaseConductanceBasedSynapseTwo class BaseConductanceBasedSynapse(BaseVoltageDepSynapse): + """BaseConductanceBasedSynapse -- Synapse model which exposes a conductance **g** in addition to producing a current. Not necessarily ohmic!! 
cno_0000027 + \n + :param gbase: Baseline conductance, generally the maximum conductance following a single spike + :type gbase: conductance + :param erev: Reversal potential of the synapse + :type erev: voltage + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('gbase', 'Nml2Quantity_conductance', 0, 0, {'use': u'required'}), - MemberSpec_('erev', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), + MemberSpec_( + "gbase", + "Nml2Quantity_conductance", + 0, + 0, + {"use": "required", "name": "gbase"}, + ), + MemberSpec_( + "erev", "Nml2Quantity_voltage", 0, 0, {"use": "required", "name": "erev"} + ), ] subclass = None superclass = BaseVoltageDepSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, gbase=None, erev=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + gbase=None, + erev=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BaseConductanceBasedSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("BaseConductanceBasedSynapse"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + extensiontype_, + **kwargs_ + ) self.gbase = _cast(None, gbase) + self.gbase_nsprefix_ = None self.erev = _cast(None, erev) + self.erev_nsprefix_ = None self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, BaseConductanceBasedSynapse) + CurrentSubclassModule_, BaseConductanceBasedSynapse 
+ ) if subclass is not None: return subclass(*args_, **kwargs_) if BaseConductanceBasedSynapse.subclass: return BaseConductanceBasedSynapse.subclass(*args_, **kwargs_) else: return BaseConductanceBasedSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_conductance(self, value): # Validate type Nml2Quantity_conductance, a restriction on xs:string. - if value is not None and Validate_simpletypes_: + if ( + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None + ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_conductance_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_conductance_patterns_, )) - validate_Nml2Quantity_conductance_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS)$']] + self.validate_Nml2Quantity_conductance_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_conductance_patterns_, + ) + ) + + validate_Nml2Quantity_conductance_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(S|mS|uS|nS|pS))$"] + ] + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] - def hasContent_(self): if ( - super(BaseConductanceBasedSynapse, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_voltage_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$"] + ] + + def _hasContent(self): + if super(BaseConductanceBasedSynapse, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseConductanceBasedSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BaseConductanceBasedSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseConductanceBasedSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BaseConductanceBasedSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if ( + 
self.original_tagname_ is not None + and name_ == "BaseConductanceBasedSynapse" + ): name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConductanceBasedSynapse') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaseConductanceBasedSynapse', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseConductanceBasedSynapse", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BaseConductanceBasedSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseConductanceBasedSynapse'): - super(BaseConductanceBasedSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseConductanceBasedSynapse') - if self.gbase is not None and 'gbase' not in already_processed: - already_processed.add('gbase') - outfile.write(' gbase=%s' % (quote_attrib(self.gbase), )) - if self.erev is not None and 'erev' not in already_processed: - already_processed.add('erev') - outfile.write(' erev=%s' % (quote_attrib(self.erev), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - 
already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BaseConductanceBasedSynapse", + ): + super(BaseConductanceBasedSynapse, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BaseConductanceBasedSynapse", + ) + if self.gbase is not None and "gbase" not in already_processed: + already_processed.add("gbase") + outfile.write( + " gbase=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.gbase), input_name="gbase" + ) + ), + ) + ) + if self.erev is not None and "erev" not in already_processed: + already_processed.add("erev") + outfile.write( + " erev=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.erev), input_name="erev" + ) + ), + ) + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BaseConductanceBasedSynapse', fromsubclass_=False, pretty_print=True): - super(BaseConductanceBasedSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="BaseConductanceBasedSynapse", + 
fromsubclass_=False, + pretty_print=True, + ): + super(BaseConductanceBasedSynapse, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('gbase', node) - if value is not None and 'gbase' not in already_processed: - already_processed.add('gbase') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("gbase", node) + if value is not None and "gbase" not in already_processed: + already_processed.add("gbase") self.gbase = value - self.validate_Nml2Quantity_conductance(self.gbase) # validate type Nml2Quantity_conductance - value = find_attr_value_('erev', node) - if value is not None and 'erev' not in already_processed: - already_processed.add('erev') + self.validate_Nml2Quantity_conductance( + self.gbase + ) # validate type Nml2Quantity_conductance + value = find_attr_value_("erev", node) + if value is not None and "erev" not in already_processed: + already_processed.add("erev") self.erev = value - self.validate_Nml2Quantity_voltage(self.erev) # validate type Nml2Quantity_voltage - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_Nml2Quantity_voltage( + self.erev + ) # validate type Nml2Quantity_voltage + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in 
already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(BaseConductanceBasedSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(BaseConductanceBasedSynapse, self).buildChildren(child_, node, nodeName_, True) + super(BaseConductanceBasedSynapse, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(BaseConductanceBasedSynapse, self)._buildChildren( + child_, node, nodeName_, True + ) pass + + # end class BaseConductanceBasedSynapse class IonChannelVShift(IonChannel): - """Same as ionChannel, but with a vShift parameter to change voltage - activation of gates. The exact usage of vShift in expressions - for rates is determined by the individual gates.""" + """IonChannelVShift -- Same as **ionChannel** , but with a **vShift** parameter to change voltage activation of gates. The exact usage of **vShift** in expressions for rates is determined by the individual gates. 
+ \n + :param vShift: + :type vShift: voltage + :param conductance: + :type conductance: conductance + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('v_shift', 'Nml2Quantity_voltage', 0, 0, {'use': u'required'}), + MemberSpec_( + "v_shift", + "Nml2Quantity_voltage", + 0, + 0, + {"use": "required", "name": "v_shift"}, + ), ] subclass = None superclass = IonChannel - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, q10_conductance_scalings=None, species=None, type=None, conductance=None, gates=None, gate_hh_rates=None, gate_h_hrates_taus=None, gate_hh_tau_infs=None, gate_h_hrates_infs=None, gate_h_hrates_tau_infs=None, gate_hh_instantaneouses=None, gate_fractionals=None, v_shift=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + q10_conductance_scalings=None, + species=None, + type=None, + conductance=None, + gates=None, + gate_hh_rates=None, + gate_h_hrates_taus=None, + gate_hh_tau_infs=None, + gate_h_hrates_infs=None, + gate_h_hrates_tau_infs=None, + gate_hh_instantaneouses=None, + gate_fractionals=None, + v_shift=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(IonChannelVShift, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, q10_conductance_scalings, species, type, conductance, gates, gate_hh_rates, gate_h_hrates_taus, gate_hh_tau_infs, gate_h_hrates_infs, gate_h_hrates_tau_infs, gate_hh_instantaneouses, gate_fractionals, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("IonChannelVShift"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + q10_conductance_scalings, + species, + type, + 
conductance, + gates, + gate_hh_rates, + gate_h_hrates_taus, + gate_hh_tau_infs, + gate_h_hrates_infs, + gate_h_hrates_tau_infs, + gate_hh_instantaneouses, + gate_fractionals, + **kwargs_ + ) self.v_shift = _cast(None, v_shift) + self.v_shift_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, IonChannelVShift) + subclass = getSubclassFromModule_(CurrentSubclassModule_, IonChannelVShift) if subclass is not None: return subclass(*args_, **kwargs_) if IonChannelVShift.subclass: return IonChannelVShift.subclass(*args_, **kwargs_) else: return IonChannelVShift(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_voltage(self, value): # Validate type Nml2Quantity_voltage, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_voltage_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_voltage_patterns_, )) - validate_Nml2Quantity_voltage_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV)$']] - def hasContent_(self): if ( - super(IonChannelVShift, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_voltage_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_voltage_patterns_, + ) + ) + + validate_Nml2Quantity_voltage_patterns_ = [ + 
["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(V|mV))$"] + ] + + def _hasContent(self): + if super(IonChannelVShift, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IonChannelVShift', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IonChannelVShift') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IonChannelVShift", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IonChannelVShift") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "IonChannelVShift": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelVShift') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IonChannelVShift', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="IonChannelVShift", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IonChannelVShift", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' 
% (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IonChannelVShift'): - super(IonChannelVShift, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelVShift') - if self.v_shift is not None and 'v_shift' not in already_processed: - already_processed.add('v_shift') - outfile.write(' vShift=%s' % (quote_attrib(self.v_shift), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IonChannelVShift', fromsubclass_=False, pretty_print=True): - super(IonChannelVShift, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="IonChannelVShift", + ): + super(IonChannelVShift, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="IonChannelVShift", + ) + if self.v_shift is not None and "v_shift" not in already_processed: + already_processed.add("v_shift") + outfile.write( + " vShift=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.v_shift), input_name="vShift" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IonChannelVShift", + fromsubclass_=False, + pretty_print=True, + ): + super(IonChannelVShift, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + 
self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('vShift', node) - if value is not None and 'vShift' not in already_processed: - already_processed.add('vShift') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("vShift", node) + if value is not None and "vShift" not in already_processed: + already_processed.add("vShift") self.v_shift = value - self.validate_Nml2Quantity_voltage(self.v_shift) # validate type Nml2Quantity_voltage - super(IonChannelVShift, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(IonChannelVShift, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_voltage( + self.v_shift + ) # validate type Nml2Quantity_voltage + super(IonChannelVShift, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(IonChannelVShift, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class IonChannelVShift class IonChannelHH(IonChannel): - """Note ionChannel and ionChannelHH are currently functionally - identical. This is needed since many existing examples use - ionChannel, some use ionChannelHH. NeuroML v2beta4 should remove - one of these, probably ionChannelHH.""" - member_data_items_ = [ - ] + """IonChannelHH -- Note **ionChannel** and **ionChannelHH** are currently functionally identical. This is needed since many existing examples use ionChannel, some use ionChannelHH. NeuroML v2beta4 should remove one of these, probably ionChannelHH. 
+ \n + :param conductance: + :type conductance: conductance + + """ + + __hash__ = GeneratedsSuper.__hash__ + member_data_items_ = [] subclass = None superclass = IonChannel - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, q10_conductance_scalings=None, species=None, type=None, conductance=None, gates=None, gate_hh_rates=None, gate_h_hrates_taus=None, gate_hh_tau_infs=None, gate_h_hrates_infs=None, gate_h_hrates_tau_infs=None, gate_hh_instantaneouses=None, gate_fractionals=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + q10_conductance_scalings=None, + species=None, + type=None, + conductance=None, + gates=None, + gate_hh_rates=None, + gate_h_hrates_taus=None, + gate_hh_tau_infs=None, + gate_h_hrates_infs=None, + gate_h_hrates_tau_infs=None, + gate_hh_instantaneouses=None, + gate_fractionals=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(IonChannelHH, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, q10_conductance_scalings, species, type, conductance, gates, gate_hh_rates, gate_h_hrates_taus, gate_hh_tau_infs, gate_h_hrates_infs, gate_h_hrates_tau_infs, gate_hh_instantaneouses, gate_fractionals, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("IonChannelHH"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + q10_conductance_scalings, + species, + type, + conductance, + gates, + gate_hh_rates, + gate_h_hrates_taus, + gate_hh_tau_infs, + gate_h_hrates_infs, + gate_h_hrates_tau_infs, + gate_hh_instantaneouses, + gate_fractionals, + **kwargs_ + ) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = 
getSubclassFromModule_( - CurrentSubclassModule_, IonChannelHH) + subclass = getSubclassFromModule_(CurrentSubclassModule_, IonChannelHH) if subclass is not None: return subclass(*args_, **kwargs_) if IonChannelHH.subclass: return IonChannelHH.subclass(*args_, **kwargs_) else: return IonChannelHH(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(IonChannelHH, self).hasContent_() - ): + + def _hasContent(self): + if super(IonChannelHH, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IonChannelHH', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IonChannelHH') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IonChannelHH", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IonChannelHH") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "IonChannelHH": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelHH') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IonChannelHH', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IonChannelHH" + ) + if 
self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IonChannelHH", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IonChannelHH'): - super(IonChannelHH, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IonChannelHH') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IonChannelHH', fromsubclass_=False, pretty_print=True): - super(IonChannelHH, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="IonChannelHH", + ): + super(IonChannelHH, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IonChannelHH" + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IonChannelHH", + fromsubclass_=False, + pretty_print=True, + ): + super(IonChannelHH, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, 
nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(IonChannelHH, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(IonChannelHH, self).buildChildren(child_, node, nodeName_, True) + + def _buildAttributes(self, node, attrs, already_processed): + super(IonChannelHH, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(IonChannelHH, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class IonChannelHH class IF_curr_exp(basePyNNIaFCell): - member_data_items_ = [ - ] + """IF_curr_exp -- Leaky integrate and fire model with fixed threshold and decaying-exponential post-synaptic current + \n + :param tau_refrac: + :type tau_refrac: none + :param v_thresh: + :type v_thresh: none + :param tau_m: + :type tau_m: none + :param v_rest: + :type v_rest: none + :param v_reset: + :type v_reset: none + :param cm: + :type cm: none + :param i_offset: + :type i_offset: none + :param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type tau_syn_E: none + :param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! 
Any synapse producing a current can be placed on this cell + :type tau_syn_I: none + :param v_init: + :type v_init: none + + """ + + __hash__ = GeneratedsSuper.__hash__ + member_data_items_ = [] subclass = None superclass = basePyNNIaFCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + cm=None, + i_offset=None, + tau_syn_E=None, + tau_syn_I=None, + v_init=None, + tau_m=None, + tau_refrac=None, + v_reset=None, + v_rest=None, + v_thresh=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(IF_curr_exp, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, tau_m, tau_refrac, v_reset, v_rest, v_thresh, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("IF_curr_exp"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + cm, + i_offset, + tau_syn_E, + tau_syn_I, + v_init, + tau_m, + tau_refrac, + v_reset, + v_rest, + v_thresh, + **kwargs_ + ) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, IF_curr_exp) + subclass = getSubclassFromModule_(CurrentSubclassModule_, IF_curr_exp) if subclass is not None: return subclass(*args_, **kwargs_) if IF_curr_exp.subclass: return IF_curr_exp.subclass(*args_, **kwargs_) else: return IF_curr_exp(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(IF_curr_exp, 
self).hasContent_() - ): + + def _hasContent(self): + if super(IF_curr_exp, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IF_curr_exp', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IF_curr_exp') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IF_curr_exp", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IF_curr_exp") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "IF_curr_exp": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_curr_exp') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IF_curr_exp', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IF_curr_exp" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IF_curr_exp", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, 
namespaceprefix_='', name_='IF_curr_exp'): - super(IF_curr_exp, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_curr_exp') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IF_curr_exp', fromsubclass_=False, pretty_print=True): - super(IF_curr_exp, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="IF_curr_exp", + ): + super(IF_curr_exp, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IF_curr_exp" + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IF_curr_exp", + fromsubclass_=False, + pretty_print=True, + ): + super(IF_curr_exp, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(IF_curr_exp, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(IF_curr_exp, self).buildChildren(child_, node, nodeName_, True) + + def _buildAttributes(self, 
node, attrs, already_processed): + super(IF_curr_exp, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(IF_curr_exp, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class IF_curr_exp class IF_curr_alpha(basePyNNIaFCell): - member_data_items_ = [ - ] + """IF_curr_alpha -- Leaky integrate and fire model with fixed threshold and alpha-function-shaped post-synaptic current + \n + :param tau_refrac: + :type tau_refrac: none + :param v_thresh: + :type v_thresh: none + :param tau_m: + :type tau_m: none + :param v_rest: + :type v_rest: none + :param v_reset: + :type v_reset: none + :param cm: + :type cm: none + :param i_offset: + :type i_offset: none + :param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type tau_syn_E: none + :param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! 
Any synapse producing a current can be placed on this cell + :type tau_syn_I: none + :param v_init: + :type v_init: none + + """ + + __hash__ = GeneratedsSuper.__hash__ + member_data_items_ = [] subclass = None superclass = basePyNNIaFCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + cm=None, + i_offset=None, + tau_syn_E=None, + tau_syn_I=None, + v_init=None, + tau_m=None, + tau_refrac=None, + v_reset=None, + v_rest=None, + v_thresh=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(IF_curr_alpha, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, tau_m, tau_refrac, v_reset, v_rest, v_thresh, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("IF_curr_alpha"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + cm, + i_offset, + tau_syn_E, + tau_syn_I, + v_init, + tau_m, + tau_refrac, + v_reset, + v_rest, + v_thresh, + **kwargs_ + ) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, IF_curr_alpha) + subclass = getSubclassFromModule_(CurrentSubclassModule_, IF_curr_alpha) if subclass is not None: return subclass(*args_, **kwargs_) if IF_curr_alpha.subclass: return IF_curr_alpha.subclass(*args_, **kwargs_) else: return IF_curr_alpha(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - 
super(IF_curr_alpha, self).hasContent_() - ): + + def _hasContent(self): + if super(IF_curr_alpha, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IF_curr_alpha', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IF_curr_alpha') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IF_curr_alpha", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IF_curr_alpha") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "IF_curr_alpha": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_curr_alpha') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IF_curr_alpha', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IF_curr_alpha" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IF_curr_alpha", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, 
level, already_processed, namespaceprefix_='', name_='IF_curr_alpha'): - super(IF_curr_alpha, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_curr_alpha') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IF_curr_alpha', fromsubclass_=False, pretty_print=True): - super(IF_curr_alpha, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="IF_curr_alpha", + ): + super(IF_curr_alpha, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IF_curr_alpha" + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IF_curr_alpha", + fromsubclass_=False, + pretty_print=True, + ): + super(IF_curr_alpha, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(IF_curr_alpha, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(IF_curr_alpha, self).buildChildren(child_, node, 
nodeName_, True) + + def _buildAttributes(self, node, attrs, already_processed): + super(IF_curr_alpha, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(IF_curr_alpha, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class IF_curr_alpha class basePyNNIaFCondCell(basePyNNIaFCell): + """basePyNNIaFCondCell -- Base type of conductance based PyNN IaF cell models + \n + :param e_rev_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type e_rev_E: none + :param e_rev_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type e_rev_I: none + :param tau_refrac: + :type tau_refrac: none + :param v_thresh: + :type v_thresh: none + :param tau_m: + :type tau_m: none + :param v_rest: + :type v_rest: none + :param v_reset: + :type v_reset: none + :param cm: + :type cm: none + :param i_offset: + :type i_offset: none + :param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type tau_syn_E: none + :param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! 
Any synapse producing a current can be placed on this cell + :type tau_syn_I: none + :param v_init: + :type v_init: none + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('e_rev_E', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('e_rev_I', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_( + "e_rev_E", "xs:float", 0, 0, {"use": "required", "name": "e_rev_E"} + ), + MemberSpec_( + "e_rev_I", "xs:float", 0, 0, {"use": "required", "name": "e_rev_I"} + ), ] subclass = None superclass = basePyNNIaFCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, e_rev_E=None, e_rev_I=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + cm=None, + i_offset=None, + tau_syn_E=None, + tau_syn_I=None, + v_init=None, + tau_m=None, + tau_refrac=None, + v_reset=None, + v_rest=None, + v_thresh=None, + e_rev_E=None, + e_rev_I=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(basePyNNIaFCondCell, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, tau_m, tau_refrac, v_reset, v_rest, v_thresh, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("basePyNNIaFCondCell"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + cm, + i_offset, + tau_syn_E, + tau_syn_I, + v_init, + tau_m, + tau_refrac, + v_reset, + v_rest, + v_thresh, + extensiontype_, + **kwargs_ + ) self.e_rev_E = _cast(float, 
e_rev_E) + self.e_rev_E_nsprefix_ = None self.e_rev_I = _cast(float, e_rev_I) + self.e_rev_I_nsprefix_ = None self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, basePyNNIaFCondCell) + CurrentSubclassModule_, basePyNNIaFCondCell + ) if subclass is not None: return subclass(*args_, **kwargs_) if basePyNNIaFCondCell.subclass: return basePyNNIaFCondCell.subclass(*args_, **kwargs_) else: return basePyNNIaFCondCell(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(basePyNNIaFCondCell, self).hasContent_() - ): + + def _hasContent(self): + if super(basePyNNIaFCondCell, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='basePyNNIaFCondCell', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('basePyNNIaFCondCell') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="basePyNNIaFCondCell", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("basePyNNIaFCondCell") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "basePyNNIaFCondCell": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='basePyNNIaFCondCell') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, 
name_='basePyNNIaFCondCell', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="basePyNNIaFCondCell", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="basePyNNIaFCondCell", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='basePyNNIaFCondCell'): - super(basePyNNIaFCondCell, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='basePyNNIaFCondCell') - if self.e_rev_E is not None and 'e_rev_E' not in already_processed: - already_processed.add('e_rev_E') - outfile.write(' e_rev_E="%s"' % self.gds_format_float(self.e_rev_E, input_name='e_rev_E')) - if self.e_rev_I is not None and 'e_rev_I' not in already_processed: - already_processed.add('e_rev_I') - outfile.write(' e_rev_I="%s"' % self.gds_format_float(self.e_rev_I, input_name='e_rev_I')) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="basePyNNIaFCondCell", + ): + super(basePyNNIaFCondCell, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="basePyNNIaFCondCell", + ) + if self.e_rev_E is not None and "e_rev_E" not in already_processed: + already_processed.add("e_rev_E") + outfile.write( + ' e_rev_E="%s"' + % self.gds_format_float(self.e_rev_E, 
input_name="e_rev_E") + ) + if self.e_rev_I is not None and "e_rev_I" not in already_processed: + already_processed.add("e_rev_I") + outfile.write( + ' e_rev_I="%s"' + % self.gds_format_float(self.e_rev_I, input_name="e_rev_I") + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='basePyNNIaFCondCell', fromsubclass_=False, pretty_print=True): - super(basePyNNIaFCondCell, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="basePyNNIaFCondCell", + fromsubclass_=False, + pretty_print=True, + ): + super(basePyNNIaFCondCell, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, 
node, attrs, already_processed): - value = find_attr_value_('e_rev_E', node) - if value is not None and 'e_rev_E' not in already_processed: - already_processed.add('e_rev_E') - try: - self.e_rev_E = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (e_rev_E): %s' % exp) - value = find_attr_value_('e_rev_I', node) - if value is not None and 'e_rev_I' not in already_processed: - already_processed.add('e_rev_I') - try: - self.e_rev_I = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (e_rev_I): %s' % exp) - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("e_rev_E", node) + if value is not None and "e_rev_E" not in already_processed: + already_processed.add("e_rev_E") + value = self.gds_parse_float(value, node, "e_rev_E") + self.e_rev_E = value + value = find_attr_value_("e_rev_I", node) + if value is not None and "e_rev_I" not in already_processed: + already_processed.add("e_rev_I") + value = self.gds_parse_float(value, node, "e_rev_I") + self.e_rev_I = value + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(basePyNNIaFCondCell, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(basePyNNIaFCondCell, self).buildChildren(child_, node, nodeName_, True) + super(basePyNNIaFCondCell, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(basePyNNIaFCondCell, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class basePyNNIaFCondCell class 
ContinuousConnectionInstance(ContinuousConnection): - """Individual continuous/analog synaptic connection - instance based""" - member_data_items_ = [ - ] + """ContinuousConnectionInstance -- An instance of a connection in a **continuousProjection** between **presynapticPopulation** to another **postsynapticPopulation** through a **preComponent** at the start and **postComponent** at the end. Populations need to be of type **populationList** and contain **instance** and **location** elements. Can be used for analog synapses.""" + + __hash__ = GeneratedsSuper.__hash__ + member_data_items_ = [] subclass = None superclass = ContinuousConnection - def __init__(self, neuro_lex_id=None, id=None, pre_cell=None, pre_segment='0', pre_fraction_along='0.5', post_cell=None, post_segment='0', post_fraction_along='0.5', pre_component=None, post_component=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + pre_cell=None, + pre_segment="0", + pre_fraction_along="0.5", + post_cell=None, + post_segment="0", + post_fraction_along="0.5", + pre_component=None, + post_component=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ContinuousConnectionInstance, self).__init__(neuro_lex_id, id, pre_cell, pre_segment, pre_fraction_along, post_cell, post_segment, post_fraction_along, pre_component, post_component, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ContinuousConnectionInstance"), self).__init__( + neuro_lex_id, + id, + pre_cell, + pre_segment, + pre_fraction_along, + post_cell, + post_segment, + post_fraction_along, + pre_component, + post_component, + extensiontype_, + **kwargs_ + ) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if 
CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ContinuousConnectionInstance) + CurrentSubclassModule_, ContinuousConnectionInstance + ) if subclass is not None: return subclass(*args_, **kwargs_) if ContinuousConnectionInstance.subclass: return ContinuousConnectionInstance.subclass(*args_, **kwargs_) else: return ContinuousConnectionInstance(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(ContinuousConnectionInstance, self).hasContent_() - ): + + def _hasContent(self): + if super(ContinuousConnectionInstance, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContinuousConnectionInstance', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ContinuousConnectionInstance') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ContinuousConnectionInstance", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ContinuousConnectionInstance") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if ( + self.original_tagname_ is not None + and name_ == "ContinuousConnectionInstance" + ): name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousConnectionInstance') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ContinuousConnectionInstance', pretty_print=pretty_print) - 
outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ContinuousConnectionInstance'): - super(ContinuousConnectionInstance, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousConnectionInstance') - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ContinuousConnectionInstance", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ContinuousConnectionInstance", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ContinuousConnectionInstance", + ): + super(ContinuousConnectionInstance, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ContinuousConnectionInstance", + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContinuousConnectionInstance', fromsubclass_=False, pretty_print=True): - super(ContinuousConnectionInstance, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = 
GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ContinuousConnectionInstance", + fromsubclass_=False, + pretty_print=True, + ): + super(ContinuousConnectionInstance, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(ContinuousConnectionInstance, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ContinuousConnectionInstance, self).buildChildren(child_, node, nodeName_, True) + super(ContinuousConnectionInstance, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, 
fromsubclass_=False, gds_collector_=None + ): + super(ContinuousConnectionInstance, self)._buildChildren( + child_, node, nodeName_, True + ) pass def _get_cell_id(self, id_string): - if '[' in id_string: - return int(id_string.split('[')[1].split(']')[0]) + if "[" in id_string: + return int(id_string.split("[")[1].split("]")[0]) else: - return int(id_string.split('/')[2]) - + return int(id_string.split("/")[2]) def __str__(self): - return "Continuous Connection (Instance based) "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())+ ", pre comp: "+str(self.pre_component)+", post comp: "+str(self.post_component) - + return ( + "Continuous Connection (Instance based) " + + str(self.id) + + ": " + + str(self.get_pre_info()) + + " -> " + + str(self.get_post_info()) + + ", pre comp: " + + str(self.pre_component) + + ", post comp: " + + str(self.post_component) + ) # end class ContinuousConnectionInstance class ElectricalConnectionInstance(ElectricalConnection): - """Projection between two populations consisting of analog connections - (e.g. graded synapses)""" - member_data_items_ = [ - ] + """ElectricalConnectionInstance -- To enable connections between populations through gap junctions. 
Populations need to be of type **populationList** and contain **instance** and **location** elements.""" + + __hash__ = GeneratedsSuper.__hash__ + member_data_items_ = [] subclass = None superclass = ElectricalConnection - def __init__(self, neuro_lex_id=None, id=None, pre_cell=None, pre_segment='0', pre_fraction_along='0.5', post_cell=None, post_segment='0', post_fraction_along='0.5', synapse=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + pre_cell=None, + pre_segment="0", + pre_fraction_along="0.5", + post_cell=None, + post_segment="0", + post_fraction_along="0.5", + synapse=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ElectricalConnectionInstance, self).__init__(neuro_lex_id, id, pre_cell, pre_segment, pre_fraction_along, post_cell, post_segment, post_fraction_along, synapse, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ElectricalConnectionInstance"), self).__init__( + neuro_lex_id, + id, + pre_cell, + pre_segment, + pre_fraction_along, + post_cell, + post_segment, + post_fraction_along, + synapse, + extensiontype_, + **kwargs_ + ) self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ElectricalConnectionInstance) + CurrentSubclassModule_, ElectricalConnectionInstance + ) if subclass is not None: return subclass(*args_, **kwargs_) if ElectricalConnectionInstance.subclass: return ElectricalConnectionInstance.subclass(*args_, **kwargs_) else: return ElectricalConnectionInstance(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(ElectricalConnectionInstance, self).hasContent_() - ): + + 
def _hasContent(self): + if super(ElectricalConnectionInstance, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ElectricalConnectionInstance', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ElectricalConnectionInstance') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ElectricalConnectionInstance", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ElectricalConnectionInstance") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if ( + self.original_tagname_ is not None + and name_ == "ElectricalConnectionInstance" + ): name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalConnectionInstance') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ElectricalConnectionInstance', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ElectricalConnectionInstance'): - super(ElectricalConnectionInstance, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalConnectionInstance') - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + 
namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ElectricalConnectionInstance", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ElectricalConnectionInstance", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ElectricalConnectionInstance", + ): + super(ElectricalConnectionInstance, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ElectricalConnectionInstance", + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ElectricalConnectionInstance', fromsubclass_=False, pretty_print=True): - super(ElectricalConnectionInstance, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ElectricalConnectionInstance", + fromsubclass_=False, + pretty_print=True, + ): + super(ElectricalConnectionInstance, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + 
pretty_print=pretty_print, + ) pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(ElectricalConnectionInstance, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ElectricalConnectionInstance, self).buildChildren(child_, node, nodeName_, True) + super(ElectricalConnectionInstance, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(ElectricalConnectionInstance, self)._buildChildren( + child_, node, nodeName_, True + ) pass def _get_cell_id(self, id_string): - if '[' in id_string: - return int(id_string.split('[')[1].split(']')[0]) + if "[" in id_string: + return int(id_string.split("[")[1].split("]")[0]) else: - return int(id_string.split('/')[2]) + return int(id_string.split("/")[2]) def __str__(self): - return "Electrical Connection (Instance based) "+str(self.id)+": "+str(self.get_pre_info())+" -> 
"+str(self.get_post_info())+ ", synapse: "+str(self.synapse) - + return ( + "Electrical Connection (Instance based) " + + str(self.id) + + ": " + + str(self.get_pre_info()) + + " -> " + + str(self.get_post_info()) + + ", synapse: " + + str(self.synapse) + ) # end class ElectricalConnectionInstance class ExpThreeSynapse(BaseConductanceBasedSynapseTwo): + """ExpThreeSynapse -- Ohmic synapse similar to expTwoSynapse but consisting of two components that can differ in decay times and max conductances but share the same rise time. + \n + :param tauRise: + :type tauRise: time + :param tauDecay1: + :type tauDecay1: time + :param tauDecay2: + :type tauDecay2: time + :param gbase1: Baseline conductance 1 + :type gbase1: conductance + :param gbase2: Baseline conductance 2 + :type gbase2: conductance + :param erev: Reversal potential of the synapse + :type erev: voltage + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('tau_decay1', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('tau_decay2', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('tau_rise', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_( + "tau_decay1", + "Nml2Quantity_time", + 0, + 0, + {"use": "required", "name": "tau_decay1"}, + ), + MemberSpec_( + "tau_decay2", + "Nml2Quantity_time", + 0, + 0, + {"use": "required", "name": "tau_decay2"}, + ), + MemberSpec_( + "tau_rise", + "Nml2Quantity_time", + 0, + 0, + {"use": "required", "name": "tau_rise"}, + ), ] subclass = None superclass = BaseConductanceBasedSynapseTwo - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, gbase1=None, gbase2=None, erev=None, tau_decay1=None, tau_decay2=None, tau_rise=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + gbase1=None, + gbase2=None, + erev=None, + tau_decay1=None, + tau_decay2=None, + 
tau_rise=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ExpThreeSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, gbase1, gbase2, erev, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ExpThreeSynapse"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + gbase1, + gbase2, + erev, + **kwargs_ + ) self.tau_decay1 = _cast(None, tau_decay1) + self.tau_decay1_nsprefix_ = None self.tau_decay2 = _cast(None, tau_decay2) + self.tau_decay2_nsprefix_ = None self.tau_rise = _cast(None, tau_rise) + self.tau_rise_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ExpThreeSynapse) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ExpThreeSynapse) if subclass is not None: return subclass(*args_, **kwargs_) if ExpThreeSynapse.subclass: return ExpThreeSynapse.subclass(*args_, **kwargs_) else: return ExpThreeSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] - def hasContent_(self): if ( - super(ExpThreeSynapse, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_time_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$"] + ] + + def _hasContent(self): + if super(ExpThreeSynapse, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpThreeSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExpThreeSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ExpThreeSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ExpThreeSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ExpThreeSynapse": name_ = self.original_tagname_ + if 
UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpThreeSynapse') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExpThreeSynapse', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ExpThreeSynapse" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ExpThreeSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExpThreeSynapse'): - super(ExpThreeSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpThreeSynapse') - if self.tau_decay1 is not None and 'tau_decay1' not in already_processed: - already_processed.add('tau_decay1') - outfile.write(' tauDecay1=%s' % (quote_attrib(self.tau_decay1), )) - if self.tau_decay2 is not None and 'tau_decay2' not in already_processed: - already_processed.add('tau_decay2') - outfile.write(' tauDecay2=%s' % (quote_attrib(self.tau_decay2), )) - if self.tau_rise is not None and 'tau_rise' not in already_processed: - already_processed.add('tau_rise') - outfile.write(' tauRise=%s' % (quote_attrib(self.tau_rise), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', 
name_='ExpThreeSynapse', fromsubclass_=False, pretty_print=True): - super(ExpThreeSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ExpThreeSynapse", + ): + super(ExpThreeSynapse, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ExpThreeSynapse" + ) + if self.tau_decay1 is not None and "tau_decay1" not in already_processed: + already_processed.add("tau_decay1") + outfile.write( + " tauDecay1=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.tau_decay1), input_name="tauDecay1" + ) + ), + ) + ) + if self.tau_decay2 is not None and "tau_decay2" not in already_processed: + already_processed.add("tau_decay2") + outfile.write( + " tauDecay2=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.tau_decay2), input_name="tauDecay2" + ) + ), + ) + ) + if self.tau_rise is not None and "tau_rise" not in already_processed: + already_processed.add("tau_rise") + outfile.write( + " tauRise=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.tau_rise), input_name="tauRise" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ExpThreeSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(ExpThreeSynapse, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = 
node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('tauDecay1', node) - if value is not None and 'tauDecay1' not in already_processed: - already_processed.add('tauDecay1') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("tauDecay1", node) + if value is not None and "tauDecay1" not in already_processed: + already_processed.add("tauDecay1") self.tau_decay1 = value - self.validate_Nml2Quantity_time(self.tau_decay1) # validate type Nml2Quantity_time - value = find_attr_value_('tauDecay2', node) - if value is not None and 'tauDecay2' not in already_processed: - already_processed.add('tauDecay2') + self.validate_Nml2Quantity_time( + self.tau_decay1 + ) # validate type Nml2Quantity_time + value = find_attr_value_("tauDecay2", node) + if value is not None and "tauDecay2" not in already_processed: + already_processed.add("tauDecay2") self.tau_decay2 = value - self.validate_Nml2Quantity_time(self.tau_decay2) # validate type Nml2Quantity_time - value = find_attr_value_('tauRise', node) - if value is not None and 'tauRise' not in already_processed: - already_processed.add('tauRise') + self.validate_Nml2Quantity_time( + self.tau_decay2 + ) # validate type Nml2Quantity_time + value = find_attr_value_("tauRise", node) + if value is not None and "tauRise" not in already_processed: + already_processed.add("tauRise") self.tau_rise = value - self.validate_Nml2Quantity_time(self.tau_rise) # validate type Nml2Quantity_time - super(ExpThreeSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ExpThreeSynapse, self).buildChildren(child_, node, 
nodeName_, True) + self.validate_Nml2Quantity_time( + self.tau_rise + ) # validate type Nml2Quantity_time + super(ExpThreeSynapse, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(ExpThreeSynapse, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class ExpThreeSynapse class ExpTwoSynapse(BaseConductanceBasedSynapse): + """ExpTwoSynapse -- Ohmic synapse model whose conductance waveform on receiving an event has a rise time of **tauRise** and a decay time of **tauDecay.** Max conductance reached during this time ( assuming zero conductance before ) is **gbase** * **weight.** + \n + :param tauRise: + :type tauRise: time + :param tauDecay: + :type tauDecay: time + :param gbase: Baseline conductance, generally the maximum conductance following a single spike + :type gbase: conductance + :param erev: Reversal potential of the synapse + :type erev: voltage + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('tau_decay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), - MemberSpec_('tau_rise', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_( + "tau_decay", + "Nml2Quantity_time", + 0, + 0, + {"use": "required", "name": "tau_decay"}, + ), + MemberSpec_( + "tau_rise", + "Nml2Quantity_time", + 0, + 0, + {"use": "required", "name": "tau_rise"}, + ), ] subclass = None superclass = BaseConductanceBasedSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, gbase=None, erev=None, tau_decay=None, tau_rise=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + gbase=None, + erev=None, + tau_decay=None, + tau_rise=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + 
self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ExpTwoSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, gbase, erev, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ExpTwoSynapse"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + gbase, + erev, + extensiontype_, + **kwargs_ + ) self.tau_decay = _cast(None, tau_decay) + self.tau_decay_nsprefix_ = None self.tau_rise = _cast(None, tau_rise) + self.tau_rise_nsprefix_ = None self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ExpTwoSynapse) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ExpTwoSynapse) if subclass is not None: return subclass(*args_, **kwargs_) if ExpTwoSynapse.subclass: return ExpTwoSynapse.subclass(*args_, **kwargs_) else: return ExpTwoSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] - def hasContent_(self): if ( - super(ExpTwoSynapse, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_time_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$"] + ] + + def _hasContent(self): + if super(ExpTwoSynapse, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpTwoSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExpTwoSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ExpTwoSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ExpTwoSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ExpTwoSynapse": name_ = self.original_tagname_ + if UseCapturedNS_ and 
self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpTwoSynapse') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExpTwoSynapse', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ExpTwoSynapse" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ExpTwoSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExpTwoSynapse'): - super(ExpTwoSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpTwoSynapse') - if self.tau_decay is not None and 'tau_decay' not in already_processed: - already_processed.add('tau_decay') - outfile.write(' tauDecay=%s' % (quote_attrib(self.tau_decay), )) - if self.tau_rise is not None and 'tau_rise' not in already_processed: - already_processed.add('tau_rise') - outfile.write(' tauRise=%s' % (quote_attrib(self.tau_rise), )) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + 
namespaceprefix_="", + name_="ExpTwoSynapse", + ): + super(ExpTwoSynapse, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ExpTwoSynapse" + ) + if self.tau_decay is not None and "tau_decay" not in already_processed: + already_processed.add("tau_decay") + outfile.write( + " tauDecay=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.tau_decay), input_name="tauDecay" + ) + ), + ) + ) + if self.tau_rise is not None and "tau_rise" not in already_processed: + already_processed.add("tau_rise") + outfile.write( + " tauRise=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.tau_rise), input_name="tauRise" + ) + ), + ) + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpTwoSynapse', fromsubclass_=False, pretty_print=True): - super(ExpTwoSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ExpTwoSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(ExpTwoSynapse, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + 
self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('tauDecay', node) - if value is not None and 'tauDecay' not in already_processed: - already_processed.add('tauDecay') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("tauDecay", node) + if value is not None and "tauDecay" not in already_processed: + already_processed.add("tauDecay") self.tau_decay = value - self.validate_Nml2Quantity_time(self.tau_decay) # validate type Nml2Quantity_time - value = find_attr_value_('tauRise', node) - if value is not None and 'tauRise' not in already_processed: - already_processed.add('tauRise') + self.validate_Nml2Quantity_time( + self.tau_decay + ) # validate type Nml2Quantity_time + value = find_attr_value_("tauRise", node) + if value is not None and "tauRise" not in already_processed: + already_processed.add("tauRise") self.tau_rise = value - self.validate_Nml2Quantity_time(self.tau_rise) # validate type Nml2Quantity_time - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + self.validate_Nml2Quantity_time( + self.tau_rise + ) # validate type Nml2Quantity_time + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(ExpTwoSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - 
super(ExpTwoSynapse, self).buildChildren(child_, node, nodeName_, True) + super(ExpTwoSynapse, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(ExpTwoSynapse, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class ExpTwoSynapse class ExpOneSynapse(BaseConductanceBasedSynapse): + """ExpOneSynapse -- Ohmic synapse model whose conductance rises instantaneously by ( **gbase** * **weight** ) on receiving an event, and which decays exponentially to zero with time course **tauDecay** + \n + :param tauDecay: Time course of decay + :type tauDecay: time + :param gbase: Baseline conductance, generally the maximum conductance following a single spike + :type gbase: conductance + :param erev: Reversal potential of the synapse + :type erev: voltage + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('tau_decay', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_( + "tau_decay", + "Nml2Quantity_time", + 0, + 0, + {"use": "required", "name": "tau_decay"}, + ), ] subclass = None superclass = BaseConductanceBasedSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, gbase=None, erev=None, tau_decay=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + gbase=None, + erev=None, + tau_decay=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ExpOneSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, gbase, erev, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ExpOneSynapse"), self).__init__( + neuro_lex_id, + 
id, + metaid, + notes, + properties, + annotation, + gbase, + erev, + **kwargs_ + ) self.tau_decay = _cast(None, tau_decay) + self.tau_decay_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, ExpOneSynapse) + subclass = getSubclassFromModule_(CurrentSubclassModule_, ExpOneSynapse) if subclass is not None: return subclass(*args_, **kwargs_) if ExpOneSynapse.subclass: return ExpOneSynapse.subclass(*args_, **kwargs_) else: return ExpOneSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. - if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] - def hasContent_(self): if ( - super(ExpOneSynapse, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_time_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$"] + ] + + def _hasContent(self): + if super(ExpOneSynapse, self)._hasContent(): return True else: 
return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpOneSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ExpOneSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ExpOneSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ExpOneSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "ExpOneSynapse": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ExpOneSynapse') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ExpOneSynapse', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ExpOneSynapse" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ExpOneSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ExpOneSynapse'): - super(ExpOneSynapse, self).exportAttributes(outfile, level, 
already_processed, namespaceprefix_, name_='ExpOneSynapse') - if self.tau_decay is not None and 'tau_decay' not in already_processed: - already_processed.add('tau_decay') - outfile.write(' tauDecay=%s' % (quote_attrib(self.tau_decay), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ExpOneSynapse', fromsubclass_=False, pretty_print=True): - super(ExpOneSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ExpOneSynapse", + ): + super(ExpOneSynapse, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="ExpOneSynapse" + ) + if self.tau_decay is not None and "tau_decay" not in already_processed: + already_processed.add("tau_decay") + outfile.write( + " tauDecay=%s" + % ( + self.gds_encode( + self.gds_format_string( + quote_attrib(self.tau_decay), input_name="tauDecay" + ) + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ExpOneSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(ExpOneSynapse, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, 
gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('tauDecay', node) - if value is not None and 'tauDecay' not in already_processed: - already_processed.add('tauDecay') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("tauDecay", node) + if value is not None and "tauDecay" not in already_processed: + already_processed.add("tauDecay") self.tau_decay = value - self.validate_Nml2Quantity_time(self.tau_decay) # validate type Nml2Quantity_time - super(ExpOneSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ExpOneSynapse, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_time( + self.tau_decay + ) # validate type Nml2Quantity_time + super(ExpOneSynapse, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(ExpOneSynapse, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class ExpOneSynapse class AlphaSynapse(BaseConductanceBasedSynapse): + """AlphaSynapse -- Ohmic synapse model where rise time and decay time are both **tau.** Max conductance reached during this time ( assuming zero conductance before ) is **gbase** * **weight.** + \n + :param tau: Time course of rise/decay + :type tau: time + :param gbase: Baseline conductance, generally the maximum conductance following a single spike + :type gbase: conductance + :param erev: Reversal potential of the synapse + :type erev: voltage + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('tau', 'Nml2Quantity_time', 0, 0, {'use': u'required'}), + MemberSpec_( + "tau", "Nml2Quantity_time", 0, 0, {"use": "required", "name": "tau"} + ), ] subclass = None superclass = BaseConductanceBasedSynapse - def __init__(self, 
neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, gbase=None, erev=None, tau=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + gbase=None, + erev=None, + tau=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(AlphaSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, gbase, erev, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("AlphaSynapse"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + gbase, + erev, + **kwargs_ + ) self.tau = _cast(None, tau) + self.tau_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, AlphaSynapse) + subclass = getSubclassFromModule_(CurrentSubclassModule_, AlphaSynapse) if subclass is not None: return subclass(*args_, **kwargs_) if AlphaSynapse.subclass: return AlphaSynapse.subclass(*args_, **kwargs_) else: return AlphaSynapse(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_Nml2Quantity_time(self, value): # Validate type Nml2Quantity_time, a restriction on xs:string. 
- if value is not None and Validate_simpletypes_: - if not self.gds_validate_simple_patterns( - self.validate_Nml2Quantity_time_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_Nml2Quantity_time_patterns_, )) - validate_Nml2Quantity_time_patterns_ = [[u'^-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms)$']] - def hasContent_(self): if ( - super(AlphaSynapse, self).hasContent_() + value is not None + and Validate_simpletypes_ + and self.gds_collector_ is not None ): + if not isinstance(value, str): + lineno = self.gds_get_node_lineno_() + self.gds_collector_.add_message( + 'Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' + % { + "value": value, + "lineno": lineno, + } + ) + return False + if not self.gds_validate_simple_patterns( + self.validate_Nml2Quantity_time_patterns_, value + ): + self.gds_collector_.add_message( + 'Value "%s" does not match xsd pattern restrictions: %s' + % ( + encode_str_2_3(value), + self.validate_Nml2Quantity_time_patterns_, + ) + ) + + validate_Nml2Quantity_time_patterns_ = [ + ["^(-?([0-9]*(\\.[0-9]+)?)([eE]-?[0-9]+)?[\\s]*(s|ms))$"] + ] + + def _hasContent(self): + if super(AlphaSynapse, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AlphaSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('AlphaSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="AlphaSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("AlphaSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "AlphaSynapse": name_ = self.original_tagname_ + if UseCapturedNS_ and 
self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaSynapse') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AlphaSynapse', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="AlphaSynapse" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="AlphaSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AlphaSynapse'): - super(AlphaSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlphaSynapse') - if self.tau is not None and 'tau' not in already_processed: - already_processed.add('tau') - outfile.write(' tau=%s' % (quote_attrib(self.tau), )) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='AlphaSynapse', fromsubclass_=False, pretty_print=True): - super(AlphaSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + 
outfile, + level, + already_processed, + namespaceprefix_="", + name_="AlphaSynapse", + ): + super(AlphaSynapse, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="AlphaSynapse" + ) + if self.tau is not None and "tau" not in already_processed: + already_processed.add("tau") + outfile.write( + " tau=%s" + % ( + self.gds_encode( + self.gds_format_string(quote_attrib(self.tau), input_name="tau") + ), + ) + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="AlphaSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(AlphaSynapse, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('tau', node) - if value is not None and 'tau' not in already_processed: - already_processed.add('tau') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("tau", node) + if value is not None and "tau" not in already_processed: + already_processed.add("tau") self.tau = value - self.validate_Nml2Quantity_time(self.tau) # validate type Nml2Quantity_time - super(AlphaSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(AlphaSynapse, self).buildChildren(child_, node, nodeName_, True) + self.validate_Nml2Quantity_time(self.tau) # 
validate type Nml2Quantity_time + super(AlphaSynapse, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(AlphaSynapse, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class AlphaSynapse class EIF_cond_exp_isfa_ista(basePyNNIaFCondCell): + """EIF_cond_exp_isfa_ista -- Adaptive exponential integrate and fire neuron according to Brette R and Gerstner W ( 2005 ) with exponentially-decaying post-synaptic conductance + \n + :param v_spike: + :type v_spike: none + :param delta_T: + :type delta_T: none + :param tau_w: + :type tau_w: none + :param a: + :type a: none + :param b: + :type b: none + :param e_rev_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type e_rev_E: none + :param e_rev_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type e_rev_I: none + :param tau_refrac: + :type tau_refrac: none + :param v_thresh: + :type v_thresh: none + :param tau_m: + :type tau_m: none + :param v_rest: + :type v_rest: none + :param v_reset: + :type v_reset: none + :param cm: + :type cm: none + :param i_offset: + :type i_offset: none + :param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type tau_syn_E: none + :param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! 
Any synapse producing a current can be placed on this cell + :type tau_syn_I: none + :param v_init: + :type v_init: none + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('a', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('b', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('delta_T', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('tau_w', 'xs:float', 0, 0, {'use': u'required'}), - MemberSpec_('v_spike', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_("a", "xs:float", 0, 0, {"use": "required", "name": "a"}), + MemberSpec_("b", "xs:float", 0, 0, {"use": "required", "name": "b"}), + MemberSpec_( + "delta_T", "xs:float", 0, 0, {"use": "required", "name": "delta_T"} + ), + MemberSpec_("tau_w", "xs:float", 0, 0, {"use": "required", "name": "tau_w"}), + MemberSpec_( + "v_spike", "xs:float", 0, 0, {"use": "required", "name": "v_spike"} + ), ] subclass = None superclass = basePyNNIaFCondCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, e_rev_E=None, e_rev_I=None, a=None, b=None, delta_T=None, tau_w=None, v_spike=None, extensiontype_=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + cm=None, + i_offset=None, + tau_syn_E=None, + tau_syn_I=None, + v_init=None, + tau_m=None, + tau_refrac=None, + v_reset=None, + v_rest=None, + v_thresh=None, + e_rev_E=None, + e_rev_I=None, + a=None, + b=None, + delta_T=None, + tau_w=None, + v_spike=None, + extensiontype_=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(EIF_cond_exp_isfa_ista, self).__init__(neuro_lex_id, id, 
metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, tau_m, tau_refrac, v_reset, v_rest, v_thresh, e_rev_E, e_rev_I, extensiontype_, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("EIF_cond_exp_isfa_ista"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + cm, + i_offset, + tau_syn_E, + tau_syn_I, + v_init, + tau_m, + tau_refrac, + v_reset, + v_rest, + v_thresh, + e_rev_E, + e_rev_I, + extensiontype_, + **kwargs_ + ) self.a = _cast(float, a) + self.a_nsprefix_ = None self.b = _cast(float, b) + self.b_nsprefix_ = None self.delta_T = _cast(float, delta_T) + self.delta_T_nsprefix_ = None self.tau_w = _cast(float, tau_w) + self.tau_w_nsprefix_ = None self.v_spike = _cast(float, v_spike) + self.v_spike_nsprefix_ = None self.extensiontype_ = extensiontype_ + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, EIF_cond_exp_isfa_ista) + CurrentSubclassModule_, EIF_cond_exp_isfa_ista + ) if subclass is not None: return subclass(*args_, **kwargs_) if EIF_cond_exp_isfa_ista.subclass: return EIF_cond_exp_isfa_ista.subclass(*args_, **kwargs_) else: return EIF_cond_exp_isfa_ista(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(EIF_cond_exp_isfa_ista, self).hasContent_() - ): + + def _hasContent(self): + if super(EIF_cond_exp_isfa_ista, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='EIF_cond_exp_isfa_ista', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('EIF_cond_exp_isfa_ista') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="EIF_cond_exp_isfa_ista", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("EIF_cond_exp_isfa_ista") if imported_ns_def_ is 
not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "EIF_cond_exp_isfa_ista": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='EIF_cond_exp_isfa_ista') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='EIF_cond_exp_isfa_ista', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="EIF_cond_exp_isfa_ista", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="EIF_cond_exp_isfa_ista", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='EIF_cond_exp_isfa_ista'): - super(EIF_cond_exp_isfa_ista, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='EIF_cond_exp_isfa_ista') - if self.a is not None and 'a' not in already_processed: - already_processed.add('a') - outfile.write(' a="%s"' % self.gds_format_float(self.a, input_name='a')) - if self.b is not None and 'b' not in already_processed: - already_processed.add('b') - outfile.write(' b="%s"' % 
self.gds_format_float(self.b, input_name='b')) - if self.delta_T is not None and 'delta_T' not in already_processed: - already_processed.add('delta_T') - outfile.write(' delta_T="%s"' % self.gds_format_float(self.delta_T, input_name='delta_T')) - if self.tau_w is not None and 'tau_w' not in already_processed: - already_processed.add('tau_w') - outfile.write(' tau_w="%s"' % self.gds_format_float(self.tau_w, input_name='tau_w')) - if self.v_spike is not None and 'v_spike' not in already_processed: - already_processed.add('v_spike') - outfile.write(' v_spike="%s"' % self.gds_format_float(self.v_spike, input_name='v_spike')) - if self.extensiontype_ is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="EIF_cond_exp_isfa_ista", + ): + super(EIF_cond_exp_isfa_ista, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="EIF_cond_exp_isfa_ista", + ) + if self.a is not None and "a" not in already_processed: + already_processed.add("a") + outfile.write(' a="%s"' % self.gds_format_float(self.a, input_name="a")) + if self.b is not None and "b" not in already_processed: + already_processed.add("b") + outfile.write(' b="%s"' % self.gds_format_float(self.b, input_name="b")) + if self.delta_T is not None and "delta_T" not in already_processed: + already_processed.add("delta_T") + outfile.write( + ' delta_T="%s"' + % self.gds_format_float(self.delta_T, input_name="delta_T") + ) + if self.tau_w is not None and "tau_w" not in already_processed: + already_processed.add("tau_w") + outfile.write( + ' tau_w="%s"' % self.gds_format_float(self.tau_w, input_name="tau_w") + ) + if self.v_spike is not None and "v_spike" not in already_processed: + already_processed.add("v_spike") + outfile.write( + ' v_spike="%s"' + 
% self.gds_format_float(self.v_spike, input_name="v_spike") + ) + if self.extensiontype_ is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') - outfile.write(' xsi:type="%s"' % self.extensiontype_) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='EIF_cond_exp_isfa_ista', fromsubclass_=False, pretty_print=True): - super(EIF_cond_exp_isfa_ista, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + if ":" not in self.extensiontype_: + imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get( + self.extensiontype_, "" + ) + outfile.write( + ' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_) + ) + else: + outfile.write(' xsi:type="%s"' % self.extensiontype_) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="EIF_cond_exp_isfa_ista", + fromsubclass_=False, + pretty_print=True, + ): + super(EIF_cond_exp_isfa_ista, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('a', node) - if value is not None and 'a' not in already_processed: - already_processed.add('a') - 
try: - self.a = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (a): %s' % exp) - value = find_attr_value_('b', node) - if value is not None and 'b' not in already_processed: - already_processed.add('b') - try: - self.b = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (b): %s' % exp) - value = find_attr_value_('delta_T', node) - if value is not None and 'delta_T' not in already_processed: - already_processed.add('delta_T') - try: - self.delta_T = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (delta_T): %s' % exp) - value = find_attr_value_('tau_w', node) - if value is not None and 'tau_w' not in already_processed: - already_processed.add('tau_w') - try: - self.tau_w = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (tau_w): %s' % exp) - value = find_attr_value_('v_spike', node) - if value is not None and 'v_spike' not in already_processed: - already_processed.add('v_spike') - try: - self.v_spike = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (v_spike): %s' % exp) - value = find_attr_value_('xsi:type', node) - if value is not None and 'xsi:type' not in already_processed: - already_processed.add('xsi:type') + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("a", node) + if value is not None and "a" not in already_processed: + already_processed.add("a") + value = self.gds_parse_float(value, node, "a") + self.a = value + value = find_attr_value_("b", node) + if value is not None and "b" not in already_processed: + already_processed.add("b") + value = self.gds_parse_float(value, node, "b") + self.b = value + value = find_attr_value_("delta_T", node) + if value is not None and "delta_T" not in already_processed: + already_processed.add("delta_T") + value = self.gds_parse_float(value, node, "delta_T") + self.delta_T = value + 
value = find_attr_value_("tau_w", node) + if value is not None and "tau_w" not in already_processed: + already_processed.add("tau_w") + value = self.gds_parse_float(value, node, "tau_w") + self.tau_w = value + value = find_attr_value_("v_spike", node) + if value is not None and "v_spike" not in already_processed: + already_processed.add("v_spike") + value = self.gds_parse_float(value, node, "v_spike") + self.v_spike = value + value = find_attr_value_("xsi:type", node) + if value is not None and "xsi:type" not in already_processed: + already_processed.add("xsi:type") self.extensiontype_ = value - super(EIF_cond_exp_isfa_ista, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(EIF_cond_exp_isfa_ista, self).buildChildren(child_, node, nodeName_, True) + super(EIF_cond_exp_isfa_ista, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(EIF_cond_exp_isfa_ista, self)._buildChildren( + child_, node, nodeName_, True + ) pass + + # end class EIF_cond_exp_isfa_ista class IF_cond_exp(basePyNNIaFCondCell): - member_data_items_ = [ - ] + """IF_cond_exp -- Leaky integrate and fire model with fixed threshold and exponentially-decaying post-synaptic conductance + \n + :param e_rev_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type e_rev_E: none + :param e_rev_I: This parameter is never used in the NeuroML2 description of this cell! 
Any synapse producing a current can be placed on this cell + :type e_rev_I: none + :param tau_refrac: + :type tau_refrac: none + :param v_thresh: + :type v_thresh: none + :param tau_m: + :type tau_m: none + :param v_rest: + :type v_rest: none + :param v_reset: + :type v_reset: none + :param cm: + :type cm: none + :param i_offset: + :type i_offset: none + :param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type tau_syn_E: none + :param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type tau_syn_I: none + :param v_init: + :type v_init: none + + """ + + __hash__ = GeneratedsSuper.__hash__ + member_data_items_ = [] subclass = None superclass = basePyNNIaFCondCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, e_rev_E=None, e_rev_I=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + cm=None, + i_offset=None, + tau_syn_E=None, + tau_syn_I=None, + v_init=None, + tau_m=None, + tau_refrac=None, + v_reset=None, + v_rest=None, + v_thresh=None, + e_rev_E=None, + e_rev_I=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(IF_cond_exp, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, tau_m, tau_refrac, v_reset, v_rest, v_thresh, e_rev_E, e_rev_I, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("IF_cond_exp"), 
self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + cm, + i_offset, + tau_syn_E, + tau_syn_I, + v_init, + tau_m, + tau_refrac, + v_reset, + v_rest, + v_thresh, + e_rev_E, + e_rev_I, + **kwargs_ + ) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, IF_cond_exp) + subclass = getSubclassFromModule_(CurrentSubclassModule_, IF_cond_exp) if subclass is not None: return subclass(*args_, **kwargs_) if IF_cond_exp.subclass: return IF_cond_exp.subclass(*args_, **kwargs_) else: return IF_cond_exp(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(IF_cond_exp, self).hasContent_() - ): + + def _hasContent(self): + if super(IF_cond_exp, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IF_cond_exp', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IF_cond_exp') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IF_cond_exp", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IF_cond_exp") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "IF_cond_exp": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_cond_exp') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, 
name_='IF_cond_exp', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IF_cond_exp" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="IF_cond_exp", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IF_cond_exp'): - super(IF_cond_exp, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_cond_exp') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IF_cond_exp', fromsubclass_=False, pretty_print=True): - super(IF_cond_exp, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="IF_cond_exp", + ): + super(IF_cond_exp, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IF_cond_exp" + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IF_cond_exp", + fromsubclass_=False, + pretty_print=True, + ): + super(IF_cond_exp, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if 
SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(IF_cond_exp, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(IF_cond_exp, self).buildChildren(child_, node, nodeName_, True) + + def _buildAttributes(self, node, attrs, already_processed): + super(IF_cond_exp, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(IF_cond_exp, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class IF_cond_exp class IF_cond_alpha(basePyNNIaFCondCell): - member_data_items_ = [ - ] + """IF_cond_alpha -- Leaky integrate and fire model with fixed threshold and alpha-function-shaped post-synaptic conductance + \n + :param e_rev_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type e_rev_E: none + :param e_rev_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type e_rev_I: none + :param tau_refrac: + :type tau_refrac: none + :param v_thresh: + :type v_thresh: none + :param tau_m: + :type tau_m: none + :param v_rest: + :type v_rest: none + :param v_reset: + :type v_reset: none + :param cm: + :type cm: none + :param i_offset: + :type i_offset: none + :param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! 
Any synapse producing a current can be placed on this cell + :type tau_syn_E: none + :param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type tau_syn_I: none + :param v_init: + :type v_init: none + + """ + + __hash__ = GeneratedsSuper.__hash__ + member_data_items_ = [] subclass = None superclass = basePyNNIaFCondCell - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, e_rev_E=None, e_rev_I=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + cm=None, + i_offset=None, + tau_syn_E=None, + tau_syn_I=None, + v_init=None, + tau_m=None, + tau_refrac=None, + v_reset=None, + v_rest=None, + v_thresh=None, + e_rev_E=None, + e_rev_I=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(IF_cond_alpha, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, tau_m, tau_refrac, v_reset, v_rest, v_thresh, e_rev_E, e_rev_I, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("IF_cond_alpha"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + cm, + i_offset, + tau_syn_E, + tau_syn_I, + v_init, + tau_m, + tau_refrac, + v_reset, + v_rest, + v_thresh, + e_rev_E, + e_rev_I, + **kwargs_ + ) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: - subclass = getSubclassFromModule_( - CurrentSubclassModule_, IF_cond_alpha) + subclass = getSubclassFromModule_(CurrentSubclassModule_, 
IF_cond_alpha) if subclass is not None: return subclass(*args_, **kwargs_) if IF_cond_alpha.subclass: return IF_cond_alpha.subclass(*args_, **kwargs_) else: return IF_cond_alpha(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(IF_cond_alpha, self).hasContent_() - ): + + def _hasContent(self): + if super(IF_cond_alpha, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IF_cond_alpha', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('IF_cond_alpha') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IF_cond_alpha", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("IF_cond_alpha") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "IF_cond_alpha": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_cond_alpha') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IF_cond_alpha', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IF_cond_alpha" + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + 
namespaceprefix_, + namespacedef_, + name_="IF_cond_alpha", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IF_cond_alpha'): - super(IF_cond_alpha, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IF_cond_alpha') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IF_cond_alpha', fromsubclass_=False, pretty_print=True): - super(IF_cond_alpha, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="IF_cond_alpha", + ): + super(IF_cond_alpha, self)._exportAttributes( + outfile, level, already_processed, namespaceprefix_, name_="IF_cond_alpha" + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="IF_cond_alpha", + fromsubclass_=False, + pretty_print=True, + ): + super(IF_cond_alpha, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self 
- def buildAttributes(self, node, attrs, already_processed): - super(IF_cond_alpha, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(IF_cond_alpha, self).buildChildren(child_, node, nodeName_, True) + + def _buildAttributes(self, node, attrs, already_processed): + super(IF_cond_alpha, self)._buildAttributes(node, attrs, already_processed) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(IF_cond_alpha, self)._buildChildren(child_, node, nodeName_, True) pass + + # end class IF_cond_alpha class ContinuousConnectionInstanceW(ContinuousConnectionInstance): - """Individual continuous/analog synaptic connection - instance based. - Includes setting of _weight for the connection""" + """ContinuousConnectionInstanceW -- An instance of a connection in a **continuousProjection** between **presynapticPopulation** to another **postsynapticPopulation** through a **preComponent** at the start and **postComponent** at the end. Populations need to be of type **populationList** and contain **instance** and **location** elements. Can be used for analog synapses. 
Includes setting of **weight** for the connection + \n + :param weight: + :type weight: none + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('weight', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_("weight", "xs:float", 0, 0, {"use": "required", "name": "weight"}), ] subclass = None superclass = ContinuousConnectionInstance - def __init__(self, neuro_lex_id=None, id=None, pre_cell=None, pre_segment='0', pre_fraction_along='0.5', post_cell=None, post_segment='0', post_fraction_along='0.5', pre_component=None, post_component=None, weight=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + pre_cell=None, + pre_segment="0", + pre_fraction_along="0.5", + post_cell=None, + post_segment="0", + post_fraction_along="0.5", + pre_component=None, + post_component=None, + weight=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ContinuousConnectionInstanceW, self).__init__(neuro_lex_id, id, pre_cell, pre_segment, pre_fraction_along, post_cell, post_segment, post_fraction_along, pre_component, post_component, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ContinuousConnectionInstanceW"), self).__init__( + neuro_lex_id, + id, + pre_cell, + pre_segment, + pre_fraction_along, + post_cell, + post_segment, + post_fraction_along, + pre_component, + post_component, + **kwargs_ + ) self.weight = _cast(float, weight) + self.weight_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ContinuousConnectionInstanceW) + CurrentSubclassModule_, ContinuousConnectionInstanceW + ) if subclass is not None: return subclass(*args_, **kwargs_) if ContinuousConnectionInstanceW.subclass: return 
ContinuousConnectionInstanceW.subclass(*args_, **kwargs_) else: return ContinuousConnectionInstanceW(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(ContinuousConnectionInstanceW, self).hasContent_() - ): + + def _hasContent(self): + if super(ContinuousConnectionInstanceW, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContinuousConnectionInstanceW', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ContinuousConnectionInstanceW') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ContinuousConnectionInstanceW", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ContinuousConnectionInstanceW") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if ( + self.original_tagname_ is not None + and name_ == "ContinuousConnectionInstanceW" + ): name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ContinuousConnectionInstanceW') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ContinuousConnectionInstanceW', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ContinuousConnectionInstanceW'): - super(ContinuousConnectionInstanceW, self).exportAttributes(outfile, 
level, already_processed, namespaceprefix_, name_='ContinuousConnectionInstanceW') - if self.weight is not None and 'weight' not in already_processed: - already_processed.add('weight') - outfile.write(' weight="%s"' % self.gds_format_float(self.weight, input_name='weight')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ContinuousConnectionInstanceW', fromsubclass_=False, pretty_print=True): - super(ContinuousConnectionInstanceW, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ContinuousConnectionInstanceW", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ContinuousConnectionInstanceW", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ContinuousConnectionInstanceW", + ): + super(ContinuousConnectionInstanceW, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ContinuousConnectionInstanceW", + ) + if self.weight is not None and "weight" not in already_processed: + already_processed.add("weight") + outfile.write( + ' weight="%s"' % self.gds_format_float(self.weight, input_name="weight") + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ContinuousConnectionInstanceW", + fromsubclass_=False, + pretty_print=True, + ): + super(ContinuousConnectionInstanceW, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, 
+ pretty_print=pretty_print, + ) pass - def build(self, node): + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('weight', node) - if value is not None and 'weight' not in already_processed: - already_processed.add('weight') - try: - self.weight = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (weight): %s' % exp) - super(ContinuousConnectionInstanceW, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ContinuousConnectionInstanceW, self).buildChildren(child_, node, nodeName_, True) + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("weight", node) + if value is not None and "weight" not in already_processed: + already_processed.add("weight") + value = self.gds_parse_float(value, node, "weight") + self.weight = value + super(ContinuousConnectionInstanceW, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(ContinuousConnectionInstanceW, self)._buildChildren( + child_, node, nodeName_, True + ) pass def get_weight(self): + """Get weight. + + If weight is not set, the default value of 1.0 is returned. 
+ """ - return float(self.weight) if self.weight!=None else 1.0 + return float(self.weight) if self.weight != None else 1.0 def __str__(self): - return "Continuous Connection (Instance based & weight) "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())+ ", pre comp: "+str(self.pre_component)+", post comp: "+str(self.post_component)+", weight: "+'%.6f'%self.get_weight() - + return ( + "Continuous Connection (Instance based & weight) " + + str(self.id) + + ": " + + str(self.get_pre_info()) + + " -> " + + str(self.get_post_info()) + + ", pre comp: " + + str(self.pre_component) + + ", post comp: " + + str(self.post_component) + + ", weight: " + + "%.6f" % self.get_weight() + ) # end class ContinuousConnectionInstanceW class ElectricalConnectionInstanceW(ElectricalConnectionInstance): - """Projection between two populations consisting of analog connections - (e.g. graded synapses). Includes setting of weight for the - connection""" + """ElectricalConnectionInstanceW -- To enable connections between populations through gap junctions. Populations need to be of type **populationList** and contain **instance** and **location** elements. 
Includes setting of **weight** for the connection + \n + :param weight: + :type weight: none + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('weight', 'xs:float', 0, 0, {'use': u'required'}), + MemberSpec_("weight", "xs:float", 0, 0, {"use": "required", "name": "weight"}), ] subclass = None superclass = ElectricalConnectionInstance - def __init__(self, neuro_lex_id=None, id=None, pre_cell=None, pre_segment='0', pre_fraction_along='0.5', post_cell=None, post_segment='0', post_fraction_along='0.5', synapse=None, weight=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + pre_cell=None, + pre_segment="0", + pre_fraction_along="0.5", + post_cell=None, + post_segment="0", + post_fraction_along="0.5", + synapse=None, + weight=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(ElectricalConnectionInstanceW, self).__init__(neuro_lex_id, id, pre_cell, pre_segment, pre_fraction_along, post_cell, post_segment, post_fraction_along, synapse, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("ElectricalConnectionInstanceW"), self).__init__( + neuro_lex_id, + id, + pre_cell, + pre_segment, + pre_fraction_along, + post_cell, + post_segment, + post_fraction_along, + synapse, + **kwargs_ + ) self.weight = _cast(float, weight) + self.weight_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, ElectricalConnectionInstanceW) + CurrentSubclassModule_, ElectricalConnectionInstanceW + ) if subclass is not None: return subclass(*args_, **kwargs_) if ElectricalConnectionInstanceW.subclass: return ElectricalConnectionInstanceW.subclass(*args_, **kwargs_) else: return ElectricalConnectionInstanceW(*args_, **kwargs_) 
+ factory = staticmethod(factory) - def hasContent_(self): - if ( - super(ElectricalConnectionInstanceW, self).hasContent_() - ): + + def _hasContent(self): + if super(ElectricalConnectionInstanceW, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ElectricalConnectionInstanceW', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('ElectricalConnectionInstanceW') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ElectricalConnectionInstanceW", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("ElectricalConnectionInstanceW") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if ( + self.original_tagname_ is not None + and name_ == "ElectricalConnectionInstanceW" + ): name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalConnectionInstanceW') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ElectricalConnectionInstanceW', pretty_print=pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ElectricalConnectionInstanceW'): - super(ElectricalConnectionInstanceW, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ElectricalConnectionInstanceW') - if self.weight is not None and 
'weight' not in already_processed: - already_processed.add('weight') - outfile.write(' weight="%s"' % self.gds_format_float(self.weight, input_name='weight')) - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ElectricalConnectionInstanceW', fromsubclass_=False, pretty_print=True): - super(ElectricalConnectionInstanceW, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ElectricalConnectionInstanceW", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="ElectricalConnectionInstanceW", + pretty_print=pretty_print, + ) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="ElectricalConnectionInstanceW", + ): + super(ElectricalConnectionInstanceW, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="ElectricalConnectionInstanceW", + ) + if self.weight is not None and "weight" not in already_processed: + already_processed.add("weight") + outfile.write( + ' weight="%s"' % self.gds_format_float(self.weight, input_name="weight") + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="ElectricalConnectionInstanceW", + fromsubclass_=False, + pretty_print=True, + ): + super(ElectricalConnectionInstanceW, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) pass - def build(self, node): + + def build(self, node, gds_collector_=None): + 
self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - value = find_attr_value_('weight', node) - if value is not None and 'weight' not in already_processed: - already_processed.add('weight') - try: - self.weight = float(value) - except ValueError as exp: - raise ValueError('Bad float/double attribute (weight): %s' % exp) - super(ElectricalConnectionInstanceW, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(ElectricalConnectionInstanceW, self).buildChildren(child_, node, nodeName_, True) + + def _buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_("weight", node) + if value is not None and "weight" not in already_processed: + already_processed.add("weight") + value = self.gds_parse_float(value, node, "weight") + self.weight = value + super(ElectricalConnectionInstanceW, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(ElectricalConnectionInstanceW, self)._buildChildren( + child_, node, nodeName_, True + ) pass def get_weight(self): + """Get the weight of the connection - return float(self.weight) if self.weight!=None else 1.0 + If a weight is not set (or is set to None), returns the default value + of 1.0. 
+ + :returns: weight of connection or 1.0 if not set + :rtype: float + """ + + return float(self.weight) if self.weight != None else 1.0 def __str__(self): - return "Electrical Connection (Instance based & weight) "+str(self.id)+": "+str(self.get_pre_info())+" -> "+str(self.get_post_info())+ ", synapse: "+str(self.synapse) + ", weight: "+'%.6f'%self.get_weight() + return ( + "Electrical Connection (Instance based & weight) " + + str(self.id) + + ": " + + str(self.get_pre_info()) + + " -> " + + str(self.get_post_info()) + + ", synapse: " + + str(self.synapse) + + ", weight: " + + "%.6f" % self.get_weight() + ) # end class ElectricalConnectionInstanceW class BlockingPlasticSynapse(ExpTwoSynapse): + """BlockingPlasticSynapse -- Biexponential synapse that allows for optional block and plasticity mechanisms, which can be expressed as child elements. + \n + :param tauRise: + :type tauRise: time + :param tauDecay: + :type tauDecay: time + :param gbase: Baseline conductance, generally the maximum conductance following a single spike + :type gbase: conductance + :param erev: Reversal potential of the synapse + :type erev: voltage + + """ + + __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ - MemberSpec_('plasticity_mechanism', 'PlasticityMechanism', 0, 1, {u'type': u'PlasticityMechanism', u'name': u'plasticityMechanism', u'minOccurs': u'0'}, None), - MemberSpec_('block_mechanism', 'BlockMechanism', 0, 1, {u'type': u'BlockMechanism', u'name': u'blockMechanism', u'minOccurs': u'0'}, None), + MemberSpec_( + "plasticity_mechanism", + "PlasticityMechanism", + 0, + 1, + { + "minOccurs": "0", + "name": "plasticityMechanism", + "type": "PlasticityMechanism", + }, + None, + ), + MemberSpec_( + "block_mechanism", + "BlockMechanism", + 0, + 1, + {"minOccurs": "0", "name": "blockMechanism", "type": "BlockMechanism"}, + None, + ), ] subclass = None superclass = ExpTwoSynapse - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, 
annotation=None, gbase=None, erev=None, tau_decay=None, tau_rise=None, plasticity_mechanism=None, block_mechanism=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + gbase=None, + erev=None, + tau_decay=None, + tau_rise=None, + plasticity_mechanism=None, + block_mechanism=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(BlockingPlasticSynapse, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, gbase, erev, tau_decay, tau_rise, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("BlockingPlasticSynapse"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + gbase, + erev, + tau_decay, + tau_rise, + **kwargs_ + ) self.plasticity_mechanism = plasticity_mechanism + self.plasticity_mechanism_nsprefix_ = None self.block_mechanism = block_mechanism + self.block_mechanism_nsprefix_ = None + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, BlockingPlasticSynapse) + CurrentSubclassModule_, BlockingPlasticSynapse + ) if subclass is not None: return subclass(*args_, **kwargs_) if BlockingPlasticSynapse.subclass: return BlockingPlasticSynapse.subclass(*args_, **kwargs_) else: return BlockingPlasticSynapse(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): + + def _hasContent(self): if ( - self.plasticity_mechanism is not None or - self.block_mechanism is not None or - super(BlockingPlasticSynapse, self).hasContent_() + self.plasticity_mechanism is not None + or self.block_mechanism is not None + or super(BlockingPlasticSynapse, self)._hasContent() ): return True else: return False - def export(self, 
outfile, level, namespaceprefix_='', namespacedef_='', name_='BlockingPlasticSynapse', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('BlockingPlasticSynapse') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="BlockingPlasticSynapse", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("BlockingPlasticSynapse") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "BlockingPlasticSynapse": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BlockingPlasticSynapse') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BlockingPlasticSynapse', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BlockingPlasticSynapse", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="BlockingPlasticSynapse", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, 
namespaceprefix_='', name_='BlockingPlasticSynapse'): - super(BlockingPlasticSynapse, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BlockingPlasticSynapse') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BlockingPlasticSynapse', fromsubclass_=False, pretty_print=True): - super(BlockingPlasticSynapse, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="BlockingPlasticSynapse", + ): + super(BlockingPlasticSynapse, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="BlockingPlasticSynapse", + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_=' xmlns:None="http://www.neuroml.org/schema/neuroml2" ', + name_="BlockingPlasticSynapse", + fromsubclass_=False, + pretty_print=True, + ): + super(BlockingPlasticSynapse, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' + eol_ = "" if self.plasticity_mechanism is not None: - self.plasticity_mechanism.export(outfile, level, namespaceprefix_, namespacedef_='', name_='plasticityMechanism', pretty_print=pretty_print) + namespaceprefix_ = ( + self.plasticity_mechanism_nsprefix_ + ":" + if (UseCapturedNS_ and self.plasticity_mechanism_nsprefix_) + else "" + ) + self.plasticity_mechanism.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="plasticityMechanism", + pretty_print=pretty_print, + ) if self.block_mechanism is not None: - self.block_mechanism.export(outfile, level, namespaceprefix_, namespacedef_='', name_='blockMechanism', pretty_print=pretty_print) - 
def build(self, node): + namespaceprefix_ = ( + self.block_mechanism_nsprefix_ + ":" + if (UseCapturedNS_ and self.block_mechanism_nsprefix_) + else "" + ) + self.block_mechanism.export( + outfile, + level, + namespaceprefix_, + namespacedef_="", + name_="blockMechanism", + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(BlockingPlasticSynapse, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - if nodeName_ == 'plasticityMechanism': + + def _buildAttributes(self, node, attrs, already_processed): + super(BlockingPlasticSynapse, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + if nodeName_ == "plasticityMechanism": obj_ = PlasticityMechanism.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.plasticity_mechanism = obj_ - obj_.original_tagname_ = 'plasticityMechanism' - elif nodeName_ == 'blockMechanism': + obj_.original_tagname_ = "plasticityMechanism" + elif nodeName_ == "blockMechanism": obj_ = BlockMechanism.factory(parent_object_=self) - obj_.build(child_) + obj_.build(child_, gds_collector_=gds_collector_) self.block_mechanism = obj_ - obj_.original_tagname_ = 'blockMechanism' - super(BlockingPlasticSynapse, self).buildChildren(child_, 
node, nodeName_, True) + obj_.original_tagname_ = "blockMechanism" + super(BlockingPlasticSynapse, self)._buildChildren( + child_, node, nodeName_, True + ) + + # end class BlockingPlasticSynapse class EIF_cond_alpha_isfa_ista(EIF_cond_exp_isfa_ista): - member_data_items_ = [ - ] + """EIF_cond_alpha_isfa_ista -- Adaptive exponential integrate and fire neuron according to Brette R and Gerstner W ( 2005 ) with alpha-function-shaped post-synaptic conductance + \n + :param v_spike: + :type v_spike: none + :param delta_T: + :type delta_T: none + :param tau_w: + :type tau_w: none + :param a: + :type a: none + :param b: + :type b: none + :param e_rev_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type e_rev_E: none + :param e_rev_I: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type e_rev_I: none + :param tau_refrac: + :type tau_refrac: none + :param v_thresh: + :type v_thresh: none + :param tau_m: + :type tau_m: none + :param v_rest: + :type v_rest: none + :param v_reset: + :type v_reset: none + :param cm: + :type cm: none + :param i_offset: + :type i_offset: none + :param tau_syn_E: This parameter is never used in the NeuroML2 description of this cell! Any synapse producing a current can be placed on this cell + :type tau_syn_E: none + :param tau_syn_I: This parameter is never used in the NeuroML2 description of this cell! 
Any synapse producing a current can be placed on this cell + :type tau_syn_I: none + :param v_init: + :type v_init: none + + """ + + __hash__ = GeneratedsSuper.__hash__ + member_data_items_ = [] subclass = None superclass = EIF_cond_exp_isfa_ista - def __init__(self, neuro_lex_id=None, id=None, metaid=None, notes=None, properties=None, annotation=None, cm=None, i_offset=None, tau_syn_E=None, tau_syn_I=None, v_init=None, tau_m=None, tau_refrac=None, v_reset=None, v_rest=None, v_thresh=None, e_rev_E=None, e_rev_I=None, a=None, b=None, delta_T=None, tau_w=None, v_spike=None, **kwargs_): + + def __init__( + self, + neuro_lex_id=None, + id=None, + metaid=None, + notes=None, + properties=None, + annotation=None, + cm=None, + i_offset=None, + tau_syn_E=None, + tau_syn_I=None, + v_init=None, + tau_m=None, + tau_refrac=None, + v_reset=None, + v_rest=None, + v_thresh=None, + e_rev_E=None, + e_rev_I=None, + a=None, + b=None, + delta_T=None, + tau_w=None, + v_spike=None, + gds_collector_=None, + **kwargs_ + ): + self.gds_collector_ = gds_collector_ + self.gds_elementtree_node_ = None self.original_tagname_ = None - self.parent_object_ = kwargs_.get('parent_object_') - super(EIF_cond_alpha_isfa_ista, self).__init__(neuro_lex_id, id, metaid, notes, properties, annotation, cm, i_offset, tau_syn_E, tau_syn_I, v_init, tau_m, tau_refrac, v_reset, v_rest, v_thresh, e_rev_E, e_rev_I, a, b, delta_T, tau_w, v_spike, **kwargs_) + self.parent_object_ = kwargs_.get("parent_object_") + self.ns_prefix_ = None + super(globals().get("EIF_cond_alpha_isfa_ista"), self).__init__( + neuro_lex_id, + id, + metaid, + notes, + properties, + annotation, + cm, + i_offset, + tau_syn_E, + tau_syn_I, + v_init, + tau_m, + tau_refrac, + v_reset, + v_rest, + v_thresh, + e_rev_E, + e_rev_I, + a, + b, + delta_T, + tau_w, + v_spike, + **kwargs_ + ) + def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( - CurrentSubclassModule_, EIF_cond_alpha_isfa_ista) + 
CurrentSubclassModule_, EIF_cond_alpha_isfa_ista + ) if subclass is not None: return subclass(*args_, **kwargs_) if EIF_cond_alpha_isfa_ista.subclass: return EIF_cond_alpha_isfa_ista.subclass(*args_, **kwargs_) else: return EIF_cond_alpha_isfa_ista(*args_, **kwargs_) + factory = staticmethod(factory) - def hasContent_(self): - if ( - super(EIF_cond_alpha_isfa_ista, self).hasContent_() - ): + + def _hasContent(self): + if super(EIF_cond_alpha_isfa_ista, self)._hasContent(): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='EIF_cond_alpha_isfa_ista', pretty_print=True): - imported_ns_def_ = GenerateDSNamespaceDefs_.get('EIF_cond_alpha_isfa_ista') + + def export( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="EIF_cond_alpha_isfa_ista", + pretty_print=True, + ): + imported_ns_def_ = GenerateDSNamespaceDefs_.get("EIF_cond_alpha_isfa_ista") if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: - eol_ = '\n' + eol_ = "\n" else: - eol_ = '' - if self.original_tagname_ is not None: + eol_ = "" + if self.original_tagname_ is not None and name_ == "EIF_cond_alpha_isfa_ista": name_ = self.original_tagname_ + if UseCapturedNS_ and self.ns_prefix_: + namespaceprefix_ = self.ns_prefix_ + ":" showIndent(outfile, level, pretty_print) - outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) - already_processed = set() - self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='EIF_cond_alpha_isfa_ista') - if self.hasContent_(): - outfile.write('>%s' % (eol_, )) - self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='EIF_cond_alpha_isfa_ista', pretty_print=pretty_print) + outfile.write( + "<%s%s%s" + % ( + namespaceprefix_, + name_, + namespacedef_ and " " + namespacedef_ or "", + ) + ) + already_processed = set() + self._exportAttributes( + outfile, + level, + 
already_processed, + namespaceprefix_, + name_="EIF_cond_alpha_isfa_ista", + ) + if self._hasContent(): + outfile.write(">%s" % (eol_,)) + self._exportChildren( + outfile, + level + 1, + namespaceprefix_, + namespacedef_, + name_="EIF_cond_alpha_isfa_ista", + pretty_print=pretty_print, + ) showIndent(outfile, level, pretty_print) - outfile.write('%s' % (namespaceprefix_, name_, eol_)) - else: - outfile.write('/>%s' % (eol_, )) - def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='EIF_cond_alpha_isfa_ista'): - super(EIF_cond_alpha_isfa_ista, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='EIF_cond_alpha_isfa_ista') - def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='EIF_cond_alpha_isfa_ista', fromsubclass_=False, pretty_print=True): - super(EIF_cond_alpha_isfa_ista, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print) - def build(self, node): - already_processed = set() - self.buildAttributes(node, node.attrib, already_processed) + outfile.write("%s" % (namespaceprefix_, name_, eol_)) + else: + outfile.write("/>%s" % (eol_,)) + + def _exportAttributes( + self, + outfile, + level, + already_processed, + namespaceprefix_="", + name_="EIF_cond_alpha_isfa_ista", + ): + super(EIF_cond_alpha_isfa_ista, self)._exportAttributes( + outfile, + level, + already_processed, + namespaceprefix_, + name_="EIF_cond_alpha_isfa_ista", + ) + + def _exportChildren( + self, + outfile, + level, + namespaceprefix_="", + namespacedef_="", + name_="EIF_cond_alpha_isfa_ista", + fromsubclass_=False, + pretty_print=True, + ): + super(EIF_cond_alpha_isfa_ista, self)._exportChildren( + outfile, + level, + namespaceprefix_, + namespacedef_, + name_, + True, + pretty_print=pretty_print, + ) + + def build(self, node, gds_collector_=None): + self.gds_collector_ = gds_collector_ + if SaveElementTreeNode: + self.gds_elementtree_node_ = node + 
already_processed = set() + self.ns_prefix_ = node.prefix + self._buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] - self.buildChildren(child, node, nodeName_) + self._buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self - def buildAttributes(self, node, attrs, already_processed): - super(EIF_cond_alpha_isfa_ista, self).buildAttributes(node, attrs, already_processed) - def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): - super(EIF_cond_alpha_isfa_ista, self).buildChildren(child_, node, nodeName_, True) + + def _buildAttributes(self, node, attrs, already_processed): + super(EIF_cond_alpha_isfa_ista, self)._buildAttributes( + node, attrs, already_processed + ) + + def _buildChildren( + self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None + ): + super(EIF_cond_alpha_isfa_ista, self)._buildChildren( + child_, node, nodeName_, True + ) pass + + # end class EIF_cond_alpha_isfa_ista GDSClassesMapping = { - 'neuroml': NeuroMLDocument, + "neuroml": NeuroMLDocument, } @@ -23742,95 +57880,176 @@ def get_root_tag(node): return tag, rootClass -def parse(inFileName, silence=False): +def get_required_ns_prefix_defs(rootNode): + """Get all name space prefix definitions required in this XML doc. + Return a dictionary of definitions and a char string of definitions. 
+ """ + nsmap = { + prefix: uri + for node in rootNode.iter() + for (prefix, uri) in node.nsmap.items() + if prefix is not None + } + namespacedefs = " ".join( + ['xmlns:{}="{}"'.format(prefix, uri) for prefix, uri in nsmap.items()] + ) + return nsmap, namespacedefs + + +def parse(inFileName, silence=False, print_warnings=True): + global CapturedNsmap_ + gds_collector = GdsCollector_() parser = None doc = parsexml_(inFileName, parser) rootNode = doc.getroot() rootTag, rootClass = get_root_tag(rootNode) if rootClass is None: - rootTag = 'Property' + rootTag = "Property" rootClass = Property rootObj = rootClass.factory() - rootObj.build(rootNode) - # Enable Python to collect the space used by the DOM. - doc = None -## if not silence: -## sys.stdout.write('\n') -## rootObj.export( -## sys.stdout, 0, name_=rootTag, -## namespacedef_='', -## pretty_print=True) + rootObj.build(rootNode, gds_collector_=gds_collector) + CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, namespacedef_=namespacedefs, pretty_print=True + ) + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ("-" * 50) + "\n" + sys.stderr.write(separator) + sys.stderr.write( + "----- Warnings -- count: {} -----\n".format( + len(gds_collector.get_messages()), + ) + ) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) return rootObj -def parseEtree(inFileName, silence=False): +def parseEtree( + inFileName, + silence=False, + print_warnings=True, + mapping=None, + reverse_mapping=None, + nsmap=None, +): parser = None doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() rootNode = doc.getroot() rootTag, rootClass = get_root_tag(rootNode) if rootClass is None: - rootTag = 'Property' + rootTag = "Property" rootClass = Property rootObj = rootClass.factory() - rootObj.build(rootNode) + 
rootObj.build(rootNode, gds_collector_=gds_collector) + if mapping is None: + mapping = {} + if reverse_mapping is None: + reverse_mapping = {} + rootElement = rootObj.to_etree( + None, + name_=rootTag, + mapping_=mapping, + reverse_mapping_=reverse_mapping, + nsmap_=nsmap, + ) + reverse_node_mapping = rootObj.gds_reverse_node_mapping(mapping) # Enable Python to collect the space used by the DOM. - doc = None - mapping = {} - rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) - reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) -## if not silence: -## content = etree_.tostring( -## rootElement, pretty_print=True, -## xml_declaration=True, encoding="utf-8") -## sys.stdout.write(content) -## sys.stdout.write('\n') - return rootObj, rootElement, mapping, reverse_mapping - - -def parseString(inString, silence=False): - '''Parse a string, create the object tree, and export it. + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, xml_declaration=True, encoding="utf-8" + ) + sys.stdout.write(str(content)) + sys.stdout.write("\n") + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ("-" * 50) + "\n" + sys.stderr.write(separator) + sys.stderr.write( + "----- Warnings -- count: {} -----\n".format( + len(gds_collector.get_messages()), + ) + ) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) + return rootObj, rootElement, mapping, reverse_node_mapping + + +def parseString(inString, silence=False, print_warnings=True): + """Parse a string, create the object tree, and export it. Arguments: - inString -- A string. This XML fragment should not start with an XML declaration containing an encoding. - silence -- A boolean. If False, export the object. Returns -- The root object in the tree. 
- ''' + """ parser = None - rootNode= parsexmlstring_(inString, parser) + rootNode = parsexmlstring_(inString, parser) + gds_collector = GdsCollector_() rootTag, rootClass = get_root_tag(rootNode) if rootClass is None: - rootTag = 'Property' + rootTag = "Property" rootClass = Property rootObj = rootClass.factory() - rootObj.build(rootNode) - # Enable Python to collect the space used by the DOM. -## if not silence: -## sys.stdout.write('\n') -## rootObj.export( -## sys.stdout, 0, name_=rootTag, -## namespacedef_='') + rootObj.build(rootNode, gds_collector_=gds_collector) + if not SaveElementTreeNode: + rootNode = None + if not silence: + sys.stdout.write('\n') + rootObj.export(sys.stdout, 0, name_=rootTag, namespacedef_="") + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ("-" * 50) + "\n" + sys.stderr.write(separator) + sys.stderr.write( + "----- Warnings -- count: {} -----\n".format( + len(gds_collector.get_messages()), + ) + ) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) return rootObj -def parseLiteral(inFileName, silence=False): +def parseLiteral(inFileName, silence=False, print_warnings=True): parser = None doc = parsexml_(inFileName, parser) + gds_collector = GdsCollector_() rootNode = doc.getroot() rootTag, rootClass = get_root_tag(rootNode) if rootClass is None: - rootTag = 'Property' + rootTag = "Property" rootClass = Property rootObj = rootClass.factory() - rootObj.build(rootNode) + rootObj.build(rootNode, gds_collector_=gds_collector) # Enable Python to collect the space used by the DOM. 
- doc = None -## if not silence: -## sys.stdout.write('#from nml import *\n\n') -## sys.stdout.write('import nml as model_\n\n') -## sys.stdout.write('rootObj = model_.rootClass(\n') -## rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) -## sys.stdout.write(')\n') + if not SaveElementTreeNode: + doc = None + rootNode = None + if not silence: + sys.stdout.write("#from nml import *\n\n") + sys.stdout.write("import nml as model_\n\n") + sys.stdout.write("rootObj = model_.rootClass(\n") + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(")\n") + if print_warnings and len(gds_collector.get_messages()) > 0: + separator = ("-" * 50) + "\n" + sys.stderr.write(separator) + sys.stderr.write( + "----- Warnings -- count: {} -----\n".format( + len(gds_collector.get_messages()), + ) + ) + gds_collector.write_messages(sys.stderr) + sys.stderr.write(separator) return rootObj @@ -23842,10 +58061,245 @@ def main(): usage() -if __name__ == '__main__': - #import pdb; pdb.set_trace() +if __name__ == "__main__": + # import pdb; pdb.set_trace() main() +RenameMappings_ = {} + +# +# Mapping of namespaces to types defined in them +# and the file in which each is defined. +# simpleTypes are marked "ST" and complexTypes "CT". 
+NamespaceToDefMappings_ = { + "http://www.neuroml.org/schema/neuroml2": [ + ("NmlId", "NeuroML_v2.2.xsd", "ST"), + ("Nml2Quantity", "NeuroML_v2.2.xsd", "ST"), + ("Nml2Quantity_none", "NeuroML_v2.2.xsd", "ST"), + ("Nml2Quantity_voltage", "NeuroML_v2.2.xsd", "ST"), + ("Nml2Quantity_length", "NeuroML_v2.2.xsd", "ST"), + ("Nml2Quantity_resistance", "NeuroML_v2.2.xsd", "ST"), + ("Nml2Quantity_resistivity", "NeuroML_v2.2.xsd", "ST"), + ("Nml2Quantity_conductance", "NeuroML_v2.2.xsd", "ST"), + ("Nml2Quantity_conductanceDensity", "NeuroML_v2.2.xsd", "ST"), + ("Nml2Quantity_permeability", "NeuroML_v2.2.xsd", "ST"), + ("Nml2Quantity_time", "NeuroML_v2.2.xsd", "ST"), + ("Nml2Quantity_pertime", "NeuroML_v2.2.xsd", "ST"), + ("Nml2Quantity_capacitance", "NeuroML_v2.2.xsd", "ST"), + ("Nml2Quantity_specificCapacitance", "NeuroML_v2.2.xsd", "ST"), + ("Nml2Quantity_concentration", "NeuroML_v2.2.xsd", "ST"), + ("Nml2Quantity_current", "NeuroML_v2.2.xsd", "ST"), + ("Nml2Quantity_currentDensity", "NeuroML_v2.2.xsd", "ST"), + ("Nml2Quantity_temperature", "NeuroML_v2.2.xsd", "ST"), + ("Nml2Quantity_rhoFactor", "NeuroML_v2.2.xsd", "ST"), + ("Nml2Quantity_conductancePerVoltage", "NeuroML_v2.2.xsd", "ST"), + ("MetaId", "NeuroML_v2.2.xsd", "ST"), + ("NeuroLexId", "NeuroML_v2.2.xsd", "ST"), + ("NonNegativeInteger", "NeuroML_v2.2.xsd", "ST"), + ("PositiveInteger", "NeuroML_v2.2.xsd", "ST"), + ("DoubleGreaterThanZero", "NeuroML_v2.2.xsd", "ST"), + ("ZeroOrOne", "NeuroML_v2.2.xsd", "ST"), + ("Notes", "NeuroML_v2.2.xsd", "ST"), + ("ZeroToOne", "NeuroML_v2.2.xsd", "ST"), + ("channelTypes", "NeuroML_v2.2.xsd", "ST"), + ("gateTypes", "NeuroML_v2.2.xsd", "ST"), + ("BlockTypes", "NeuroML_v2.2.xsd", "ST"), + ("PlasticityTypes", "NeuroML_v2.2.xsd", "ST"), + ("Metric", "NeuroML_v2.2.xsd", "ST"), + ("networkTypes", "NeuroML_v2.2.xsd", "ST"), + ("allowedSpaces", "NeuroML_v2.2.xsd", "ST"), + ("populationTypes", "NeuroML_v2.2.xsd", "ST"), + ("Property", "NeuroML_v2.2.xsd", "CT"), + ("Annotation", 
"NeuroML_v2.2.xsd", "CT"), + ("ComponentType", "NeuroML_v2.2.xsd", "CT"), + ("Constant", "NeuroML_v2.2.xsd", "CT"), + ("Exposure", "NeuroML_v2.2.xsd", "CT"), + ("NamedDimensionalType", "NeuroML_v2.2.xsd", "CT"), + ("NamedDimensionalVariable", "NeuroML_v2.2.xsd", "CT"), + ("Parameter", "NeuroML_v2.2.xsd", "CT"), + ("LEMS_Property", "NeuroML_v2.2.xsd", "CT"), + ("Requirement", "NeuroML_v2.2.xsd", "CT"), + ("InstanceRequirement", "NeuroML_v2.2.xsd", "CT"), + ("Dynamics", "NeuroML_v2.2.xsd", "CT"), + ("DerivedVariable", "NeuroML_v2.2.xsd", "CT"), + ("StateVariable", "NeuroML_v2.2.xsd", "CT"), + ("ConditionalDerivedVariable", "NeuroML_v2.2.xsd", "CT"), + ("Case", "NeuroML_v2.2.xsd", "CT"), + ("TimeDerivative", "NeuroML_v2.2.xsd", "CT"), + ("NeuroMLDocument", "NeuroML_v2.2.xsd", "CT"), + ("IncludeType", "NeuroML_v2.2.xsd", "CT"), + ("IonChannelScalable", "NeuroML_v2.2.xsd", "CT"), + ("IonChannelKS", "NeuroML_v2.2.xsd", "CT"), + ("IonChannel", "NeuroML_v2.2.xsd", "CT"), + ("IonChannelHH", "NeuroML_v2.2.xsd", "CT"), + ("IonChannelVShift", "NeuroML_v2.2.xsd", "CT"), + ("Q10ConductanceScaling", "NeuroML_v2.2.xsd", "CT"), + ("ClosedState", "NeuroML_v2.2.xsd", "CT"), + ("OpenState", "NeuroML_v2.2.xsd", "CT"), + ("ForwardTransition", "NeuroML_v2.2.xsd", "CT"), + ("ReverseTransition", "NeuroML_v2.2.xsd", "CT"), + ("TauInfTransition", "NeuroML_v2.2.xsd", "CT"), + ("GateKS", "NeuroML_v2.2.xsd", "CT"), + ("GateHHUndetermined", "NeuroML_v2.2.xsd", "CT"), + ("GateHHRates", "NeuroML_v2.2.xsd", "CT"), + ("GateHHTauInf", "NeuroML_v2.2.xsd", "CT"), + ("GateHHRatesTauInf", "NeuroML_v2.2.xsd", "CT"), + ("GateHHRatesTau", "NeuroML_v2.2.xsd", "CT"), + ("GateHHRatesInf", "NeuroML_v2.2.xsd", "CT"), + ("GateHHInstantaneous", "NeuroML_v2.2.xsd", "CT"), + ("GateFractional", "NeuroML_v2.2.xsd", "CT"), + ("GateFractionalSubgate", "NeuroML_v2.2.xsd", "CT"), + ("Q10Settings", "NeuroML_v2.2.xsd", "CT"), + ("HHRate", "NeuroML_v2.2.xsd", "CT"), + ("HHVariable", "NeuroML_v2.2.xsd", "CT"), + ("HHTime", 
"NeuroML_v2.2.xsd", "CT"), + ("DecayingPoolConcentrationModel", "NeuroML_v2.2.xsd", "CT"), + ("FixedFactorConcentrationModel", "NeuroML_v2.2.xsd", "CT"), + ("BaseSynapse", "NeuroML_v2.2.xsd", "CT"), + ("BaseVoltageDepSynapse", "NeuroML_v2.2.xsd", "CT"), + ("BaseCurrentBasedSynapse", "NeuroML_v2.2.xsd", "CT"), + ("BaseConductanceBasedSynapse", "NeuroML_v2.2.xsd", "CT"), + ("BaseConductanceBasedSynapseTwo", "NeuroML_v2.2.xsd", "CT"), + ("GapJunction", "NeuroML_v2.2.xsd", "CT"), + ("SilentSynapse", "NeuroML_v2.2.xsd", "CT"), + ("LinearGradedSynapse", "NeuroML_v2.2.xsd", "CT"), + ("GradedSynapse", "NeuroML_v2.2.xsd", "CT"), + ("AlphaCurrentSynapse", "NeuroML_v2.2.xsd", "CT"), + ("AlphaSynapse", "NeuroML_v2.2.xsd", "CT"), + ("ExpOneSynapse", "NeuroML_v2.2.xsd", "CT"), + ("ExpTwoSynapse", "NeuroML_v2.2.xsd", "CT"), + ("ExpThreeSynapse", "NeuroML_v2.2.xsd", "CT"), + ("DoubleSynapse", "NeuroML_v2.2.xsd", "CT"), + ("BlockingPlasticSynapse", "NeuroML_v2.2.xsd", "CT"), + ("BlockMechanism", "NeuroML_v2.2.xsd", "CT"), + ("PlasticityMechanism", "NeuroML_v2.2.xsd", "CT"), + ("BaseCell", "NeuroML_v2.2.xsd", "CT"), + ("IafTauCell", "NeuroML_v2.2.xsd", "CT"), + ("IafTauRefCell", "NeuroML_v2.2.xsd", "CT"), + ("IafCell", "NeuroML_v2.2.xsd", "CT"), + ("IafRefCell", "NeuroML_v2.2.xsd", "CT"), + ("IzhikevichCell", "NeuroML_v2.2.xsd", "CT"), + ("BaseCellMembPotCap", "NeuroML_v2.2.xsd", "CT"), + ("Izhikevich2007Cell", "NeuroML_v2.2.xsd", "CT"), + ("AdExIaFCell", "NeuroML_v2.2.xsd", "CT"), + ("FitzHughNagumoCell", "NeuroML_v2.2.xsd", "CT"), + ("FitzHughNagumo1969Cell", "NeuroML_v2.2.xsd", "CT"), + ("PinskyRinzelCA3Cell", "NeuroML_v2.2.xsd", "CT"), + ("Cell", "NeuroML_v2.2.xsd", "CT"), + ("Cell2CaPools", "NeuroML_v2.2.xsd", "CT"), + ("Morphology", "NeuroML_v2.2.xsd", "CT"), + ("Segment", "NeuroML_v2.2.xsd", "CT"), + ("SegmentParent", "NeuroML_v2.2.xsd", "CT"), + ("Point3DWithDiam", "NeuroML_v2.2.xsd", "CT"), + ("SegmentGroup", "NeuroML_v2.2.xsd", "CT"), + ("InhomogeneousParameter", 
"NeuroML_v2.2.xsd", "CT"), + ("ProximalDetails", "NeuroML_v2.2.xsd", "CT"), + ("DistalDetails", "NeuroML_v2.2.xsd", "CT"), + ("Member", "NeuroML_v2.2.xsd", "CT"), + ("Include", "NeuroML_v2.2.xsd", "CT"), + ("Path", "NeuroML_v2.2.xsd", "CT"), + ("SubTree", "NeuroML_v2.2.xsd", "CT"), + ("SegmentEndPoint", "NeuroML_v2.2.xsd", "CT"), + ("BiophysicalProperties", "NeuroML_v2.2.xsd", "CT"), + ("BiophysicalProperties2CaPools", "NeuroML_v2.2.xsd", "CT"), + ("MembraneProperties", "NeuroML_v2.2.xsd", "CT"), + ("MembraneProperties2CaPools", "NeuroML_v2.2.xsd", "CT"), + ("SpikeThresh", "NeuroML_v2.2.xsd", "CT"), + ("SpecificCapacitance", "NeuroML_v2.2.xsd", "CT"), + ("InitMembPotential", "NeuroML_v2.2.xsd", "CT"), + ("Resistivity", "NeuroML_v2.2.xsd", "CT"), + ("ChannelPopulation", "NeuroML_v2.2.xsd", "CT"), + ("ChannelDensityNonUniform", "NeuroML_v2.2.xsd", "CT"), + ("ChannelDensityNonUniformNernst", "NeuroML_v2.2.xsd", "CT"), + ("ChannelDensityNonUniformGHK", "NeuroML_v2.2.xsd", "CT"), + ("ChannelDensity", "NeuroML_v2.2.xsd", "CT"), + ("ChannelDensityVShift", "NeuroML_v2.2.xsd", "CT"), + ("ChannelDensityNernst", "NeuroML_v2.2.xsd", "CT"), + ("ChannelDensityNernstCa2", "NeuroML_v2.2.xsd", "CT"), + ("ChannelDensityGHK", "NeuroML_v2.2.xsd", "CT"), + ("ChannelDensityGHK2", "NeuroML_v2.2.xsd", "CT"), + ("VariableParameter", "NeuroML_v2.2.xsd", "CT"), + ("InhomogeneousValue", "NeuroML_v2.2.xsd", "CT"), + ("Species", "NeuroML_v2.2.xsd", "CT"), + ("ConcentrationModel_D", "NeuroML_v2.2.xsd", "CT"), + ("IntracellularProperties", "NeuroML_v2.2.xsd", "CT"), + ("IntracellularProperties2CaPools", "NeuroML_v2.2.xsd", "CT"), + ("ExtracellularProperties", "NeuroML_v2.2.xsd", "CT"), + ("ExtracellularPropertiesLocal", "NeuroML_v2.2.xsd", "CT"), + ("ReactionScheme", "NeuroML_v2.2.xsd", "CT"), + ("PulseGenerator", "NeuroML_v2.2.xsd", "CT"), + ("PulseGeneratorDL", "NeuroML_v2.2.xsd", "CT"), + ("SineGenerator", "NeuroML_v2.2.xsd", "CT"), + ("SineGeneratorDL", "NeuroML_v2.2.xsd", "CT"), + 
("RampGenerator", "NeuroML_v2.2.xsd", "CT"), + ("RampGeneratorDL", "NeuroML_v2.2.xsd", "CT"), + ("CompoundInput", "NeuroML_v2.2.xsd", "CT"), + ("CompoundInputDL", "NeuroML_v2.2.xsd", "CT"), + ("VoltageClamp", "NeuroML_v2.2.xsd", "CT"), + ("VoltageClampTriple", "NeuroML_v2.2.xsd", "CT"), + ("Spike", "NeuroML_v2.2.xsd", "CT"), + ("SpikeArray", "NeuroML_v2.2.xsd", "CT"), + ("TimedSynapticInput", "NeuroML_v2.2.xsd", "CT"), + ("SpikeGenerator", "NeuroML_v2.2.xsd", "CT"), + ("SpikeGeneratorRandom", "NeuroML_v2.2.xsd", "CT"), + ("SpikeGeneratorPoisson", "NeuroML_v2.2.xsd", "CT"), + ("SpikeGeneratorRefPoisson", "NeuroML_v2.2.xsd", "CT"), + ("PoissonFiringSynapse", "NeuroML_v2.2.xsd", "CT"), + ("TransientPoissonFiringSynapse", "NeuroML_v2.2.xsd", "CT"), + ("Network", "NeuroML_v2.2.xsd", "CT"), + ("Space", "NeuroML_v2.2.xsd", "CT"), + ("SpaceStructure", "NeuroML_v2.2.xsd", "CT"), + ("Region", "NeuroML_v2.2.xsd", "CT"), + ("Population", "NeuroML_v2.2.xsd", "CT"), + ("Layout", "NeuroML_v2.2.xsd", "CT"), + ("UnstructuredLayout", "NeuroML_v2.2.xsd", "CT"), + ("RandomLayout", "NeuroML_v2.2.xsd", "CT"), + ("GridLayout", "NeuroML_v2.2.xsd", "CT"), + ("Instance", "NeuroML_v2.2.xsd", "CT"), + ("Location", "NeuroML_v2.2.xsd", "CT"), + ("CellSet", "NeuroML_v2.2.xsd", "CT"), + ("SynapticConnection", "NeuroML_v2.2.xsd", "CT"), + ("BaseProjection", "NeuroML_v2.2.xsd", "CT"), + ("Projection", "NeuroML_v2.2.xsd", "CT"), + ("BaseConnection", "NeuroML_v2.2.xsd", "CT"), + ("BaseConnectionOldFormat", "NeuroML_v2.2.xsd", "CT"), + ("BaseConnectionNewFormat", "NeuroML_v2.2.xsd", "CT"), + ("Connection", "NeuroML_v2.2.xsd", "CT"), + ("ConnectionWD", "NeuroML_v2.2.xsd", "CT"), + ("ElectricalProjection", "NeuroML_v2.2.xsd", "CT"), + ("ElectricalConnection", "NeuroML_v2.2.xsd", "CT"), + ("ElectricalConnectionInstance", "NeuroML_v2.2.xsd", "CT"), + ("ElectricalConnectionInstanceW", "NeuroML_v2.2.xsd", "CT"), + ("ContinuousProjection", "NeuroML_v2.2.xsd", "CT"), + ("ContinuousConnection", 
"NeuroML_v2.2.xsd", "CT"), + ("ContinuousConnectionInstance", "NeuroML_v2.2.xsd", "CT"), + ("ContinuousConnectionInstanceW", "NeuroML_v2.2.xsd", "CT"), + ("ExplicitInput", "NeuroML_v2.2.xsd", "CT"), + ("InputList", "NeuroML_v2.2.xsd", "CT"), + ("Input", "NeuroML_v2.2.xsd", "CT"), + ("InputW", "NeuroML_v2.2.xsd", "CT"), + ("basePyNNCell", "NeuroML_v2.2.xsd", "CT"), + ("basePyNNIaFCell", "NeuroML_v2.2.xsd", "CT"), + ("basePyNNIaFCondCell", "NeuroML_v2.2.xsd", "CT"), + ("IF_curr_alpha", "NeuroML_v2.2.xsd", "CT"), + ("IF_curr_exp", "NeuroML_v2.2.xsd", "CT"), + ("IF_cond_alpha", "NeuroML_v2.2.xsd", "CT"), + ("IF_cond_exp", "NeuroML_v2.2.xsd", "CT"), + ("EIF_cond_exp_isfa_ista", "NeuroML_v2.2.xsd", "CT"), + ("EIF_cond_alpha_isfa_ista", "NeuroML_v2.2.xsd", "CT"), + ("HH_cond_exp", "NeuroML_v2.2.xsd", "CT"), + ("BasePynnSynapse", "NeuroML_v2.2.xsd", "CT"), + ("ExpCondSynapse", "NeuroML_v2.2.xsd", "CT"), + ("AlphaCondSynapse", "NeuroML_v2.2.xsd", "CT"), + ("ExpCurrSynapse", "NeuroML_v2.2.xsd", "CT"), + ("AlphaCurrSynapse", "NeuroML_v2.2.xsd", "CT"), + ("SpikeSourcePoisson", "NeuroML_v2.2.xsd", "CT"), + ("BaseWithoutId", "NeuroML_v2.2.xsd", "CT"), + ("BaseNonNegativeIntegerId", "NeuroML_v2.2.xsd", "CT"), + ("Base", "NeuroML_v2.2.xsd", "CT"), + ("Standalone", "NeuroML_v2.2.xsd", "CT"), + ] +} __all__ = [ "AdExIaFCell", @@ -24031,11 +58485,10 @@ def main(): "TimedSynapticInput", "TransientPoissonFiringSynapse", "UnstructuredLayout", - "ValueAcrossSegOrSegGroup", "VariableParameter", "VoltageClamp", "VoltageClampTriple", "basePyNNCell", "basePyNNIaFCell", - "basePyNNIaFCondCell" + "basePyNNIaFCondCell", ] diff --git a/neuroml/nml/regenerate-nml.sh b/neuroml/nml/regenerate-nml.sh new file mode 100755 index 00000000..dddb75cb --- /dev/null +++ b/neuroml/nml/regenerate-nml.sh @@ -0,0 +1,94 @@ +#!/bin/bash + +# Copyright 2021 NeuroML contributors +# File : regenerate-nml.sh +# Regenerate nml.py from the current schema version + + +echo "Note(1): Please remember to update the schema 
from the NeuroML2 repository" +echo "Note(2): Must be run in neuroml/nml/" +NEUROML_VERSION=$(grep -E 'current_neuroml_version.*' ../__init__.py | cut -d '=' -f 2 | tr -d '"' | tr -d ' ') +SCHEMA_FILE=NeuroML_${NEUROML_VERSION}.xsd +PYTHON_VERSION=$(python --version 2>&1) + +regenerate () { + if command -v generateDS > /dev/null 2>&1 + then + echo "Rebuilding nml.py from ${SCHEMA_FILE} (Version: ${NEUROML_VERSION})" + echo "generateds version:" + generateDS --version + echo "Python version: $PYTHON_VERSION" + + rm -f nml.py + + + # The version of generateDS for Python 2.7 has a slightly different + # style for arguments. You do not give helper_methods.py, just + # helper_methods + if [[ "$PYTHON_VERSION" =~ "2.7" ]] + then + PYTHONPATH="$PYTHONPATH:." generateDS -o nml.py --use-getter-setter=none --user-methods=helper_methods $SCHEMA_FILE + else + PYTHONPATH="$PYTHONPATH:." generateDS -o nml.py --use-getter-setter=none --user-methods=helper_methods.py $SCHEMA_FILE + fi + else + echo "GenerateDS not installed" + echo "Run: pip install generateds" + exit 1 + fi +} + +reformat () { + if command -v black > /dev/null 2>&1 + then + echo "Formatting new nml.py with black" + black nml.py + else + echo "black is not installed" + echo "Run: pip install black" + fi +} + +usage () { + echo "$0: Regenerate and reformat nml.py from the XSD schema file" + echo + echo "Usage: $0 [-arfh]" + echo + echo "-r: regenerate" + echo "-f: reformat" + echo "-a: regenerate and reformat" + echo "-h: print this help text and exit" +} + +if [ $# -lt 1 ] +then + usage + exit 1 +fi + +# parse options +while getopts "arfh" OPTION +do + case $OPTION in + r) + regenerate + exit 0 + ;; + f) + reformat + exit 0 + ;; + a) + regenerate && reformat + exit 0 + ;; + h) + usage + exit 0 + ;; + ?) 
+ usage + exit 1 + ;; + esac +done diff --git a/neuroml/test/misc_tests.py b/neuroml/test/misc_tests.py index ac14a6d7..c11e498a 100644 --- a/neuroml/test/misc_tests.py +++ b/neuroml/test/misc_tests.py @@ -13,19 +13,19 @@ except ImportError: import unittest -class TestCommonProperties(unittest.TestCase): +class TestCommonProperties(unittest.TestCase): def test_instatiation(self): """ Since classes are auto-generated, need to test that they correctly instantiate """ - - for name,test_class in inspect.getmembers(sys.modules[neuroml.__name__]): + + for name, test_class in inspect.getmembers(sys.modules[neuroml.__name__]): if sys.version_info >= (2, 7): print(sys.version_info) if inspect.isclass(test_class): ob = test_class() - self.assertIsInstance(ob,test_class) + self.assertIsInstance(ob, test_class) else: print("Warning - Python<2.7 does not support this test") pass diff --git a/neuroml/test/test_arraymorph.py b/neuroml/test/test_arraymorph.py index e8fb887f..a81b7eec 100644 --- a/neuroml/test/test_arraymorph.py +++ b/neuroml/test/test_arraymorph.py @@ -10,50 +10,52 @@ except ImportError: import unittest -class TestObjectBuiltMorphology(unittest.TestCase): +class TestObjectBuiltMorphology(unittest.TestCase): def setUp(self): """ Testing a complex hand-built morphology (from neuroml objects rather than arrays) """ - p = neuroml.Point3DWithDiam(x=0,y=0,z=0,diameter=50) - d = neuroml.Point3DWithDiam(x=50,y=0,z=0,diameter=50) + p = neuroml.Point3DWithDiam(x=0, y=0, z=0, diameter=50) + d = neuroml.Point3DWithDiam(x=50, y=0, z=0, diameter=50) soma = neuroml.Segment(proximal=p, distal=d) - soma.name = 'Soma' + soma.name = "Soma" soma.id = 0 - - #now make an axon with 100 compartments: - + + # now make an axon with 100 compartments: + parent = neuroml.SegmentParent(segments=soma.id) parent_segment = soma axon_segments = [] seg_id = 1 for i in range(100): - p = neuroml.Point3DWithDiam(x=parent_segment.distal.x, - y=parent_segment.distal.y, - z=parent_segment.distal.z, - 
diameter=0.1) - - d = neuroml.Point3DWithDiam(x=parent_segment.distal.x+10, - y=parent_segment.distal.y, - z=parent_segment.distal.z, - diameter=0.1) - - axon_segment = neuroml.Segment(proximal = p, - distal = d, - parent = parent) - + p = neuroml.Point3DWithDiam( + x=parent_segment.distal.x, + y=parent_segment.distal.y, + z=parent_segment.distal.z, + diameter=0.1, + ) + + d = neuroml.Point3DWithDiam( + x=parent_segment.distal.x + 10, + y=parent_segment.distal.y, + z=parent_segment.distal.z, + diameter=0.1, + ) + + axon_segment = neuroml.Segment(proximal=p, distal=d, parent=parent) + axon_segment.id = seg_id - - axon_segment.name = 'axon_segment_' + str(axon_segment.id) - - #now reset everything: + + axon_segment.name = "axon_segment_" + str(axon_segment.id) + + # now reset everything: parent = neuroml.SegmentParent(segments=axon_segment.id) parent_segment = axon_segment - seg_id += 1 - + seg_id += 1 + axon_segments.append(axon_segment) test_morphology = am.ArrayMorphology() @@ -69,106 +71,118 @@ def test_valid_morphology_ids(self): def test_invalid_morphology_ids(self): morphology = self.test_morphology - morphology.segments[0].id = 5 + morphology.segments[0].id = 5 self.assertFalse(morphology.valid_ids) def test_num_segments(self): num_segments = len(self.test_morphology.segments) - self.assertEqual(num_segments,101) + self.assertEqual(num_segments, 101) def test_segments_ids_ok(self): - self.assertEqual(self.test_morphology.segments[30].id,30) + self.assertEqual(self.test_morphology.segments[30].id, 30) def test_soma_still_located_at_zero(self): - self.assertEqual(self.test_morphology.segments[0].name,'Soma') - self.assertEqual(self.test_morphology.segments[0].id,0) + self.assertEqual(self.test_morphology.segments[0].name, "Soma") + self.assertEqual(self.test_morphology.segments[0].id, 0) def test_segment_vertices_ok(self): - self.assertEqual(self.test_morphology.segments[1].proximal.x,50.0) - + self.assertEqual(self.test_morphology.segments[1].proximal.x, 50.0) 
+ def test_axon_names_ok(self): - self.assertEqual(self.test_morphology.segments[32].name,'axon_segment_32') + self.assertEqual(self.test_morphology.segments[32].name, "axon_segment_32") def test_segment_instance(self): seg = self.test_morphology.segments[47] - self.assertIsInstance(seg,neuroml.nml.nml.Segment) + self.assertIsInstance(seg, neuroml.nml.nml.Segment) -class TestArrayMorphology(unittest.TestCase): +class TestArrayMorphology(unittest.TestCase): def setUp(self): num_segments = int(100) num_vertices = num_segments + 1 - x = np.linspace(0,10,num_vertices) + x = np.linspace(0, 10, num_vertices) y = np.zeros(num_vertices) z = np.zeros(num_vertices) - d = np.linspace(1,0.01,num_vertices) + d = np.linspace(1, 0.01, num_vertices) - connectivity = range(-1,num_segments) + connectivity = range(-1, num_segments) - vertices = np.array([x,y,z,d]).T + vertices = np.array([x, y, z, d]).T self.complex_vertices = vertices - + physical_mask = np.zeros(num_vertices) - #third segment is non-physical: + # third segment is non-physical: physical_mask[2] = 1 physical_mask[20] = 1 - - self.complex_morphology = am.ArrayMorphology(vertices=vertices, - connectivity=connectivity, - physical_mask=physical_mask, - id = 'test_arraymorph') - - self.valid_vertices = [[0,0,0,0.1], - [1,0,0,0.2], - [2,0,0,0.3], - [3,0,0,0.4]] - - self.valid_connectivity = [-1,0,1,2] - - self.optimized_morphology = am.ArrayMorphology(vertices=self.valid_vertices, - connectivity=self.valid_connectivity, - id = 'test_arraymorph') - - proximal_point = neuroml.Point3DWithDiam(x=0.1, - y=0.2, - z=0.3, - diameter=1.1,) - - distal_point = neuroml.Point3DWithDiam(x=0.0, - y=0.0, - z=0.0, - diameter=1.1,) - - soma = neuroml.Segment(proximal = proximal_point, - distal = distal_point,) + + self.complex_morphology = am.ArrayMorphology( + vertices=vertices, + connectivity=connectivity, + physical_mask=physical_mask, + id="test_arraymorph", + ) + + self.valid_vertices = [ + [0, 0, 0, 0.1], + [1, 0, 0, 0.2], + [2, 0, 0, 
0.3], + [3, 0, 0, 0.4], + ] + + self.valid_connectivity = [-1, 0, 1, 2] + + self.optimized_morphology = am.ArrayMorphology( + vertices=self.valid_vertices, + connectivity=self.valid_connectivity, + id="test_arraymorph", + ) + + proximal_point = neuroml.Point3DWithDiam( + x=0.1, + y=0.2, + z=0.3, + diameter=1.1, + ) + + distal_point = neuroml.Point3DWithDiam( + x=0.0, + y=0.0, + z=0.0, + diameter=1.1, + ) + + soma = neuroml.Segment( + proximal=proximal_point, + distal=distal_point, + ) self.small_morphology = am.ArrayMorphology() self.small_morphology.segments.append(soma) def test_single_segment_morphology_instantiation(self): print(self.small_morphology.connectivity) seg = self.small_morphology.segments[0] - self.assertIsInstance(seg,neuroml.nml.nml.Segment) + self.assertIsInstance(seg, neuroml.nml.nml.Segment) def test_single_segment_morphology_length(self): - self.assertEqual(len(self.small_morphology.segments),1) + self.assertEqual(len(self.small_morphology.segments), 1) def test_index_error(self): """ There is no segments[1] for a one-segment morphology """ - self.assertRaises(IndexError,self.small_morphology.segments.__getitem__,1) - + self.assertRaises(IndexError, self.small_morphology.segments.__getitem__, 1) + def test_single_floating_segment(self): """ Because physical_mask[4] = 1 a segment should be skipped as it is floating. 
""" - + seg = self.complex_morphology.segments[3] seg_proximal_x = seg.proximal.x seg_distal_x = seg.distal.x @@ -176,15 +190,15 @@ def test_single_floating_segment(self): equivalent_proximal_vertex = self.complex_vertices[5][0] equivalent_distal_vertex = self.complex_vertices[4][0] - self.assertEqual(seg_proximal_x,equivalent_proximal_vertex) - self.assertEqual(seg_distal_x,equivalent_distal_vertex) + self.assertEqual(seg_proximal_x, equivalent_proximal_vertex) + self.assertEqual(seg_distal_x, equivalent_distal_vertex) def test_double_floating_segment(self): """ Because physical_mask[4] = 1 a segment should be skipped as it is floating. """ - + seg = self.complex_morphology.segments[3] seg_proximal_x = seg.proximal.x seg_distal_x = seg.distal.x @@ -192,14 +206,13 @@ def test_double_floating_segment(self): equivalent_proximal_vertex = self.complex_vertices[5][0] equivalent_distal_vertex = self.complex_vertices[4][0] - self.assertEqual(seg_proximal_x,equivalent_proximal_vertex) - self.assertEqual(seg_distal_x,equivalent_distal_vertex) - + self.assertEqual(seg_proximal_x, equivalent_proximal_vertex) + self.assertEqual(seg_distal_x, equivalent_distal_vertex) def test_segments_len(self): num_segments = 98 len_segment_list = len(self.complex_morphology.segments) - self.assertEqual(num_segments,len_segment_list) + self.assertEqual(num_segments, len_segment_list) def test_add_segment_len(self): """ @@ -207,19 +220,22 @@ def test_add_segment_len(self): and distal vertices should be used. The internal connectivity should be passed. 
""" - - proximal_point = neuroml.Point3DWithDiam(x=0.1, - y=0.2, - z=0.3, - diameter=1.1,) - distal_point = neuroml.Point3DWithDiam(x=0.0, - y=0.0, - z=0.0, - diameter=1.1,) + proximal_point = neuroml.Point3DWithDiam( + x=0.1, + y=0.2, + z=0.3, + diameter=1.1, + ) - seg = neuroml.Segment(proximal = proximal_point, - distal = distal_point) + distal_point = neuroml.Point3DWithDiam( + x=0.0, + y=0.0, + z=0.0, + diameter=1.1, + ) + + seg = neuroml.Segment(proximal=proximal_point, distal=distal_point) num_segments = len(self.complex_morphology.segments) @@ -227,40 +243,42 @@ def test_add_segment_len(self): len_segment_list = len(self.complex_morphology.segments) - self.assertEqual(num_segments+1, len_segment_list) + self.assertEqual(num_segments + 1, len_segment_list) self.setUp() def test_add_segment_vertices_added(self): - proximal_point = neuroml.Point3DWithDiam(x=0.1, - y=0.2, - z=0.3, - diameter=0.1,) + proximal_point = neuroml.Point3DWithDiam( + x=0.1, + y=0.2, + z=0.3, + diameter=0.1, + ) - distal_point = neuroml.Point3DWithDiam(x=0.0, - y=0.0, - z=0.0, - diameter=0.1) + distal_point = neuroml.Point3DWithDiam(x=0.0, y=0.0, z=0.0, diameter=0.1) - seg = neuroml.Segment(proximal = proximal_point, - distal = distal_point) + seg = neuroml.Segment(proximal=proximal_point, distal=distal_point) num_segments = len(self.complex_morphology.segments) self.optimized_morphology.segments.append(seg) true_vertices = self.optimized_morphology.vertices - expected_vertices = np.array([[0,0,0,0.1], - [1,0,0,0.2], - [2,0,0,0.3], - [3,0,0,0.4], - [0,0,0,0.1], - [0.1,0.2,0.3,0.1],]) - - arrays_equal = np.array_equal(true_vertices,expected_vertices) + expected_vertices = np.array( + [ + [0, 0, 0, 0.1], + [1, 0, 0, 0.2], + [2, 0, 0, 0.3], + [3, 0, 0, 0.4], + [0, 0, 0, 0.1], + [0.1, 0.2, 0.3, 0.1], + ] + ) + + arrays_equal = np.array_equal(true_vertices, expected_vertices) self.assertTrue(arrays_equal) self.setUp() - + def tes_add_segment_connectivity_valid(self): pass @@ -269,7 +287,7 @@ 
def test_num_vertices(self): Morphology with one segment """ - self.assertEqual(self.optimized_morphology.num_vertices,4) + self.assertEqual(self.optimized_morphology.num_vertices, 4) def test_valid_morphology(self): """ @@ -280,99 +298,89 @@ def test_valid_morphology(self): # does not like. # Ignore the VisibleDeprecationWarning that numpy throws. with warnings.catch_warnings(): - warnings.filterwarnings("ignore", "Creating an ndarray from ragged nested sequences") - vertices=[[0,0,0],[1,1]] - connectivity=[-1,0] - self.assertRaises(AssertionError,am.ArrayMorphology,vertices,connectivity) - - vertices=[[0,0,0],[1,1,1]] - connectivity=[-1,0,0] - self.assertRaises(AssertionError,am.ArrayMorphology,vertices,connectivity) - - vertices=[[0,0,0],[1,1,1]] - connectivity=[] - self.assertRaises(AssertionError,am.ArrayMorphology,vertices,connectivity) + warnings.filterwarnings( + "ignore", "Creating an ndarray from ragged nested sequences" + ) + vertices = [[0, 0, 0], [1, 1]] + connectivity = [-1, 0] + self.assertRaises( + AssertionError, am.ArrayMorphology, vertices, connectivity + ) + + vertices = [[0, 0, 0], [1, 1, 1]] + connectivity = [-1, 0, 0] + self.assertRaises(AssertionError, am.ArrayMorphology, vertices, connectivity) + + vertices = [[0, 0, 0], [1, 1, 1]] + connectivity = [] + self.assertRaises(AssertionError, am.ArrayMorphology, vertices, connectivity) def test_root_index(self): - self.assertEqual(self.optimized_morphology.root_index,0) - + self.assertEqual(self.optimized_morphology.root_index, 0) + def test_physical_indeces(self): physical_indices = self.optimized_morphology.physical_indices - self.assertTrue(np.array_equal(physical_indices,[0,1,2,3])) + self.assertTrue(np.array_equal(physical_indices, [0, 1, 2, 3])) def test_children(self): - self.assertTrue(self.optimized_morphology.children(1),2) + self.assertTrue(self.optimized_morphology.children(1), 2) def test_to_root(self): - new_morphology = am.ArrayMorphology(self.optimized_morphology.vertices, - 
self.optimized_morphology.connectivity) + new_morphology = am.ArrayMorphology( + self.optimized_morphology.vertices, self.optimized_morphology.connectivity + ) new_morphology.to_root(2) new_connectivity = new_morphology.connectivity - self.assertTrue(np.array_equal(new_connectivity,[1,2,-1,2])) + self.assertTrue(np.array_equal(new_connectivity, [1, 2, -1, 2])) def test_to_neuroml_morphology(self): neuroml_morphology = self.optimized_morphology.to_neuroml_morphology(id="Test") - self.assertEqual(neuroml_morphology.id,"Test") - self.assertEqual(len(neuroml_morphology.segments),3) - + self.assertEqual(neuroml_morphology.id, "Test") + self.assertEqual(len(neuroml_morphology.segments), 3) def test_pop(self): - new_morphology = am.ArrayMorphology(self.optimized_morphology.vertices, - self.optimized_morphology.connectivity)# - + new_morphology = am.ArrayMorphology( + self.optimized_morphology.vertices, self.optimized_morphology.connectivity + ) # + new_morphology.pop(1) new_connectivity = new_morphology.connectivity - self.assertTrue(np.array_equal(new_connectivity,[-1,0,1])) + self.assertTrue(np.array_equal(new_connectivity, [-1, 0, 1])) def test_segment_getter(self): segment = self.optimized_morphology.segments[0] - self.assertIsInstance(segment,neuroml.Segment) - self.assertEqual(segment.proximal.diameter,0.2) - self.assertEqual(segment.distal.diameter,0.1) + self.assertIsInstance(segment, neuroml.Segment) + self.assertEqual(segment.proximal.diameter, 0.2) + self.assertEqual(segment.distal.diameter, 0.1) def test_segmentlist_getter(self): segment = self.optimized_morphology.segments[1] segment_again = self.optimized_morphology.segments[1] - self.assertEqual(segment,segment_again) + self.assertEqual(segment, segment_again) def test_segmentlist_setter(self): - p = neuroml.Point3DWithDiam(x=0.9, - y=0.0, - z=0.0, - diameter=0.1) - - d = neuroml.Point3DWithDiam(x=0.0, - y=0.0, - z=0.0, - diameter=0.1) + p = neuroml.Point3DWithDiam(x=0.9, y=0.0, z=0.0, diameter=0.1) - 
new_segment = neuroml.Segment(proximal=p, - distal=d) + d = neuroml.Point3DWithDiam(x=0.0, y=0.0, z=0.0, diameter=0.1) + + new_segment = neuroml.Segment(proximal=p, distal=d) self.optimized_morphology.segments[2] = new_segment - self.assertEqual(self.optimized_morphology.segments[2],new_segment) + self.assertEqual(self.optimized_morphology.segments[2], new_segment) def test_segmentlist_setter_by_inference(self): - p = neuroml.Point3DWithDiam(x=0.9, - y=0.0, - z=0.0, - diameter=0.1) - - d = neuroml.Point3DWithDiam(x=0.0, - y=0.0, - z=0.0, - diameter=0.1) + p = neuroml.Point3DWithDiam(x=0.9, y=0.0, z=0.0, diameter=0.1) - new_segment = neuroml.Segment(proximal=p, - distal=d) + d = neuroml.Point3DWithDiam(x=0.0, y=0.0, z=0.0, diameter=0.1) - self.optimized_morphology.segments[2] = new_segment - self.assertEqual(self.optimized_morphology.segments[2].proximal.x,0.9) + new_segment = neuroml.Segment(proximal=p, distal=d) + self.optimized_morphology.segments[2] = new_segment + self.assertEqual(self.optimized_morphology.segments[2].proximal.x, 0.9) def test_instantiation(self): """ @@ -389,9 +397,9 @@ def test_parents(self): test_segment_1 = self.optimized_morphology.segments[0] test_segment_2 = self.optimized_morphology.segments[1] - self.assertEqual(test_segment_1.id,1) - self.assertEqual(test_segment_2.id,2) - self.assertEqual(test_segment_2.parent.segments,1) + self.assertEqual(test_segment_1.id, 1) + self.assertEqual(test_segment_2.id, 2) + self.assertEqual(test_segment_2.parent.segments, 1) self.assertIsNone(test_segment_1.parent) def test_valid_morphology_ids(self): @@ -400,7 +408,7 @@ def test_valid_morphology_ids(self): def test_invalid_morphology_ids(self): morphology = self.optimized_morphology - morphology.segments[0].id = 5 + morphology.segments[0].id = 5 self.assertFalse(morphology.valid_ids) def test_large_arraymorph(self): @@ -414,25 +422,28 @@ def test_large_arraymorph(self): num_segments = int(1e6) num_vertices = num_segments + 1 - x = 
np.linspace(0,10,num_vertices) + x = np.linspace(0, 10, num_vertices) y = np.zeros(num_vertices) z = np.zeros(num_vertices) - d = np.linspace(1,0.01,num_vertices) + d = np.linspace(1, 0.01, num_vertices) + + vertices = np.array([x, y, z, d]).T + + connectivity = range(-1, num_segments) - vertices = np.array([x,y,z,d]).T - - connectivity = range(-1,num_segments) + big_arraymorph = am.ArrayMorphology( + vertices=vertices, connectivity=connectivity + ) - big_arraymorph = am.ArrayMorphology(vertices = vertices, - connectivity = connectivity) + self.assertIsInstance(big_arraymorph.segments[3], neuroml.Segment) - self.assertIsInstance(big_arraymorph.segments[3],neuroml.Segment) + self.assertEqual(big_arraymorph.segments[0].distal.diameter, 1.0) + # following test not as obvious as it seems - first execution of getter does not have the same result as second + self.assertEqual(big_arraymorph.segments[2333], big_arraymorph.segments[2333]) - self.assertEqual(big_arraymorph.segments[0].distal.diameter,1.0) - #following test not as obvious as it seems - first execution of getter does not have the same result as second - self.assertEqual(big_arraymorph.segments[2333],big_arraymorph.segments[2333]) - - self.assertEqual(big_arraymorph.segments[0].distal.diameter,1.0) - self.assertEqual(big_arraymorph.segments[num_segments-1].proximal.x,10.0) - self.assertEqual(big_arraymorph.segments[0].distal.x,0.0) - self.assertEqual(big_arraymorph.segments[num_segments-1].proximal.diameter,0.01) + self.assertEqual(big_arraymorph.segments[0].distal.diameter, 1.0) + self.assertEqual(big_arraymorph.segments[num_segments - 1].proximal.x, 10.0) + self.assertEqual(big_arraymorph.segments[0].distal.x, 0.0) + self.assertEqual( + big_arraymorph.segments[num_segments - 1].proximal.diameter, 0.01 + ) diff --git a/neuroml/test/test_cell.py b/neuroml/test/test_cell.py index 89a24686..f0a239d3 100644 --- a/neuroml/test/test_cell.py +++ b/neuroml/test/test_cell.py @@ -24,84 +24,92 @@ class 
TestCell(unittest.TestCase): - def test_cell_methods(self): - cells = ['Purk2M9s','pyr_4_sym.cell'] + cells = ["Purk2M9s", "pyr_4_sym.cell"] - cells = ['pyr_4_sym'] + cells = ["pyr_4_sym"] for cell_name in cells: - local_path = '../examples/test_files/%s.cell.nml'%cell_name + local_path = "../examples/test_files/%s.cell.nml" % cell_name if os.path.isfile(local_path): test_file_path = local_path else: root_dir = os.path.dirname(neuroml.__file__) - test_file_path = os.path.join(root_dir,'examples/test_files/%s.cell.nml'%cell_name) - print('test file path is: '+test_file_path) + test_file_path = os.path.join( + root_dir, "examples/test_files/%s.cell.nml" % cell_name + ) + print("test file path is: " + test_file_path) doc = loaders.NeuroMLLoader.load(test_file_path) cell = doc.cells[0] - self.assertEqual(cell.id,cell_name.split('.')[0]) + self.assertEqual(cell.id, cell_name.split(".")[0]) exp_num_segs = 9 - self.assertEqual(cell.morphology.num_segments,exp_num_segs) - self.assertEqual(len(cell.get_segment_ids_vs_segments()),exp_num_segs) - self.assertRaises(Exception, lambda: cell.get_segment(-1)) # Seg -1 doesn't exist... + self.assertEqual(cell.morphology.num_segments, exp_num_segs) + self.assertEqual(len(cell.get_segment_ids_vs_segments()), exp_num_segs) + self.assertRaises( + Exception, lambda: cell.get_segment(-1) + ) # Seg -1 doesn't exist... cell.summary() - #cell.get_ordered_segments_in_groups = get_ordered_segments_in_groups + # cell.get_ordered_segments_in_groups = get_ordered_segments_in_groups - for grp in ['soma_group', ['soma_group','basal_dends'],['dendrite_group'],['all']]: + for grp in [ + "soma_group", + ["soma_group", "basal_dends"], + ["dendrite_group"], + ["all"], + ]: print("-----------------------------") - print(" Testing %s..."%grp) + print(" Testing %s..." 
% grp) segs, cuml, path_prox, path_dist = cell.get_ordered_segments_in_groups( grp, - check_parentage=(grp==['all']), + check_parentage=(grp == ["all"]), include_cumulative_lengths=True, - include_path_lengths=True + include_path_lengths=True, ) - print("Segs %s: %s"%(grp,segs)) - print("Cuml %s: %s"%(grp,cuml)) - print("Prox %s: %s"%(grp,path_prox)) - print("Dist %s: %s"%(grp,path_dist)) + print("Segs %s: %s" % (grp, segs)) + print("Cuml %s: %s" % (grp, cuml)) + print("Prox %s: %s" % (grp, path_prox)) + print("Dist %s: %s" % (grp, path_dist)) for s in segs: - assert len(segs[s])==len(cuml[s]) + assert len(segs[s]) == len(cuml[s]) ##assert len(segs[s])==len(path_prox[s]) ##assert len(segs[s])==len(path_dist[s]) - if grp=='soma_group': - assert len(segs['soma_group'])==1 - soma_len = cuml['soma_group'][-1] - print("soma_len: %s"%soma_len) + if grp == "soma_group": + assert len(segs["soma_group"]) == 1 + soma_len = cuml["soma_group"][-1] + print("soma_len: %s" % soma_len) - if grp==['all']: - assert len(segs['all'])==9 - all_len = cuml['all'][-1] + if grp == ["all"]: + assert len(segs["all"]) == 9 + all_len = cuml["all"][-1] - if grp==['dendrite_group']: - assert len(segs['dendrite_group'])==8 - dend_len = cuml['dendrite_group'][-1] - print("dend_len: %s"%dend_len) + if grp == ["dendrite_group"]: + assert len(segs["dendrite_group"]) == 8 + dend_len = cuml["dendrite_group"][-1] + print("dend_len: %s" % dend_len) - assert all_len == soma_len+dend_len + assert all_len == soma_len + dend_len def test_cell_methods2(self): - cell = Cell(id='cell0') + cell = Cell(id="cell0") - diam = 1. 
- d0=Point3DWithDiam(x=0, y=0, z=0, diameter=diam) - p=Point3DWithDiam(x=0, y=0, z=0, diameter=diam) + diam = 1.0 + d0 = Point3DWithDiam(x=0, y=0, z=0, diameter=diam) + p = Point3DWithDiam(x=0, y=0, z=0, diameter=diam) - seg0 = Segment(id=0, name='soma',proximal=p, distal=d0) + seg0 = Segment(id=0, name="soma", proximal=p, distal=d0) - d1=Point3DWithDiam(x=10, y=0, z=0, diameter=diam) + d1 = Point3DWithDiam(x=10, y=0, z=0, diameter=diam) cell.morphology = Morphology() cell.morphology.segments.append(seg0) @@ -109,67 +117,78 @@ def test_cell_methods2(self): seg1 = Segment(id=1, distal=d1, parent=SegmentParent(0)) cell.morphology.segments.append(seg1) - d2=Point3DWithDiam(x=20, y=0, z=0, diameter=diam) + d2 = Point3DWithDiam(x=20, y=0, z=0, diameter=diam) seg2 = Segment(id=2, proximal=d1, distal=d2, parent=SegmentParent(seg1.id)) cell.morphology.segments.append(seg2) - d3=Point3DWithDiam(x=20, y=10, z=0, diameter=diam) + d3 = Point3DWithDiam(x=20, y=10, z=0, diameter=diam) seg3 = Segment(id=3, distal=d3, parent=SegmentParent(seg2.id, fraction_along=1)) cell.morphology.segments.append(seg3) - sg1 = SegmentGroup(id='all') - for seg in [seg0,seg1,seg2,seg3]: + sg1 = SegmentGroup(id="all") + for seg in [seg0, seg1, seg2, seg3]: sg1.members.append(Member(seg.id)) cell.morphology.segment_groups.append(sg1) - sg2 = SegmentGroup(id='soma_group') + sg2 = SegmentGroup(id="soma_group") for seg in [seg0]: sg2.members.append(Member(seg.id)) cell.morphology.segment_groups.append(sg2) - sg3 = SegmentGroup(id='dend_group') - for seg in [seg1,seg2,seg3]: + sg3 = SegmentGroup(id="dend_group") + for seg in [seg1, seg2, seg3]: sg3.members.append(Member(seg.id)) cell.morphology.segment_groups.append(sg3) - sg4 = SegmentGroup(id='soma_dends') - for sg in [sg2,sg3]: + sg4 = SegmentGroup(id="soma_dends") + for sg in [sg2, sg3]: sg4.includes.append(Include(sg.id)) cell.morphology.segment_groups.append(sg4) - expected = {sg1.id:4,sg2.id:1,sg3.id:3,sg4.id:4} + expected = {sg1.id: 4, sg2.id: 1, 
sg3.id: 3, sg4.id: 4} - for sg in [sg1,sg2,sg3,sg4]: + for sg in [sg1, sg2, sg3, sg4]: segs = cell.get_all_segments_in_group(sg.id) - print('\nSeg group %s has segments: %s'%(sg,segs)) - self.assertEqual(expected[sg.id],len(segs)) + print("\nSeg group %s has segments: %s" % (sg, segs)) + self.assertEqual(expected[sg.id], len(segs)) osegs = cell.get_ordered_segments_in_groups(sg.id) - print('Seg group %s has ordered segments: %s'%(sg.id,osegs)) - self.assertEqual(expected[sg.id],len(osegs[sg.id])) - - ord_segs, cumulative_lengths, path_lengths_to_proximal, path_lengths_to_distal = cell.get_ordered_segments_in_groups( - sg.id, - include_cumulative_lengths=True, - include_path_lengths=True + print("Seg group %s has ordered segments: %s" % (sg.id, osegs)) + self.assertEqual(expected[sg.id], len(osegs[sg.id])) + + ( + ord_segs, + cumulative_lengths, + path_lengths_to_proximal, + path_lengths_to_distal, + ) = cell.get_ordered_segments_in_groups( + sg.id, include_cumulative_lengths=True, include_path_lengths=True ) - print('Seg group %s has cumulative_lengths: %s'%(sg.id,cumulative_lengths)) - self.assertEqual(expected[sg.id],len(cumulative_lengths[sg.id])) + print( + "Seg group %s has cumulative_lengths: %s" % (sg.id, cumulative_lengths) + ) + self.assertEqual(expected[sg.id], len(cumulative_lengths[sg.id])) - print('Seg group %s has path_lengths_to_proximal: %s'%(sg.id,path_lengths_to_proximal)) - self.assertEqual(expected[sg.id],len(path_lengths_to_proximal[sg.id])) + print( + "Seg group %s has path_lengths_to_proximal: %s" + % (sg.id, path_lengths_to_proximal) + ) + self.assertEqual(expected[sg.id], len(path_lengths_to_proximal[sg.id])) - print('Seg group %s has path_lengths_to_distal: %s'%(sg.id,path_lengths_to_distal)) - self.assertEqual(expected[sg.id],len(path_lengths_to_distal[sg.id])) + print( + "Seg group %s has path_lengths_to_distal: %s" + % (sg.id, path_lengths_to_distal) + ) + self.assertEqual(expected[sg.id], len(path_lengths_to_distal[sg.id])) def 
runTest(self): print("Running tests in TestCell") -if __name__ == '__main__': +if __name__ == "__main__": ta = TestCell() ta.test_cell_methods() diff --git a/neuroml/test/test_hdf5_optimized.py b/neuroml/test/test_hdf5_optimized.py index 300cfdaa..63837f6b 100644 --- a/neuroml/test/test_hdf5_optimized.py +++ b/neuroml/test/test_hdf5_optimized.py @@ -19,64 +19,66 @@ class TestNeuroMLHDF5Optimized(unittest.TestCase): base_dir = os.path.dirname(__file__) - #base_dir = '.' + # base_dir = '.' def runTest(self): print("Running tests in TestNeuroMLHDF5Optimized") def test_write_load(self): - #for f in []: - #for f in ['complete.nml']: - #for f in ['simplenet.nml','testh5.nml','MediumNet.net.nml','complete.nml']: + # for f in []: + # for f in ['complete.nml']: + # for f in ['simplenet.nml','testh5.nml','MediumNet.net.nml','complete.nml']: - for f in ['simplenet.nml','MediumNet.net.nml']: - file_name = '%s/../examples/test_files/%s'%(self.base_dir,f) + for f in ["simplenet.nml", "MediumNet.net.nml"]: + file_name = "%s/../examples/test_files/%s" % (self.base_dir, f) - print("Loading %s"%file_name) + print("Loading %s" % file_name) - nml_doc0 = loaders.read_neuroml2_file(file_name,include_includes=True) + nml_doc0 = loaders.read_neuroml2_file(file_name, include_includes=True) summary0 = nml_doc0.summary() print(summary0) - nml_h5_file = '%s/../examples/tmp/%s__1.h5'%(self.base_dir,f) + nml_h5_file = "%s/../examples/tmp/%s__1.h5" % (self.base_dir, f) writers.NeuroMLHdf5Writer.write(nml_doc0, nml_h5_file) - print("Written to: %s"%nml_h5_file) + print("Written to: %s" % nml_h5_file) - nml_doc1 = loaders.read_neuroml2_file(nml_h5_file,include_includes=True,optimized=True) + nml_doc1 = loaders.read_neuroml2_file( + nml_h5_file, include_includes=True, optimized=True + ) - summary1 = nml_doc1.summary().replace(' (optimized)','') - print('\n'+summary1) + summary1 = nml_doc1.summary().replace(" (optimized)", "") + print("\n" + summary1) - compare(summary0,summary1) + compare(summary0, 
summary1) - nml_h5_file_2 = '%s/../examples/tmp/%s__2.h5'%(self.base_dir,f) + nml_h5_file_2 = "%s/../examples/tmp/%s__2.h5" % (self.base_dir, f) writers.NeuroMLHdf5Writer.write(nml_doc1, nml_h5_file_2) - print("Written to: %s"%nml_h5_file_2) - #exit() - nml_doc2 = loaders.read_neuroml2_file(nml_h5_file_2,include_includes=True) + print("Written to: %s" % nml_h5_file_2) + # exit() + nml_doc2 = loaders.read_neuroml2_file(nml_h5_file_2, include_includes=True) summary2 = nml_doc2.summary() - print("Reloaded: %s"%nml_h5_file_2) - print('\n'+summary2) + print("Reloaded: %s" % nml_h5_file_2) + print("\n" + summary2) - compare(summary0,summary2) + compare(summary0, summary2) - nml_h5_file_3 = '%s/../examples/tmp/%s__3.nml'%(self.base_dir,f) + nml_h5_file_3 = "%s/../examples/tmp/%s__3.nml" % (self.base_dir, f) writers.NeuroMLWriter.write(nml_doc1, nml_h5_file_3) - print("Written to: %s"%nml_h5_file_3) + print("Written to: %s" % nml_h5_file_3) - nml_doc3 = loaders.read_neuroml2_file(nml_h5_file_3,include_includes=True) + nml_doc3 = loaders.read_neuroml2_file(nml_h5_file_3, include_includes=True) summary3 = nml_doc3.summary() - print("Reloaded: %s"%nml_h5_file_3) - print('\n'+summary3) + print("Reloaded: %s" % nml_h5_file_3) + print("\n" + summary3) - compare(summary0,summary3) + compare(summary0, summary3) -if __name__ == '__main__': +if __name__ == "__main__": tnxp = TestNeuroMLHDF5Optimized() tnxp.test_write_load() diff --git a/neuroml/test/test_hdf5_parser.py b/neuroml/test/test_hdf5_parser.py index 1e0a3477..8b5b80f0 100644 --- a/neuroml/test/test_hdf5_parser.py +++ b/neuroml/test/test_hdf5_parser.py @@ -18,58 +18,58 @@ class TestNeuroMLHDF5Parser(unittest.TestCase): base_dir = os.path.dirname(__file__) - #base_dir = '.' + # base_dir = '.' 
def test_write_load_hdf5(self): - #for f in []: - #for f in ['MediumNet.net.nml']: - for f in ['simplenet.nml','testh5.nml','MediumNet.net.nml','complete.nml']: - file_name = '%s/../examples/test_files/%s'%(self.base_dir,f) + # for f in []: + # for f in ['MediumNet.net.nml']: + for f in ["simplenet.nml", "testh5.nml", "MediumNet.net.nml", "complete.nml"]: + file_name = "%s/../examples/test_files/%s" % (self.base_dir, f) - print("Loading %s"%file_name) + print("Loading %s" % file_name) - nml_doc0 = loaders.read_neuroml2_file(file_name,include_includes=True) + nml_doc0 = loaders.read_neuroml2_file(file_name, include_includes=True) summary0 = nml_doc0.summary() print(summary0) - nml_h5_file = '%s/../examples/tmp/%s.h5'%(self.base_dir,f) + nml_h5_file = "%s/../examples/tmp/%s.h5" % (self.base_dir, f) writers.NeuroMLHdf5Writer.write(nml_doc0, nml_h5_file) - print("Written to: %s"%nml_h5_file) + print("Written to: %s" % nml_h5_file) nml_doc2 = loaders.NeuroMLHdf5Loader.load(nml_h5_file) summary1 = nml_doc2.summary() - print('\n'+summary1) + print("\n" + summary1) - compare(summary0,summary1) + compare(summary0, summary1) def test_parse(self): - file_name = self.base_dir+'/../examples/test_files/testh5.nml' + file_name = self.base_dir + "/../examples/test_files/testh5.nml" nml_doc0 = loaders.NeuroMLLoader.load(file_name) - summary0 = nml_doc0.summary(show_includes=False,show_non_network=False) - print('\n'+summary0) + summary0 = nml_doc0.summary(show_includes=False, show_non_network=False) + print("\n" + summary0) - print('-------------------------------\n\n') + print("-------------------------------\n\n") - nml_h5_file = self.base_dir+'/../examples/tmp/testh5a.nml.h5' + nml_h5_file = self.base_dir + "/../examples/tmp/testh5a.nml.h5" writers.NeuroMLHdf5Writer.write(nml_doc0, nml_h5_file) - print("Written H5 network file to: "+nml_h5_file) + print("Written H5 network file to: " + nml_h5_file) nml_doc2 = loaders.NeuroMLHdf5Loader.load(nml_h5_file) - summary1 = 
nml_doc2.summary(show_includes=False,show_non_network=False) - print('\n'+summary1) + summary1 = nml_doc2.summary(show_includes=False, show_non_network=False) + print("\n" + summary1) - compare(summary0,summary1) + compare(summary0, summary1) def runTest(self): print("Running tests in TestNeuroMLHDF5Parser") -if __name__ == '__main__': +if __name__ == "__main__": tnxp = TestNeuroMLHDF5Parser() tnxp.test_write_load_hdf5() diff --git a/neuroml/test/test_integration.py b/neuroml/test/test_integration.py index 75bc578c..e5ef4c3f 100644 --- a/neuroml/test/test_integration.py +++ b/neuroml/test/test_integration.py @@ -7,38 +7,38 @@ except ImportError: import unittest -class TestIntegration(unittest.TestCase): +class TestIntegration(unittest.TestCase): def setUp(self): """ Make an optimized morphology, add a couple of segments, save it and then load it back """ - - vertices = [[0,0,0,0.1],[1,0,0,0.2],[2,0,0,0.3],[3,0,0,0.4]] - connectivity = [-1,0,1,2] - - self.optimized_morphology = am.ArrayMorphology(vertices=vertices, - connectivity=connectivity, - id="arraymorph_test") + + vertices = [[0, 0, 0, 0.1], [1, 0, 0, 0.2], [2, 0, 0, 0.3], [3, 0, 0, 0.4]] + connectivity = [-1, 0, 1, 2] + + self.optimized_morphology = am.ArrayMorphology( + vertices=vertices, connectivity=connectivity, id="arraymorph_test" + ) seg = neuroml.Segment() - #TODO: - #self.optimized_morphology.segments.append(seg) + # TODO: + # self.optimized_morphology.segments.append(seg) doc = neuroml.NeuroMLDocument() cell = neuroml.Cell() def test_to_neuroml_morphology_and_write(self): - neuroml_morphology = self.optimized_morphology.to_neuroml_morphology(id="Test") - self.assertEqual(neuroml_morphology.id,"Test") - self.assertEqual(len(neuroml_morphology.segments),3) - self.assertIsNone(writers.NeuroMLWriter.write(neuroml_morphology,'/dev/null')) + neuroml_morphology = self.optimized_morphology.to_neuroml_morphology(id="Test") + self.assertEqual(neuroml_morphology.id, "Test") + 
self.assertEqual(len(neuroml_morphology.segments), 3) + self.assertIsNone(writers.NeuroMLWriter.write(neuroml_morphology, "/dev/null")) def test_arraymorph_properties(self): - self.assertEqual(self.optimized_morphology.id,"arraymorph_test") - + self.assertEqual(self.optimized_morphology.id, "arraymorph_test") + def test_arraymorph_write(self): - writers.NeuroMLWriter.write(self.optimized_morphology,'/dev/null') + writers.NeuroMLWriter.write(self.optimized_morphology, "/dev/null") diff --git a/neuroml/test/test_loaders.py b/neuroml/test/test_loaders.py index 08498062..8e98d4bf 100644 --- a/neuroml/test/test_loaders.py +++ b/neuroml/test/test_loaders.py @@ -12,19 +12,32 @@ except ImportError: import unittest + class TestNeuroMLLoader(unittest.TestCase): def test_load_neuroml(self): root_dir = os.path.dirname(neuroml.__file__) - print('root dir is: '+root_dir) - test_file_path = os.path.join(root_dir,'examples/test_files/Purk2M9s.nml') - print('test fi le path is: ' + test_file_path) + print("root dir is: " + root_dir) + test_file_path = os.path.join(root_dir, "examples/test_files/Purk2M9s.nml") + print("test fi le path is: " + test_file_path) - f = open(test_file_path,'r') - #print(f.read()) + f = open(test_file_path, "r") + # print(f.read()) doc = loaders.NeuroMLLoader.load(test_file_path) - self.assertEqual(doc.id,'Purk2M9s') + self.assertEqual(doc.id, "Purk2M9s") f.close() - print('Finished test') + print("Finished test") + + def test_non_neuroml_file(self): + """Test an non-NeuroML document.""" + root_dir = os.path.dirname(neuroml.__file__) + test_file_path = os.path.join(root_dir, "examples/test_files/sbml-example.xml") + print("test file path is: " + test_file_path) + + try: + loaders.NeuroMLLoader.load(test_file_path) + except TypeError: + print("Exception raised. 
Test passes.") + if __name__ == "__main__": t = TestNeuroMLLoader() diff --git a/neuroml/test/test_morphology.py b/neuroml/test/test_morphology.py index c0366611..69fd3e9a 100644 --- a/neuroml/test/test_morphology.py +++ b/neuroml/test/test_morphology.py @@ -9,22 +9,21 @@ import unittest2 as unittest except ImportError: import unittest - -class TestSingleMorphology(unittest.TestCase): +class TestSingleMorphology(unittest.TestCase): def setUp(self): self.test_morphology = neuroml.Morphology() self.test_morphology.id = "TestMorph" for i in range(10): self.test_morphology.segments.append(neuroml.Segment()) - + def test_id(self): """ Test if Morphology instantiation and id assignment is working """ - self.assertEqual(self.test_morphology.id,"TestMorph") + self.assertEqual(self.test_morphology.id, "TestMorph") def test_num_segments(self): - self.assertEqual(self.test_morphology.num_segments,10) + self.assertEqual(self.test_morphology.num_segments, 10) diff --git a/neuroml/test/test_nml.py b/neuroml/test/test_nml.py new file mode 100644 index 00000000..30c5b3d7 --- /dev/null +++ b/neuroml/test/test_nml.py @@ -0,0 +1,165 @@ +#!/usr/bin/env python3 +""" +Enter one line description here. 
+ +File: + +Copyright 2021 Ankur Sinha +Author: Ankur Sinha +""" + + +import neuroml + +try: + import unittest2 as unittest +except ImportError: + import unittest + +import platform + + +class TestNML(unittest.TestCase): + + """Tests related to nml.py""" + + def test_get_members(self): + """Test get_members()""" + example_base = neuroml.Base(id="examplebase") + base_members = example_base.get_members() + + member_names = [] + for m in base_members: + member_names.append(m.get_name()) + + # From parent: baseWithoutId + self.assertIn("neuro_lex_id", member_names) + # From itself + self.assertIn("id", member_names) + # Not in here: + self.assertNotIn("cell", member_names) + + def test_generic_add_single(self): + """Test the generic add function for single addition.""" + doc = neuroml.NeuroMLDocument(id="testdoc") + cell = neuroml.Cell(id="testcell") + cell1 = neuroml.Cell(id="testcell1") + biprop = neuroml.BiophysicalProperties(id="biprops") + net = neuroml.Network(id="net") + + # Success: returns nothing (None) + self.assertIsNone(doc.add(cell)) + + # Already added, so throw exception + if int(platform.python_version_tuple()[0]) > 2: + with self.assertWarns(UserWarning): + doc.add(cell) + else: + doc.add(cell) + + # Success + self.assertIsNone(doc.add(cell1)) + + # str is not the type for any member + with self.assertRaises(Exception): + doc.add("A STRING") + + # success + self.assertIsNone(cell.add(biprop)) + self.assertIsNone(cell1.add(biprop)) + + # failures + with self.assertRaises(Exception): + cell.add(net) + with self.assertRaises(Exception): + biprop.add(net) + + def test_generic_add_multiple(self): + """Test add() when an object may belong to multiple members. + + From our example on the docs. 
+ """ + na_channel = neuroml.IonChannelHH( + id="na_channel", + notes="Sodium channel for HH cell", + conductance="10pS", + species="na", + ) + gate_m = neuroml.GateHHRates( + id="na_m", instances="3", notes="m gate for na channel" + ) + + m_forward_rate = neuroml.HHRate( + type="HHExpLinearRate", rate="1per_ms", midpoint="-40mV", scale="10mV" + ) + m_reverse_rate = neuroml.HHRate( + type="HHExpRate", rate="4per_ms", midpoint="-65mV", scale="-18mV" + ) + + # HHRate can go to two different members, so an exception is thrown + # needs hint, as done below + with self.assertRaises(Exception): + gate_m.add(m_forward_rate) + with self.assertRaises(Exception): + gate_m.add(m_reverse_rate) + + gate_m.add(m_forward_rate, hint="forward_rate") + gate_m.add(m_reverse_rate, hint="reverse_rate") + + na_channel.gate_hh_rates.append(gate_m) + + gate_h = neuroml.GateHHRates( + id="na_h", instances="1", notes="h gate for na channel" + ) + h_forward_rate = neuroml.HHRate( + type="HHExpRate", rate="0.07per_ms", midpoint="-65mV", scale="-20mV" + ) + h_reverse_rate = neuroml.HHRate( + type="HHSigmoidRate", rate="1per_ms", midpoint="-35mV", scale="10mV" + ) + + # HHRate can go to two different members, so an exception is thrown + # needs hint, as done below + with self.assertRaises(Exception): + gate_h.add(h_forward_rate) + with self.assertRaises(Exception): + gate_h.add(h_reverse_rate) + + gate_h.add(h_forward_rate, hint="forward_rate") + gate_h.add(h_reverse_rate, hint="reverse_rate") + + def test_add_to_container(self): + """Test adding multiple objects to a container class.""" + network = neuroml.Network() + # They have the same id, but they are unique objects as far as Python + # is concerned + pop0 = neuroml.Population() + pop1 = neuroml.Population() + pop2 = neuroml.Population() + + network.add(pop0) + network.add(pop1) + network.add(pop2) + + pop3 = neuroml.Population(id="unique") + network.add(pop3) + # warning because this is already added + if 
int(platform.python_version_tuple()[0]) > 2: + with self.assertWarns(UserWarning): + network.add(pop3) + else: + network.add(pop3) + + # Note that for Python, this is a new object + # So we can add it again + pop4 = neuroml.Population(id="unique") + network.add(pop4) + + def test_info(self): + """Test getting member info.""" + cell = neuroml.Cell(id="testcell") + info = cell.info() + if int(platform.python_version_tuple()[0]) > 2: + self.assertRegex(info, "morphology") + self.assertRegex(info, "biophysical_properties") + self.assertNotRegex(info, "network") diff --git a/neuroml/test/test_segment.py b/neuroml/test/test_segment.py index 77342157..99925075 100644 --- a/neuroml/test/test_segment.py +++ b/neuroml/test/test_segment.py @@ -15,25 +15,24 @@ import unittest2 as unittest except ImportError: import unittest - -class TestSingleSegment(unittest.TestCase): +class TestSingleSegment(unittest.TestCase): def test_proximal_diameter(self): - proximal_point = Point3DWithDiam(diameter = 3.21) - seg = Segment(proximal = proximal_point) + proximal_point = Point3DWithDiam(diameter=3.21) + seg = Segment(proximal=proximal_point) self.assertEqual(seg.proximal.diameter, 3.21) def test_distal_diameter(self): - distal_point = Point3DWithDiam(diameter = 3.21) - seg = Segment(distal = distal_point) + distal_point = Point3DWithDiam(diameter=3.21) + seg = Segment(distal=distal_point) - self.assertEqual(seg.distal.diameter,3.21) + self.assertEqual(seg.distal.diameter, 3.21) def test_proximal_coordinates(self): proximal_point = Point3DWithDiam(x=0.1, y=0.2, z=0.3) - seg = Segment(proximal = proximal_point) + seg = Segment(proximal=proximal_point) self.assertEqual(seg.proximal.x, 0.1) self.assertEqual(seg.proximal.y, 0.2) @@ -41,248 +40,206 @@ def test_proximal_coordinates(self): def test_distal_coordinates(self): distal_point = Point3DWithDiam(x=0.1, y=0.2, z=0.3) - seg = Segment(distal = distal_point) + seg = Segment(distal=distal_point) self.assertEqual(seg.distal.x, 0.1) 
self.assertEqual(seg.distal.y, 0.2) self.assertEqual(seg.distal.z, 0.3) + class TestHelperProperties(unittest.TestCase): """ Tests for helper properties for volume, area etc These are not part of the neuroML schema itself """ + def test_length0(self): - diam = 1. - len = 1. - d=Point3DWithDiam(x=0, - y=0, - z=0, - diameter=diam) - p=Point3DWithDiam(x=0, - y=len, - z=0, - diameter=diam) - + diam = 1.0 + len = 1.0 + d = Point3DWithDiam(x=0, y=0, z=0, diameter=diam) + p = Point3DWithDiam(x=0, y=len, z=0, diameter=diam) + seg = Segment(proximal=p, distal=d) self.assertAlmostEqual(seg.length, 1, places=7) - + def test_length(self): - d=Point3DWithDiam(x=0.2, - y=2.4, - z=3.5, - diameter=0.6) - p=Point3DWithDiam(x=0.5, - y=2.0, - z=1.8, - diameter=0.9) - + d = Point3DWithDiam(x=0.2, y=2.4, z=3.5, diameter=0.6) + p = Point3DWithDiam(x=0.5, y=2.0, z=1.8, diameter=0.9) + seg = Segment(proximal=p, distal=d) self.assertAlmostEqual(seg.length, 1.772004514, places=7) def test_area0(self): - diam = 1. - len = 1. - d=Point3DWithDiam(x=0, - y=0, - z=0, - diameter=diam) - p=Point3DWithDiam(x=0, - y=len, - z=0, - diameter=diam) - + diam = 1.0 + len = 1.0 + d = Point3DWithDiam(x=0, y=0, z=0, diameter=diam) + p = Point3DWithDiam(x=0, y=len, z=0, diameter=diam) + seg = Segment(proximal=p, distal=d) - self.assertAlmostEqual(seg.surface_area, math.pi*diam*len, places=7) - + self.assertAlmostEqual(seg.surface_area, math.pi * diam * len, places=7) + def test_area1(self): - diam = 1. - len = 1. 
- d=Point3DWithDiam(x=0, - y=0, - z=0, - diameter=diam) - p=Point3DWithDiam(x=0, - y=len, - z=0, - diameter=diam*1.01) - + diam = 1.0 + len = 1.0 + d = Point3DWithDiam(x=0, y=0, z=0, diameter=diam) + p = Point3DWithDiam(x=0, y=len, z=0, diameter=diam * 1.01) + seg = Segment(proximal=p, distal=d) self.assertAlmostEqual(seg.surface_area, 3.15734008, places=7) - - + def test_area(self): - d=Point3DWithDiam(x=0.2, - y=2.4, - z=3.5, - diameter=0.6) - p=Point3DWithDiam(x=0.5, - y=2.0, - z=1.8, - diameter=0.9) - + d = Point3DWithDiam(x=0.2, y=2.4, z=3.5, diameter=0.6) + p = Point3DWithDiam(x=0.5, y=2.0, z=1.8, diameter=0.9) + seg = Segment(proximal=p, distal=d) self.assertAlmostEqual(seg.surface_area, 4.19011944, places=7) - def test_volume0(self): - - diam = 1. - len = 1. - d=Point3DWithDiam(x=0, - y=0, - z=0, - diameter=diam) - p=Point3DWithDiam(x=0, - y=len, - z=0, - diameter=diam) + + diam = 1.0 + len = 1.0 + d = Point3DWithDiam(x=0, y=0, z=0, diameter=diam) + p = Point3DWithDiam(x=0, y=len, z=0, diameter=diam) seg = Segment(proximal=p, distal=d) - self.assertAlmostEqual(seg.volume, math.pi*(diam/2)**2*len, places=7) - + self.assertAlmostEqual(seg.volume, math.pi * (diam / 2) ** 2 * len, places=7) + def test_volume1(self): - - diam = 1. - len = 1. 
- d=Point3DWithDiam(x=0, - y=0, - z=0, - diameter=diam) - p=Point3DWithDiam(x=0, - y=len, - z=0, - diameter=diam*1.01) + + diam = 1.0 + len = 1.0 + d = Point3DWithDiam(x=0, y=0, z=0, diameter=diam) + p = Point3DWithDiam(x=0, y=len, z=0, diameter=diam * 1.01) seg = Segment(proximal=p, distal=d) self.assertAlmostEqual(seg.volume, 0.793278324, places=7) - def test_volume(self): - d=Point3DWithDiam(x=0.2, - y=2.4, - z=3.5, - diameter=0.6) - p=Point3DWithDiam(x=0.5, - y=2.0, - z=1.8, - diameter=0.9) + d = Point3DWithDiam(x=0.2, y=2.4, z=3.5, diameter=0.6) + p = Point3DWithDiam(x=0.5, y=2.0, z=1.8, diameter=0.9) seg = Segment(proximal=p, distal=d) self.assertAlmostEqual(seg.volume, 0.7932855820702964, places=7) - - + def test_spherical(self): - - diam = 1. - d=Point3DWithDiam(x=0, - y=0, - z=0, - diameter=diam) - p=Point3DWithDiam(x=0, - y=0, - z=0, - diameter=diam) - + + diam = 1.0 + d = Point3DWithDiam(x=0, y=0, z=0, diameter=diam) + p = Point3DWithDiam(x=0, y=0, z=0, diameter=diam) + seg = Segment(id=0, proximal=p, distal=d) self.assertEqual(seg.length, 0) self.assertEqual(seg.surface_area, math.pi) - - + def test_cell_with_segs(self): - - cell = Cell(id='cell0') - - diam = 1. 
- d0=Point3DWithDiam(x=0, y=0, z=0, diameter=diam) - p=Point3DWithDiam(x=0, y=0, z=0, diameter=diam) - + + cell = Cell(id="cell0") + + diam = 1.0 + d0 = Point3DWithDiam(x=0, y=0, z=0, diameter=diam) + p = Point3DWithDiam(x=0, y=0, z=0, diameter=diam) + seg0 = Segment(id=0, proximal=p, distal=d0) - - d1=Point3DWithDiam(x=10, y=0, z=0, diameter=diam) - + + d1 = Point3DWithDiam(x=10, y=0, z=0, diameter=diam) + cell.morphology = Morphology() cell.morphology.segments.append(seg0) - + seg1 = Segment(id=1, distal=d1, parent=SegmentParent(0)) cell.morphology.segments.append(seg1) - - d2=Point3DWithDiam(x=20, y=0, z=0, diameter=diam) - - seg2 = Segment(id=2, proximal =d1, distal=d2, parent=SegmentParent(seg1.id)) + + d2 = Point3DWithDiam(x=20, y=0, z=0, diameter=diam) + + seg2 = Segment(id=2, proximal=d1, distal=d2, parent=SegmentParent(seg1.id)) cell.morphology.segments.append(seg2) - - d3=Point3DWithDiam(x=15, y=10, z=0, diameter=diam) - - seg3 = Segment(id=3, distal=d3, parent=SegmentParent(seg2.id, fraction_along=0.5)) + + d3 = Point3DWithDiam(x=15, y=10, z=0, diameter=diam) + + seg3 = Segment( + id=3, distal=d3, parent=SegmentParent(seg2.id, fraction_along=0.5) + ) cell.morphology.segments.append(seg3) - for f in [0,0.25,0.5,0.75,1]: + for f in [0, 0.25, 0.5, 0.75, 1]: seg3.parent.fraction_along = f - print('Fract: %s, length: %s, proximal: %s'%(f, cell.get_segment_length(seg3.id), cell.get_actual_proximal(seg3.id))) - + print( + "Fract: %s, length: %s, proximal: %s" + % ( + f, + cell.get_segment_length(seg3.id), + cell.get_actual_proximal(seg3.id), + ) + ) + self.assertEqual(seg0.length, 0) self.assertEqual(cell.get_segment_length(seg0.id), 0) - + self.assertRaises(Exception, lambda: seg1.length) # No proximal self.assertEqual(cell.get_segment_length(seg1.id), 10) - + self.assertEqual(seg2.length, 10) self.assertEqual(cell.get_segment_length(seg2.id), 10) - - + self.assertEqual(seg0.surface_area, math.pi) self.assertEqual(cell.get_segment_surface_area(seg0.id), 
math.pi) - self.assertRaises(Exception, lambda: seg1.surface_area) # No proximal - self.assertEqual(cell.get_segment_surface_area(seg1.id), math.pi*10) - self.assertEqual(seg2.surface_area, math.pi*10) - self.assertEqual(cell.get_segment_surface_area(seg2.id), math.pi*10) - - v = 4.0/3*math.pi*(diam/2)**3 + self.assertRaises(Exception, lambda: seg1.surface_area) # No proximal + self.assertEqual(cell.get_segment_surface_area(seg1.id), math.pi * 10) + self.assertEqual(seg2.surface_area, math.pi * 10) + self.assertEqual(cell.get_segment_surface_area(seg2.id), math.pi * 10) + + v = 4.0 / 3 * math.pi * (diam / 2) ** 3 self.assertEqual(seg0.volume, v) self.assertEqual(cell.get_segment_volume(seg0.id), v) - v = math.pi*seg2.proximal.diameter/2*seg2.proximal.diameter/2*seg2.length - self.assertRaises(Exception, lambda: seg1.volume) # No proximal + v = ( + math.pi + * seg2.proximal.diameter + / 2 + * seg2.proximal.diameter + / 2 + * seg2.length + ) + self.assertRaises(Exception, lambda: seg1.volume) # No proximal self.assertAlmostEqual(cell.get_segment_volume(seg1.id), v, places=7) self.assertAlmostEqual(seg2.volume, v, places=7) self.assertAlmostEqual(cell.get_segment_volume(seg2.id), v, places=7) - - + # Break the sphere... 
- seg0.distal.diameter = diam*2 + seg0.distal.diameter = diam * 2 self.assertRaises(Exception, lambda: seg0.surface_area) self.assertRaises(Exception, lambda: seg0.volume) - - print('Passed...') - ''' ''' - + + print("Passed...") + """ """ + def runTest(self): print("Running tests in TestHelperProperties") + class TestAttachedSegments(unittest.TestCase): pass -if __name__ == '__main__': - +if __name__ == "__main__": + ta = TestHelperProperties() - + ta.test_length0() ta.test_length() - + ta.test_area0() ta.test_area1() ta.test_area() - + ta.test_volume0() ta.test_volume1() ta.test_volume() - + ta.test_spherical() - ta.test_cell_with_segs() \ No newline at end of file + ta.test_cell_with_segs() diff --git a/neuroml/test/test_writers.py b/neuroml/test/test_writers.py index e811585b..e1ae6733 100644 --- a/neuroml/test/test_writers.py +++ b/neuroml/test/test_writers.py @@ -14,197 +14,61 @@ import unittest -class TestJSONWriter(unittest.TestCase): - - def setUp(self): - num_segments = int(1e4) #Per cell - num_vertices = num_segments + 1 - - x = np.linspace(0,10,num_vertices) - y = np.zeros(num_vertices) - z = np.zeros(num_vertices) - d = np.linspace(1,0.01,num_vertices) - - vertices = np.array([x,y,z,d]).T - - connectivity = range(-1,num_segments) - - big_arraymorph = am.ArrayMorphology(vertices = vertices, - connectivity = connectivity) - transposed_x = x+10 - transposed_vertices = np.array([transposed_x,y,z,d]).T - - transposed_arraymorph = am.ArrayMorphology(vertices = transposed_vertices, - connectivity = connectivity) - - bigger_d = d + 0.5 - fatter_vertices = np.array([x,y,z,bigger_d]).T - - fatter_arraymorph = am.ArrayMorphology(vertices = fatter_vertices, - connectivity = connectivity) - - neuroml_cell = neuroml.Cell(id='cell_4') - neuroml_morphology = neuroml.Morphology(id = 'my_morph') - neuroml_cell.morphology = neuroml_morphology - - self.transposed_arraymorph = transposed_arraymorph - self.fatter_arraymorph = fatter_arraymorph - self.big_arraymorph = 
big_arraymorph - - self.cell_1 = neuroml.Cell(id='cell_1') - self.cell_2 = neuroml.Cell(id='cell_2') - self.cell_3 = neuroml.Cell(id='cell_3') - - self.cell_1.morphology = transposed_arraymorph - self.cell_2.morphology = fatter_arraymorph - self.cell_3.morphology = big_arraymorph - - self.test_doc = neuroml.NeuroMLDocument(id='TestDocument') - - self.test_doc.cells.append(self.cell_1) - self.test_doc.cells.append(self.cell_2) - self.test_doc.cells.append(self.cell_3) - self.test_doc.cells.append(neuroml_cell) - - def test_write_to_mongodb_backend(self): - writer_method = neuroml.writers.JSONWriter.write_to_mongodb - - try: - writer_method(neuroml_document = self.test_doc, - db = 'mike_test_db3') - except: - self.fail("Exception raised!") - - def test_write_to_mongodb_expected(self): - """ - More of an integration test, write a file and confirm the contents are - as expected. - - TODO: add a teardown - """ - - db_name = 'test_db_4' - - writer_method = neuroml.writers.JSONWriter.write_to_mongodb - writer_method(neuroml_document = self.test_doc, - db = db_name) - - loader_method = neuroml.loaders.JSONLoader.load_from_mongodb - - doc = loader_method(db = db_name, - id = self.test_doc.id, - host = 'localhost') - - array_morph = doc.cells[2].morphology - - connectivity_equal = np.testing.assert_array_equal(array_morph.connectivity,self.big_arraymorph.connectivity) - physical_masks_equal = np.testing.assert_array_equal(array_morph.physical_mask,self.big_arraymorph.physical_mask) - vertices_equal = np.testing.assert_array_equal(array_morph.vertices,self.big_arraymorph.vertices) - - - self.assertEqual(connectivity_equal,None) #None when equal - self.assertEqual(physical_masks_equal,None) #None when equal - self.assertEqual(vertices_equal,None) #None when equal - - def test_write_multiple_morphologies(self): - filename = tempfile.mkstemp()[1] - writer_method = neuroml.writers.JSONWriter.write - try: - writer_method(self.test_doc,filename) - except: - self.fail("Exception 
raised!") - - def test_write_expected(self): - """ - More of an integration test, write a file and confirm the contents are - as expected. - """ - - filename = tempfile.mkstemp()[1] - - writer_method = neuroml.writers.JSONWriter.write - writer_method(self.test_doc,filename) - - loader_method = neuroml.loaders.JSONLoader.load - - doc = loader_method(filename) - - array_morph = doc.cells[2].morphology - - connectivity_equal = np.testing.assert_array_equal(array_morph.connectivity,self.big_arraymorph.connectivity) - physical_masks_equal = np.testing.assert_array_equal(array_morph.physical_mask,self.big_arraymorph.physical_mask) - vertices_equal = np.testing.assert_array_equal(array_morph.vertices,self.big_arraymorph.vertices) - - - self.assertEqual(connectivity_equal,None) #None when equal - self.assertEqual(physical_masks_equal,None) #None when equal - self.assertEqual(vertices_equal,None) #None when equal - - def test_writer_instance(self): - filename = tempfile.mkstemp()[1] - writer_method = neuroml.writers.JSONWriter.write - writer_method(self.test_doc,filename) - - loader_method = neuroml.loaders.JSONLoader.load - document = loader_method(filename) - - self.assertIsInstance(document,neuroml.NeuroMLDocument) - - class TestNeuroMLWriter(unittest.TestCase): - def test_write_nonsense(self): """ Should Throw an attribute error as integers do not have an export method """ - a = 6 #not NeuroML-writeable + a = 6 # not NeuroML-writeable writer_method = neuroml.writers.NeuroMLWriter.write self.assertRaises(AttributeError, writer_method, a, "tmpfile") -class TestArrayMorphWriter(unittest.TestCase): +class TestArrayMorphWriter(unittest.TestCase): def setUp(self): num_segments = int(1e6) num_vertices = num_segments + 1 - x = np.linspace(0,10,num_vertices) + x = np.linspace(0, 10, num_vertices) y = np.zeros(num_vertices) z = np.zeros(num_vertices) - d = np.linspace(1,0.01,num_vertices) + d = np.linspace(1, 0.01, num_vertices) - vertices = np.array([x,y,z,d]).T + vertices = 
np.array([x, y, z, d]).T - connectivity = range(-1,num_segments) + connectivity = range(-1, num_segments) - big_arraymorph = am.ArrayMorphology(vertices = vertices, - connectivity = connectivity) - transposed_x = x+10 - transposed_vertices = np.array([transposed_x,y,z,d]).T + big_arraymorph = am.ArrayMorphology( + vertices=vertices, connectivity=connectivity + ) + transposed_x = x + 10 + transposed_vertices = np.array([transposed_x, y, z, d]).T - transposed_arraymorph = am.ArrayMorphology(vertices = transposed_vertices, - connectivity = connectivity) + transposed_arraymorph = am.ArrayMorphology( + vertices=transposed_vertices, connectivity=connectivity + ) bigger_d = d + 0.5 - fatter_vertices = np.array([x,y,z,bigger_d]).T - - fatter_arraymorph = am.ArrayMorphology(vertices = fatter_vertices, - connectivity = connectivity) + fatter_vertices = np.array([x, y, z, bigger_d]).T + fatter_arraymorph = am.ArrayMorphology( + vertices=fatter_vertices, connectivity=connectivity + ) self.transposed_arraymorph = transposed_arraymorph self.fatter_arraymorph = fatter_arraymorph self.big_arraymorph = big_arraymorph - self.cell_1 = neuroml.Cell(id='cell_1') - self.cell_2 = neuroml.Cell(id='cell_2') - self.cell_3 = neuroml.Cell(id='cell_3') + self.cell_1 = neuroml.Cell(id="cell_1") + self.cell_2 = neuroml.Cell(id="cell_2") + self.cell_3 = neuroml.Cell(id="cell_3") self.cell_1.morphology = transposed_arraymorph self.cell_2.morphology = fatter_arraymorph self.cell_3.morphology = big_arraymorph - self.test_doc = neuroml.NeuroMLDocument(id='TestDocument') + self.test_doc = neuroml.NeuroMLDocument(id="TestDocument") self.test_doc.cells.append(self.cell_1) self.test_doc.cells.append(self.cell_2) @@ -215,7 +79,7 @@ def test_write_big_arraymorph(self): filename = tempfile.mkstemp()[1] try: - writer_method(self.test_doc,filename) + writer_method(self.test_doc, filename) except: self.fail("Exception raised!") @@ -228,35 +92,41 @@ def test_write_expected(self): filename = 
tempfile.mkstemp()[1] writer_method = neuroml.writers.ArrayMorphWriter.write - writer_method(self.big_arraymorph,filename) + writer_method(self.big_arraymorph, filename) loader_method = neuroml.loaders.ArrayMorphLoader.load doc = loader_method(filename) array_morph = doc.morphology[0] - connectivity_equal = np.testing.assert_array_equal(array_morph.connectivity,self.big_arraymorph.connectivity) - physical_masks_equal = np.testing.assert_array_equal(array_morph.physical_mask,self.big_arraymorph.physical_mask) - vertices_equal = np.testing.assert_array_equal(array_morph.vertices,self.big_arraymorph.vertices) + connectivity_equal = np.testing.assert_array_equal( + array_morph.connectivity, self.big_arraymorph.connectivity + ) + physical_masks_equal = np.testing.assert_array_equal( + array_morph.physical_mask, self.big_arraymorph.physical_mask + ) + vertices_equal = np.testing.assert_array_equal( + array_morph.vertices, self.big_arraymorph.vertices + ) - self.assertEqual(connectivity_equal,None) #None when equal - self.assertEqual(physical_masks_equal,None) #None when equal - self.assertEqual(vertices_equal,None) #None when equal + self.assertEqual(connectivity_equal, None) # None when equal + self.assertEqual(physical_masks_equal, None) # None when equal + self.assertEqual(vertices_equal, None) # None when equal def test_write_multiple_morphologies(self): filename = tempfile.mkstemp()[1] writer_method = neuroml.writers.ArrayMorphWriter.write try: - writer_method(self.test_doc,filename) + writer_method(self.test_doc, filename) except: self.fail("Exception raised!") def test_write_multiple_morphologies(self): filename = tempfile.mkstemp()[1] writer_method = neuroml.writers.ArrayMorphWriter.write - writer_method(self.test_doc,filename) + writer_method(self.test_doc, filename) loader_method = neuroml.loaders.ArrayMorphLoader.load document = loader_method(filename) - self.assertIsInstance(document,neuroml.NeuroMLDocument) + self.assertIsInstance(document, 
neuroml.NeuroMLDocument) diff --git a/neuroml/test/test_xml_parser.py b/neuroml/test/test_xml_parser.py index 2fa1824d..8b8ec6d4 100644 --- a/neuroml/test/test_xml_parser.py +++ b/neuroml/test/test_xml_parser.py @@ -3,88 +3,79 @@ """ +from neuroml.hdf5.DefaultNetworkHandler import DefaultNetworkHandler from neuroml.hdf5.NetworkBuilder import NetworkBuilder from neuroml.hdf5.NeuroMLXMLParser import NeuroMLXMLParser from neuroml import loaders -import logging +import neuroml.writers as writers -import os - -try: - import unittest2 as unittest -except ImportError: - import unittest - - -class TestNeuroMLXMLParser(unittest.TestCase): - def test_parse(self): +import os +import pytest + + +@pytest.mark.parametrize( + "f", + [ + "simplenet.nml", + "testh5.nml", + "pyr_4_sym.cell.nml", + "MediumNet.net.nml", + "complete.nml", + ], +) +class TestNeuroMLXMLParser: + def test_parse(self, f): base_dir = os.path.dirname(__file__) - #base_dir = '.' - - logging.basicConfig(level=logging.INFO, format="%(name)-19s %(levelname)-5s - %(message)s") - - for f in ['simplenet.nml','testh5.nml','pyr_4_sym.cell.nml','MediumNet.net.nml','complete.nml']: - - file_name = base_dir+'/../examples/test_files/'+f - - nml_doc0 = loaders.read_neuroml2_file(file_name, - include_includes=True, - verbose=True) - - summary0 = nml_doc0.summary(show_includes=False,show_non_network=False) - print('\n'+summary0) - - from neuroml.hdf5.DefaultNetworkHandler import DefaultNetworkHandler - - nmlHandler = DefaultNetworkHandler() - - currParser = NeuroMLXMLParser(nmlHandler) - - currParser.parse(file_name) - - print('-------------------------------\n\n') + file_name = base_dir + "/../examples/test_files/" + f + print("---- Testing {}".format(file_name)) - nmlHandler = NetworkBuilder() + nml_doc0 = loaders.read_neuroml2_file( + file_name, include_includes=True, verbose=True + ) - currParser = NeuroMLXMLParser(nmlHandler) + summary0 = nml_doc0.summary(show_includes=False, show_non_network=False) + # print("\n" + 
summary0) - currParser.parse(file_name) + nmlHandler = DefaultNetworkHandler() + currParser = NeuroMLXMLParser(nmlHandler) + currParser.parse(file_name) - nml_doc = nmlHandler.get_nml_doc() + print("-------------------------------\n\n") - summary = nml_doc.summary(show_includes=False,show_non_network=False) + nmlHandler_new = NetworkBuilder() + currParser_new = NeuroMLXMLParser(nmlHandler_new) + currParser_new.parse(file_name) + nml_doc_new = nmlHandler_new.get_nml_doc() - print(summary) + summary = nml_doc_new.summary(show_includes=False, show_non_network=False) + # print(summary) - compare(summary,summary0) + compare(summary, summary0) - nml_file = base_dir+'/../examples/tmp/EXP_'+f - import neuroml.writers as writers - writers.NeuroMLWriter.write(nml_doc, nml_file) - print("Written network file to: "+nml_file) + nml_file_new = base_dir + "/../examples/tmp/EXP_" + f - def runTest(self): - print("Running tests in TestNeuroMLXMLParser") + writers.NeuroMLWriter.write(nml_doc_new, nml_file_new) + print("Written network file to: " + nml_file_new) -def compare(s1,s2): - if s1==s2: +def compare(s1, s2): + if s1 == s2: print("Same!") return - l1 = s1.split('\n') - l2 = s2.split('\n') + l1 = s1.split("\n") + l2 = s2.split("\n") - for i in range(min(len(l1),len(l2))): - if not l1[i]==l2[i]: - print("Mismatch at line %i:\n>>> %s\n<<< %s"%(i,l1[i],l2[i])) - if len(l1)!=len(l2): + for i in range(min(len(l1), len(l2))): + if not l1[i] == l2[i]: + print("Mismatch at line %i:\n>>> %s\n<<< %s" % (i, l1[i], l2[i])) + if len(l1) != len(l2): print("Different number of lines!") - assert(s1==s2) + assert s1 == s2 -if __name__ == '__main__': +if __name__ == "__main__": tnxp = TestNeuroMLXMLParser() tnxp.test_parse() diff --git a/neuroml/utils.py b/neuroml/utils.py index 662714b7..59623d4c 100644 --- a/neuroml/utils.py +++ b/neuroml/utils.py @@ -5,125 +5,186 @@ """ import os.path import sys +import inspect +import warnings import neuroml -import inspect def validate_neuroml2(file_name): 
+ # (str) -> None + """Validate a NeuroML document against the NeuroML schema specification. + :param file_name: name of NeuroML file to validate. + :type file_name: str + """ from lxml import etree - try: - from urllib2 import urlopen # Python 2 - except: - from urllib.request import urlopen # Python 3 - - xsd_file = os.path.join(os.path.dirname(__file__), 'nml/NeuroML_%s.xsd'%neuroml.current_neuroml_version) - + + xsd_file = os.path.join( + os.path.dirname(__file__), + "nml/NeuroML_%s.xsd" % neuroml.current_neuroml_version, + ) + with open(xsd_file) as schema_file: xmlschema = etree.XMLSchema(etree.parse(schema_file)) - print("Validating %s against %s" %(file_name, xsd_file)) + print("Validating %s against %s" % (file_name, xsd_file)) if not xmlschema.validate(etree.parse(file_name)): - xmlschema.assertValid(etree.parse(file_name)) # print reason if file is invalid + xmlschema.assertValid( + etree.parse(file_name) + ) # print reason if file is invalid return print("It's valid!") -# Return True if .nml file is valid else false + def is_valid_neuroml2(file_name): + # (str) -> Bool + """Check if a file is valid NeuroML2. + + :param file_name: name of NeuroML file to check + :type file_name: str + :returns: True if file is valid, False if not. + :rtype: Boolean + """ from lxml import etree - try: - from urllib2 import urlopen # Python 2 - except: - from urllib.request import urlopen # Python 3 - xsd_file = os.path.join(os.path.dirname(__file__), 'nml/NeuroML_%s.xsd' % neuroml.current_neuroml_version) + xsd_file = os.path.join( + os.path.dirname(__file__), + "nml/NeuroML_%s.xsd" % neuroml.current_neuroml_version, + ) with open(xsd_file) as schema_file: xmlschema = etree.XMLSchema(etree.parse(schema_file)) - return (xmlschema.validate(etree.parse(file_name))) + return xmlschema.validate(etree.parse(file_name)) return False def print_summary(nml_file_name): - + """Print a summary of the NeuroML model in the given file. 
+ + :param nml_file_name: name of NeuroML file to print summary of + :type nml_file_name: str + """ print(get_summary(nml_file_name)) - - + + def get_summary(nml_file_name): - + # (str) -> str + """Get a summary of the given NeuroML file. + + :param nml_file_name: name of NeuroML file to get summary of + :type nml_file_name: str + :returns: summary of provided file + :rtype: str + """ from neuroml.loaders import read_neuroml2_file - nml_doc = read_neuroml2_file(nml_file_name,include_includes=True, verbose=False, optimized=True) - + + nml_doc = read_neuroml2_file( + nml_file_name, include_includes=True, verbose=False, optimized=True + ) + return nml_doc.summary(show_includes=False) - + def add_all_to_document(nml_doc_src, nml_doc_tgt, verbose=False): - + """Add all members of the source NeuroML document to the target NeuroML document. + + :param nml_doc_src: source NeuroML document to copy from + :type nml_doc_src: NeuroMLDocument + :param nml_doc_tgt: target NeuroML document to copy to + :type nml_doc_tgt: NeuroMLDocument + :param verbose: control verbosity of working + :type verbose: bool + + :raises Exception: if a member could not be copied. 
+ """ membs = inspect.getmembers(nml_doc_src) for memb in membs: - if isinstance(memb[1], list) and len(memb[1])>0 \ - and not memb[0].endswith('_'): + if isinstance(memb[1], list) and len(memb[1]) > 0 and not memb[0].endswith("_"): for entry in memb[1]: - if memb[0] != 'includes': - + if memb[0] != "includes": + added = False for c in getattr(nml_doc_tgt, memb[0]): - if hasattr(c,'id') and c.id == entry.id: + if hasattr(c, "id") and c.id == entry.id: added = True if not added: - #print(" Adding %s to list: %s" \ + # print(" Adding %s to list: %s" \ # %(entry.id if hasattr(entry,'id') else entry.name, memb[0])) getattr(nml_doc_tgt, memb[0]).append(entry) added = True - + if not added: - raise Exception("Could not add %s from %s to %s"%(entry, nml_doc_src, nml_doc_tgt)) - + raise Exception( + "Could not add %s from %s to %s" + % (entry, nml_doc_src, nml_doc_tgt) + ) + + def append_to_element(parent, child): - - import inspect - membs = inspect.getmembers(parent) - #print("Adding %s to element %s"%(child, parent)) - mappings = {} - for mdi in parent.member_data_items_: - mappings[mdi.data_type] = mdi.name - added = False - for memb in membs: - if isinstance(memb[1], list) and not memb[0].endswith('_'): - #print("Adding %s to %s in %s?"%(child.__class__.__name__, memb[0], parent.__class__.__name__)) - if mappings[child.__class__.__name__] == memb[0]: - for c in getattr(parent, memb[0]): - if c.id == child.id: - added = True - if not added: - getattr(parent, memb[0]).append(child) - #print("Adding %s to %s in %s?"%(child.__class__.__name__, memb[0], parent.__class__.__name__)) + """Append a child element to a parent Component + + :param parent: parent NeuroML component to add element to + :type parent: Object + :param child: child NeuroML component to be added to parent + :type child: Object + :raises Exception: when the child could not be added to the parent + """ + warnings.warn("This method is deprecated and will be removed in future releases. 
Please use the `add` methods provided in each NeuroML ComponentType object", FutureWarning, stacklevel=2) + membs = inspect.getmembers(parent) + # print("Adding %s to element %s"%(child, parent)) + mappings = {} + for mdi in parent.member_data_items_: + mappings[mdi.data_type] = mdi.name + added = False + for memb in membs: + if isinstance(memb[1], list) and not memb[0].endswith("_"): + # print("Adding %s to %s in %s?"%(child.__class__.__name__, memb[0], parent.__class__.__name__)) + if mappings[child.__class__.__name__] == memb[0]: + for c in getattr(parent, memb[0]): + if c.id == child.id: added = True - - if not added: - raise Exception("Could not add %s to %s"%(child, parent)) - + if not added: + getattr(parent, memb[0]).append(child) + # print("Adding %s to %s in %s?"%(child.__class__.__name__, memb[0], parent.__class__.__name__)) + added = True + + if not added: + raise Exception("Could not add %s to %s" % (child, parent)) + + def has_segment_fraction_info(connections): + """Check if connections include fraction information + + :param connections: list of connection objects + :type connections: list + :returns: True if connections include fragment information, otherwise False + :rtype: Boolean + """ if not connections: return False no_seg_fract_info = True - i=0 - while no_seg_fract_info and i= 2.20a; python_version >= '3.0' +generateds == 2.30.11; python_version == '2.7' +cython +flake8 +pytest +numpy +black ; python_version >= '3.0' diff --git a/requirements.txt b/requirements.txt index 338e612b..12676046 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,10 +1,9 @@ six lxml -cython numpy pymongo -numexpr +numexpr ; python_version >= '3' +numexpr<2.8 ; python_version < '3' simplejson; python_version < '3.5' -tables>=3.3.0 -jsonpickle>=0.9.6 -pytest +git+git://github.com/PyTables/PyTables/@master#egg=tables ; python_version >= '3.10' +tables>=3.3.0 ; python_version < '3.10' diff --git a/setup.cfg b/setup.cfg index 34ad2ac1..fa95b864 100644 --- 
a/setup.cfg +++ b/setup.cfg @@ -3,4 +3,13 @@ # spacing around operators, comment blocks, in argument lists # lines too long ignore = E501,E502,F403,F405,E231,E228,E225,E226,E265,E261 -exclude = neuroml/nml/nml.py +exclude = + neuroml/nml/nml.py, + doc, + build + +[mypy] +exclude = + neuroml/nml/nml.py, + doc, + build diff --git a/setup.py b/setup.py index d482d434..5dc68e28 100644 --- a/setup.py +++ b/setup.py @@ -4,44 +4,47 @@ long_description = open("README.md").read() -for line in open('neuroml/__init__.py'): +for line in open("neuroml/__init__.py"): if line.startswith("__version__"): version = line.split("=")[1].strip()[1:-1] setup( - name = "libNeuroML", - version = version, - packages = ['neuroml', 'neuroml.test','neuroml.nml','neuroml.hdf5'], - package_data = {'neuroml.test': ['*.nml'], 'neuroml.nml': ['*.xsd']}, - author = "libNeuroML authors and contributors", - author_email = "vellamike@gmail.com, p.gleeson@gmail.com", - description = "A Python library for working with NeuroML descriptions of neuronal models", - long_description = long_description, + name="libNeuroML", + version=version, + packages=["neuroml", "neuroml.test", "neuroml.nml", "neuroml.hdf5"], + package_data={"neuroml.test": ["*.nml"], "neuroml.nml": ["*.xsd"]}, + author="libNeuroML authors and contributors", + author_email="vellamike@gmail.com, p.gleeson@gmail.com", + description="A Python library for working with NeuroML descriptions of neuronal models", + long_description=long_description, long_description_content_type="text/markdown", - install_requires=['lxml', 'six'], + install_requires=["lxml", "six"], tests_require=["pytest"], - extras_require={"full": [ - "cython", - "numpy", - "pymongo", - "numexpr", - "simplejson; python_version < '3.5'", - "tables>=3.3.0", - "jsonpickle>=0.9.6" - ]}, - license = "BSD", + extras_require={ + "full": [ + "cython", + "numpy", + "pymongo", + "numexpr", + "simplejson; python_version < '3.5'", + "tables>=3.3.0", + ] + }, + license="BSD", 
url="http://libneuroml.readthedocs.org/en/latest/", - classifiers = [ - 'Intended Audience :: Science/Research', - 'License :: OSI Approved :: BSD License', - 'Natural Language :: English', - 'Operating System :: OS Independent', - 'Development Status :: 5 - Production/Stable', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Topic :: Scientific/Engineering :: Bio-Informatics', - 'Topic :: Scientific/Engineering'] + classifiers=[ + "Intended Audience :: Science/Research", + "License :: OSI Approved :: BSD License", + "Natural Language :: English", + "Operating System :: OS Independent", + "Development Status :: 5 - Production/Stable", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Topic :: Scientific/Engineering :: Bio-Informatics", + "Topic :: Scientific/Engineering", + ], )